Compare commits

344 Commits
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 7e48015a54 | ||
|  | 9ed3abf932 | ||
|  | 61762828a3 | ||
|  | 59beabe5ac | ||
|  | 0b30faa28c | ||
|  | d12d49b93f | ||
|  | f1d64d275a | ||
|  | d094eeeb03 | ||
|  | be25af658e | ||
|  | 794f52c229 | ||
|  | 5d4dc4ed4c | ||
|  | e49d97b898 | ||
|  | b6b4f1ba62 | ||
|  | 653d476716 | ||
|  | 48b855258c | ||
|  | c7efdaf5f9 | ||
|  | 22523ed3d3 | ||
|  | 33c602dd61 | ||
|  | e2a5509b76 | ||
|  | 61a0fa1a89 | ||
|  | a35bd8292b | ||
|  | 06c8ae60e3 | ||
|  | deeab1f845 | ||
|  | da81c4c987 | ||
|  | d180f1b2d5 | ||
|  | 526135629c | ||
|  | 6b9493e057 | ||
|  | 9bb33d2afc | ||
|  | 7421138533 | ||
|  | d0800c52bb | ||
|  | 913fcd4df2 | ||
|  | 83322cc725 | ||
|  | 5944501feb | ||
|  | 17e3603d3d | ||
|  | 95be43ae47 | ||
|  | feb91cbbaa | ||
|  | 79409af168 | ||
|  | 5dbfb64822 | ||
|  | 5e7ebf5e69 | ||
|  | e73215ca74 | ||
|  | a5f123b9ce | ||
|  | ac058e9675 | ||
|  | 371b764d1d | ||
|  | 66d7172e09 | ||
|  | 99d3a8a749 | ||
|  | db5ff372a4 | ||
|  | 3fe83f81be | ||
|  | 669e638fd6 | ||
|  | f1f999f3b6 | ||
|  | 6f3b6fa9ce | ||
|  | 938f945301 | ||
|  | e3efb2aad6 | ||
|  | 1e678c0d78 | ||
|  | a59c111140 | ||
|  | a8b2a31bed | ||
|  | 37402f9ee8 | ||
|  | e7b5ecb40f | ||
|  | c817ef04b9 | ||
|  | f52b18439c | ||
|  | 1e03c628d5 | ||
|  | 71fb39db1f | ||
|  | bcfb3726b0 | ||
|  | c6e9e29671 | ||
|  | 1bfefcce39 | ||
|  | 22488e93e1 | ||
|  | 244b89f035 | ||
|  | 1f9a241b94 | ||
|  | 03641aae42 | ||
|  | a2bdd113cc | ||
|  | a92e2f3c7b | ||
|  | 97766b3a57 | ||
|  | 9ef4c3bb06 | ||
|  | d82f0cd757 | ||
|  | 5f529e2af4 | ||
|  | beadd9e02b | ||
|  | 72543789cb | ||
|  | 5789439fa9 | ||
|  | f549126bcf | ||
|  | 7197548bad | ||
|  | 241fde783c | ||
|  | 2b872cd1f4 | ||
|  | a606fb4d1d | ||
|  | 9f9c6be38e | ||
|  | 01ee524049 | ||
|  | af9cb65338 | ||
|  | 8aa11c580b | ||
|  | ada627f444 | ||
|  | a7b6d338c3 | ||
|  | 9f00538b97 | ||
|  | a085015282 | ||
|  | 0b9c220fbb | ||
|  | 0e3d04873d | ||
|  | b7578d939f | ||
|  | b5c28de03f | ||
|  | e17d25c156 | ||
|  | c25dc1b99c | ||
|  | a493a574bd | ||
|  | 4284493dce | ||
|  | 25059de8e1 | ||
|  | 1731b05ad0 | ||
|  | e80dc663ac | ||
|  | 39988a4c2f | ||
|  | 415bff303a | ||
|  | a65eb62a54 | ||
|  | 03b2982128 | ||
|  | bff0527857 | ||
|  | f3b7634254 | ||
|  | 6a9593c0b9 | ||
|  | edb785b8e5 | ||
|  | 26d757b50a | ||
|  | 535079ee87 | ||
|  | ac380c29c1 | ||
|  | 3fd212f26c | ||
|  | 04a3abc651 | ||
|  | 6caf85ddd1 | ||
|  | 16e4071508 | ||
|  | 69e7c4324b | ||
|  | a1c4a8cbe5 | ||
|  | e37f6cfda7 | ||
|  | 989c804409 | ||
|  | 7345bc3c82 | ||
|  | 69bee35700 | ||
|  | 598e24df7c | ||
|  | 0ae669201e | ||
|  | f52a8a4642 | ||
|  | 9c40b61ef2 | ||
|  | 72dabcda83 | ||
|  | 161a06dbcc | ||
|  | 8ed3d4e70c | ||
|  | a4223ccc8a | ||
|  | ca85923855 | ||
|  | 52bfe7c493 | ||
|  | 4786bd0cbe | ||
|  | cadab160ff | ||
|  | 6a7f17b2b0 | ||
|  | 4986a4d775 | ||
|  | 903af0c2cf | ||
|  | 3282fa803c | ||
|  | 67cc47608d | ||
|  | 0411704b8b | ||
|  | 1de85b2c69 | ||
|  | 33b012f29d | ||
|  | 1357584df3 | ||
|  | e15809e271 | ||
|  | 0da1950427 | ||
|  | e590b921be | ||
|  | 09462692f5 | ||
|  | c1d1b5f762 | ||
|  | 6b9c87b858 | ||
|  | 485b6eb904 | ||
|  | 057630bdb5 | ||
|  | 6b02873b30 | ||
|  | 0fa0fc6d6b | ||
|  | 339ec07465 | ||
|  | cd2e798fea | ||
|  | d5cadbeae2 | ||
|  | 8046a3ccae | ||
|  | bf91d60b31 | ||
|  | 539c047ec8 | ||
|  | 290c18fa87 | ||
|  | 98c46f5e57 | ||
|  | f8bd5b5b4e | ||
|  | 816d32edad | ||
|  | 8453835c05 | ||
|  | 9328c356c8 | ||
|  | 89e3c1fc94 | ||
|  | 67e54cd15d | ||
|  | 278ea24786 | ||
|  | aba1662631 | ||
|  | 61eeb60c19 | ||
|  | 5e9a8f4806 | ||
|  | 4cb274e9bc | ||
|  | 8b9b1a6a35 | ||
|  | 2655964113 | ||
|  | 188bad061b | ||
|  | 3af4c329aa | ||
|  | 6c13395f7d | ||
|  | 77b32ba360 | ||
|  | 91dba291ac | ||
|  | a6bc293640 | ||
|  | 53882d6e5f | ||
|  | d68adfbf10 | ||
|  | 498a392d7f | ||
|  | 740f6c05db | ||
|  | d810ce301f | ||
|  | 5ef6a14d24 | ||
|  | a13f6f1e68 | ||
|  | d2d0f1aaee | ||
|  | e64c72cc89 | ||
|  | 9ab915a08b | ||
|  | e26fbf0328 | ||
|  | d9a52c4a2a | ||
|  | 7b2ec90de9 | ||
|  | d310bf8bbf | ||
|  | 2abc6cc939 | ||
|  | 56d4e694a2 | ||
|  | 5f002c9cdc | ||
|  | 759daf4b4a | ||
|  | 3a8d9568e3 | ||
|  | ff22a9d94a | ||
|  | a6e42d5374 | ||
|  | a2f74e0488 | ||
|  | ee44240569 | ||
|  | d0828744a2 | ||
|  | 6e2e576b29 | ||
|  | bf61e27f8a | ||
|  | c441c30b46 | ||
|  | 0e741230ea | ||
|  | 1bfe9ac2db | ||
|  | 6812e72348 | ||
|  | b6449d2f5b | ||
|  | 7e3ea20dce | ||
|  | c9d6fe9dcd | ||
|  | 4a649a6b8b | ||
|  | 8fef184963 | ||
|  | 69583ca3c0 | ||
|  | 6038a68e91 | ||
|  | fa8bd8db87 | ||
|  | 18b4f0ed0f | ||
|  | 461f9d66c9 | ||
|  | 2155103c7a | ||
|  | c9a6839c45 | ||
|  | 9fbe331a80 | ||
|  | a56389c4ce | ||
|  | 64656784cb | ||
|  | 6eff2c181e | ||
|  | 1aa48c6d62 | ||
|  | c7ca1a346d | ||
|  | fa0ec7b502 | ||
|  | 768438c136 | ||
|  | 9badea0b3c | ||
|  | 43263a1650 | ||
|  | 821e02dc75 | ||
|  | ed011ecf28 | ||
|  | d861de4c2f | ||
|  | 3a3b2449dc | ||
|  | d2614406ca | ||
|  | 0798d098ae | ||
|  | dab7ddc2bb | ||
|  | 081a96e281 | ||
|  | a7dd881d79 | ||
|  | 8134d5e24d | ||
|  | ba6756cd45 | ||
|  | 5d8fce21ac | ||
|  | e7e4a5bcd4 | ||
|  | 55f33357ea | ||
|  | 90568bba31 | ||
|  | 5d6e2dc2e4 | ||
|  | 6bb33f2559 | ||
|  | ced92554ed | ||
|  | dff3383158 | ||
|  | bf03c89cb2 | ||
|  | 9f1484bbef | ||
|  | 3899680e26 | ||
|  | 6bb2eb25a1 | ||
|  | f8dfd8edb3 | ||
|  | 042be624a3 | ||
|  | 6bafa4c79a | ||
|  | 58b42fac5c | ||
|  | 3b47b9558a | ||
|  | ccf9636296 | ||
|  | 96942719f2 | ||
|  | 69cf1c1adc | ||
|  | d77cba40b8 | ||
|  | 968735b555 | ||
|  | ceed9d29eb | ||
|  | 41329039ee | ||
|  | f68b102ca8 | ||
|  | fa36e54298 | ||
|  | b689f57435 | ||
|  | 885fa0ff56 | ||
|  | 303acb72a3 | ||
|  | b2a46cd0cd | ||
|  | 5a5ecb3ee3 | ||
|  | 60b4ab6a63 | ||
|  | e4b096a08f | ||
|  | 343f55049b | ||
|  | 6b46025261 | ||
|  | 5ea503f23e | ||
|  | ce95f9ac23 | ||
|  | c3fb87501b | ||
|  | dc6a343612 | ||
|  | 3a61053957 | ||
|  | 570129e4d4 | ||
|  | 3315c7045f | ||
|  | 5ae50e242c | ||
|  | bbcf449719 | ||
|  | aab10f7184 | ||
|  | 8d43488cb8 | ||
|  | 0a9c647e19 | ||
|  | 40db5d4aa8 | ||
|  | 9254532baa | ||
|  | 7abed47cf0 | ||
|  | 5c6ac758f7 | ||
|  | 007677962c | ||
|  | 9c4aeab64a | ||
|  | 48e6fc0efe | ||
|  | c8be713d11 | ||
|  | ae887c8648 | ||
|  | 5daac2531b | ||
|  | 68def00327 | ||
|  | 67e7976710 | ||
|  | 35747e937e | ||
|  | fb439787a4 | ||
|  | 8fa368f473 | ||
|  | c84a9d07b1 | ||
|  | 7fb46cdfc4 | ||
|  | 52985e5ddc | ||
|  | e880935dc3 | ||
|  | cc22b1bca5 | ||
|  | 49a5128918 | ||
|  | fedc7dcb44 | ||
|  | cd32b20215 | ||
|  | 15cd9832c4 | ||
|  | f25d4e4553 | ||
|  | 12d1c82b63 | ||
|  | aebe855078 | ||
|  | 3416a71ebd | ||
|  | 94b3fea528 | ||
|  | ad1a9ecca1 | ||
|  | 715accfb8a | ||
|  | a8e03c6138 | ||
|  | f69446b648 | ||
|  | eedfbe5846 | ||
|  | 153351cc9f | ||
|  | 1b1eec40a7 | ||
|  | 763877541a | ||
|  | 1fad7d72a2 | ||
|  | 51ea2ea879 | ||
|  | d77a478bf0 | ||
|  | e413c0264a | ||
|  | f88e7f898c | ||
|  | d07bd4a6db | ||
|  | fb34c099d5 | ||
|  | 1d2ee56a15 | ||
|  | 86665f7f09 | ||
|  | 0d2b4af986 | ||
|  | dc2b2eeb9f | ||
|  | e5dbb66d53 | ||
|  | 3474b1c471 | ||
|  | 3886de5b7c | ||
|  | 2b3cec06b3 | ||
|  | 8536754d14 | ||
|  | 1f36235801 | ||

28  .devcontainer/.env.example  Normal file
							| @@ -0,0 +1,28 @@ | ||||
| COMPOSE_PROJECT_NAME=trmm | ||||
|  | ||||
| IMAGE_REPO=tacticalrmm/ | ||||
| VERSION=latest | ||||
|  | ||||
| # tactical credentials (Used to login to dashboard) | ||||
| TRMM_USER=tactical | ||||
| TRMM_PASS=tactical | ||||
|  | ||||
| # dns settings | ||||
| APP_HOST=rmm.example.com | ||||
| API_HOST=api.example.com | ||||
| MESH_HOST=mesh.example.com | ||||
|  | ||||
| # mesh settings | ||||
| MESH_USER=tactical | ||||
| MESH_PASS=tactical | ||||
| MONGODB_USER=mongouser | ||||
| MONGODB_PASSWORD=mongopass | ||||
|  | ||||
| # database settings | ||||
| POSTGRES_USER=postgres | ||||
| POSTGRES_PASS=postgrespass | ||||
|  | ||||
| # DEV SETTINGS | ||||
| APP_PORT=80 | ||||
| API_PORT=80 | ||||
| HTTP_PROTOCOL=https | ||||
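The values above are development defaults. As a minimal usage sketch (the destination filename is an assumption; Compose reads a `.env` file from the project directory by default), the example file is copied and edited before bringing the stack up:

```bash
# assumption: the dev stack reads its variables from .devcontainer/.env
cp .devcontainer/.env.example .devcontainer/.env
# then change the TRMM_*/MESH_*/POSTGRES_* credentials and the *_HOST entries
```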
							
								
								
									
28  .devcontainer/api.dockerfile  Normal file
							| @@ -0,0 +1,28 @@ | ||||
| FROM python:3.8-slim | ||||
|  | ||||
| ENV TACTICAL_DIR /opt/tactical | ||||
| ENV TACTICAL_GO_DIR /usr/local/rmmgo | ||||
| ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready | ||||
| ENV WORKSPACE_DIR /workspace | ||||
| ENV TACTICAL_USER tactical | ||||
| ENV VIRTUAL_ENV ${WORKSPACE_DIR}/api/tacticalrmm/env | ||||
| ENV PYTHONDONTWRITEBYTECODE=1 | ||||
| ENV PYTHONUNBUFFERED=1 | ||||
|  | ||||
| EXPOSE 8000 | ||||
|  | ||||
| RUN groupadd -g 1000 tactical && \ | ||||
|     useradd -u 1000 -g 1000 tactical | ||||
|  | ||||
| # Copy Go Files | ||||
| COPY --from=golang:1.15 /usr/local/go ${TACTICAL_GO_DIR}/go | ||||
|  | ||||
| # Copy Dev python reqs | ||||
| COPY ./requirements.txt / | ||||
|  | ||||
| # Copy Docker Entrypoint | ||||
| COPY ./entrypoint.sh / | ||||
| RUN chmod +x /entrypoint.sh | ||||
| ENTRYPOINT ["/entrypoint.sh"] | ||||
|  | ||||
| WORKDIR ${WORKSPACE_DIR}/api/tacticalrmm | ||||
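This image is normally built by the compose files that follow (service `api-dev`, build context `.`, dockerfile `./api.dockerfile`, resolved relative to `.devcontainer/`). A rough manual equivalent, run from inside `.devcontainer/`, would be:

```bash
# sketch of a manual build; docker-compose performs the same build automatically
docker build -t api-dev -f api.dockerfile .
```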
							
								
								
									
19  .devcontainer/docker-compose.debug.yml  Normal file
							| @@ -0,0 +1,19 @@ | ||||
| version: '3.4' | ||||
|  | ||||
| services: | ||||
|   api-dev: | ||||
|     image: api-dev | ||||
|     build: | ||||
|       context: . | ||||
|       dockerfile: ./api.dockerfile | ||||
|     command: ["sh", "-c", "pip install debugpy -t /tmp && python /tmp/debugpy --wait-for-client --listen 0.0.0.0:5678 manage.py runserver 0.0.0.0:8000 --nothreading --noreload"] | ||||
|     ports: | ||||
|       - 8000:8000 | ||||
|       - 5678:5678 | ||||
|     volumes: | ||||
|       - tactical-data-dev:/opt/tactical | ||||
|       - ..:/workspace:cached | ||||
|     networks: | ||||
|       dev: | ||||
|         aliases:  | ||||
|           - tactical-backend | ||||
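This override file is layered on top of the base `docker-compose.yml` that follows; the `docker debug` task added in `.vscode/tasks.json` later in this diff runs essentially this command:

```bash
docker-compose -p trmm \
  -f .devcontainer/docker-compose.yml \
  -f .devcontainer/docker-compose.debug.yml \
  up -d --build
```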
							
								
								
									
211  .devcontainer/docker-compose.yml  Normal file
							| @@ -0,0 +1,211 @@ | ||||
| version: '3.4' | ||||
|  | ||||
| services: | ||||
|   api-dev: | ||||
|     image: api-dev | ||||
|     restart: always | ||||
|     build: | ||||
|       context: . | ||||
|       dockerfile: ./api.dockerfile | ||||
|     command: ["tactical-api"] | ||||
|     environment: | ||||
|       API_PORT: ${API_PORT} | ||||
|     ports: | ||||
|       - "8000:${API_PORT}" | ||||
|     volumes: | ||||
|       - tactical-data-dev:/opt/tactical | ||||
|       - ..:/workspace:cached | ||||
|     networks: | ||||
|       dev: | ||||
|         aliases:  | ||||
|           - tactical-backend | ||||
|  | ||||
|   app-dev: | ||||
|     image: node:12-alpine | ||||
|     restart: always | ||||
|     command: /bin/sh -c "npm install && npm run serve -- --host 0.0.0.0 --port ${APP_PORT}" | ||||
|     working_dir: /workspace/web | ||||
|     volumes: | ||||
|       - ..:/workspace:cached | ||||
|     ports: | ||||
|       - "8080:${APP_PORT}" | ||||
|     networks: | ||||
|       dev: | ||||
|         aliases:  | ||||
|           - tactical-frontend | ||||
|  | ||||
|   # nats | ||||
|   nats-dev: | ||||
|     image: ${IMAGE_REPO}tactical-nats:${VERSION} | ||||
|     restart: always | ||||
|     environment: | ||||
|       API_HOST: ${API_HOST} | ||||
|       API_PORT: ${API_PORT} | ||||
|       DEV: 1 | ||||
|     ports: | ||||
|       - "4222:4222" | ||||
|     volumes: | ||||
|       - tactical-data-dev:/opt/tactical | ||||
|       - ..:/workspace:cached | ||||
|     networks: | ||||
|       dev: | ||||
|         aliases: | ||||
|           - ${API_HOST} | ||||
|           - tactical-nats | ||||
|  | ||||
|   # meshcentral container | ||||
|   meshcentral-dev: | ||||
|     image: ${IMAGE_REPO}tactical-meshcentral:${VERSION} | ||||
|     restart: always | ||||
|     environment:  | ||||
|       MESH_HOST: ${MESH_HOST} | ||||
|       MESH_USER: ${MESH_USER} | ||||
|       MESH_PASS: ${MESH_PASS} | ||||
|       MONGODB_USER: ${MONGODB_USER} | ||||
|       MONGODB_PASSWORD: ${MONGODB_PASSWORD} | ||||
|       NGINX_HOST_IP: 172.21.0.20 | ||||
|     networks: | ||||
|       dev: | ||||
|         aliases: | ||||
|           - tactical-meshcentral | ||||
|           - ${MESH_HOST} | ||||
|     volumes: | ||||
|       - tactical-data-dev:/opt/tactical | ||||
|       - mesh-data-dev:/home/node/app/meshcentral-data | ||||
|     depends_on: | ||||
|       - mongodb-dev | ||||
|  | ||||
|   # mongodb container for meshcentral | ||||
|   mongodb-dev: | ||||
|     image: mongo:4.4 | ||||
|     restart: always | ||||
|     environment: | ||||
|       MONGO_INITDB_ROOT_USERNAME: ${MONGODB_USER} | ||||
|       MONGO_INITDB_ROOT_PASSWORD: ${MONGODB_PASSWORD} | ||||
|       MONGO_INITDB_DATABASE: meshcentral | ||||
|     networks: | ||||
|       dev: | ||||
|         aliases: | ||||
|           - tactical-mongodb | ||||
|     volumes: | ||||
|       - mongo-dev-data:/data/db | ||||
|  | ||||
|   # postgres database for api service | ||||
|   postgres-dev: | ||||
|     image: postgres:13-alpine | ||||
|     restart: always | ||||
|     environment: | ||||
|       POSTGRES_DB: tacticalrmm | ||||
|       POSTGRES_USER: ${POSTGRES_USER} | ||||
|       POSTGRES_PASSWORD: ${POSTGRES_PASS} | ||||
|     volumes: | ||||
|       - postgres-data-dev:/var/lib/postgresql/data | ||||
|     networks: | ||||
|       dev: | ||||
|         aliases: | ||||
|           - tactical-postgres | ||||
|  | ||||
|   # redis container for celery tasks | ||||
|   redis-dev: | ||||
|     restart: always | ||||
|     image: redis:6.0-alpine | ||||
|     networks: | ||||
|       dev: | ||||
|         aliases: | ||||
|           - tactical-redis | ||||
|  | ||||
|   init-dev: | ||||
|     image: api-dev | ||||
|     build: | ||||
|       context: . | ||||
|       dockerfile: ./api.dockerfile | ||||
|     restart: on-failure | ||||
|     command: ["tactical-init-dev"] | ||||
|     environment: | ||||
|       POSTGRES_USER: ${POSTGRES_USER} | ||||
|       POSTGRES_PASS: ${POSTGRES_PASS} | ||||
|       APP_HOST: ${APP_HOST} | ||||
|       API_HOST: ${API_HOST} | ||||
|       MESH_HOST: ${MESH_HOST} | ||||
|       MESH_USER: ${MESH_USER} | ||||
|       TRMM_USER: ${TRMM_USER} | ||||
|       TRMM_PASS: ${TRMM_PASS} | ||||
|       HTTP_PROTOCOL: ${HTTP_PROTOCOL} | ||||
|       APP_PORT: ${APP_PORT} | ||||
|     depends_on: | ||||
|       - postgres-dev | ||||
|       - meshcentral-dev | ||||
|     networks: | ||||
|       - dev | ||||
|     volumes: | ||||
|       - tactical-data-dev:/opt/tactical | ||||
|       - ..:/workspace:cached | ||||
|  | ||||
|   # container for celery worker service | ||||
|   celery-dev: | ||||
|     image: api-dev | ||||
|     build: | ||||
|       context: . | ||||
|       dockerfile: ./api.dockerfile | ||||
|     command: ["tactical-celery-dev"] | ||||
|     restart: always | ||||
|     networks: | ||||
|       - dev | ||||
|     volumes: | ||||
|       - tactical-data-dev:/opt/tactical | ||||
|       - ..:/workspace:cached | ||||
|     depends_on: | ||||
|       - postgres-dev | ||||
|       - redis-dev | ||||
|  | ||||
|   # container for celery beat service | ||||
|   celerybeat-dev: | ||||
|     image: api-dev | ||||
|     build: | ||||
|       context: . | ||||
|       dockerfile: ./api.dockerfile | ||||
|     command: ["tactical-celerybeat-dev"] | ||||
|     restart: always | ||||
|     networks: | ||||
|       - dev | ||||
|     volumes: | ||||
|       - tactical-data-dev:/opt/tactical | ||||
|       - ..:/workspace:cached | ||||
|     depends_on: | ||||
|       - postgres-dev | ||||
|       - redis-dev | ||||
|  | ||||
|   nginx-dev: | ||||
|   # container for tactical reverse proxy | ||||
|     image: ${IMAGE_REPO}tactical-nginx:${VERSION} | ||||
|     restart: always | ||||
|     environment: | ||||
|       APP_HOST: ${APP_HOST} | ||||
|       API_HOST: ${API_HOST} | ||||
|       MESH_HOST: ${MESH_HOST} | ||||
|       CERT_PUB_KEY: ${CERT_PUB_KEY} | ||||
|       CERT_PRIV_KEY: ${CERT_PRIV_KEY} | ||||
|       APP_PORT: ${APP_PORT} | ||||
|       API_PORT: ${API_PORT} | ||||
|     networks: | ||||
|       dev: | ||||
|         ipv4_address: 172.21.0.20 | ||||
|     ports: | ||||
|       - "80:80" | ||||
|       - "443:443" | ||||
|     volumes: | ||||
|       - tactical-data-dev:/opt/tactical | ||||
|  | ||||
| volumes: | ||||
|   tactical-data-dev: | ||||
|   postgres-data-dev: | ||||
|   mongo-dev-data: | ||||
|   mesh-data-dev: | ||||
|  | ||||
| networks: | ||||
|   dev: | ||||
|     driver: bridge | ||||
|     ipam: | ||||
|       driver: default | ||||
|       config: | ||||
|         - subnet: 172.21.0.0/24   | ||||
							
								
								
									
168  .devcontainer/entrypoint.sh  Normal file
							| @@ -0,0 +1,168 @@ | ||||
| #!/usr/bin/env bash | ||||
|  | ||||
| set -e | ||||
|  | ||||
| : "${TRMM_USER:=tactical}" | ||||
| : "${TRMM_PASS:=tactical}" | ||||
| : "${POSTGRES_HOST:=tactical-postgres}" | ||||
| : "${POSTGRES_PORT:=5432}" | ||||
| : "${POSTGRES_USER:=tactical}" | ||||
| : "${POSTGRES_PASS:=tactical}" | ||||
| : "${POSTGRES_DB:=tacticalrmm}" | ||||
| : "${MESH_CONTAINER:=tactical-meshcentral}" | ||||
| : "${MESH_USER:=meshcentral}" | ||||
| : "${MESH_PASS:=meshcentralpass}" | ||||
| : "${MESH_HOST:=tactical-meshcentral}" | ||||
| : "${API_HOST:=tactical-backend}" | ||||
| : "${APP_HOST:=tactical-frontend}" | ||||
| : "${REDIS_HOST:=tactical-redis}" | ||||
| : "${HTTP_PROTOCOL:=http}" | ||||
| : "${APP_PORT:=8080}" | ||||
| : "${API_PORT:=8000}" | ||||
|  | ||||
| # Add python venv to path | ||||
| export PATH="${VIRTUAL_ENV}/bin:$PATH" | ||||
|  | ||||
| function check_tactical_ready { | ||||
|   sleep 15 | ||||
|   until [ -f "${TACTICAL_READY_FILE}" ]; do | ||||
|     echo "waiting for init container to finish install or update..." | ||||
|     sleep 10 | ||||
|   done | ||||
| } | ||||
|  | ||||
| function django_setup { | ||||
|   until (echo > /dev/tcp/"${POSTGRES_HOST}"/"${POSTGRES_PORT}") &> /dev/null; do | ||||
|     echo "waiting for postgresql container to be ready..." | ||||
|     sleep 5 | ||||
|   done | ||||
|  | ||||
|   until (echo > /dev/tcp/"${MESH_CONTAINER}"/443) &> /dev/null; do | ||||
|     echo "waiting for meshcentral container to be ready..." | ||||
|     sleep 5 | ||||
|   done | ||||
|  | ||||
|   echo "setting up django environment" | ||||
|  | ||||
|   # configure django settings | ||||
|   MESH_TOKEN="$(cat ${TACTICAL_DIR}/tmp/mesh_token)" | ||||
|  | ||||
|   DJANGO_SEKRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 80 | head -n 1) | ||||
|    | ||||
|   localvars="$(cat << EOF | ||||
| SECRET_KEY = '${DJANGO_SEKRET}' | ||||
|  | ||||
| DEBUG = True | ||||
|  | ||||
| DOCKER_BUILD = True | ||||
|  | ||||
| CERT_FILE = '/opt/tactical/certs/fullchain.pem' | ||||
| KEY_FILE = '/opt/tactical/certs/privkey.pem' | ||||
|  | ||||
| SCRIPTS_DIR = '${WORKSPACE_DIR}/scripts' | ||||
|  | ||||
| ALLOWED_HOSTS = ['${API_HOST}', '*'] | ||||
|  | ||||
| ADMIN_URL = 'admin/' | ||||
|  | ||||
| CORS_ORIGIN_ALLOW_ALL = True | ||||
|  | ||||
| DATABASES = { | ||||
|     'default': { | ||||
|         'ENGINE': 'django.db.backends.postgresql', | ||||
|         'NAME': '${POSTGRES_DB}', | ||||
|         'USER': '${POSTGRES_USER}', | ||||
|         'PASSWORD': '${POSTGRES_PASS}', | ||||
|         'HOST': '${POSTGRES_HOST}', | ||||
|         'PORT': '${POSTGRES_PORT}', | ||||
|     } | ||||
| } | ||||
|  | ||||
| REST_FRAMEWORK = { | ||||
|     'DATETIME_FORMAT': '%b-%d-%Y - %H:%M', | ||||
|  | ||||
|     'DEFAULT_PERMISSION_CLASSES': ( | ||||
|         'rest_framework.permissions.IsAuthenticated', | ||||
|     ), | ||||
|     'DEFAULT_AUTHENTICATION_CLASSES': ( | ||||
|         'knox.auth.TokenAuthentication', | ||||
|     ), | ||||
| } | ||||
|  | ||||
| if not DEBUG: | ||||
|     REST_FRAMEWORK.update({ | ||||
|         'DEFAULT_RENDERER_CLASSES': ( | ||||
|             'rest_framework.renderers.JSONRenderer', | ||||
|         ) | ||||
|     }) | ||||
|  | ||||
| MESH_USERNAME = '${MESH_USER}' | ||||
| MESH_SITE = 'https://${MESH_HOST}' | ||||
| MESH_TOKEN_KEY = '${MESH_TOKEN}' | ||||
| REDIS_HOST    = '${REDIS_HOST}' | ||||
| EOF | ||||
| )" | ||||
|  | ||||
|   echo "${localvars}" > ${WORKSPACE_DIR}/api/tacticalrmm/tacticalrmm/local_settings.py | ||||
|  | ||||
|   # run migrations and init scripts | ||||
|   "${VIRTUAL_ENV}"/bin/python manage.py migrate --no-input | ||||
|   "${VIRTUAL_ENV}"/bin/python manage.py collectstatic --no-input | ||||
|   "${VIRTUAL_ENV}"/bin/python manage.py initial_db_setup | ||||
|   "${VIRTUAL_ENV}"/bin/python manage.py initial_mesh_setup | ||||
|   "${VIRTUAL_ENV}"/bin/python manage.py load_chocos | ||||
|   "${VIRTUAL_ENV}"/bin/python manage.py load_community_scripts | ||||
|   "${VIRTUAL_ENV}"/bin/python manage.py reload_nats | ||||
|  | ||||
|   # create super user  | ||||
|   echo "from accounts.models import User; User.objects.create_superuser('${TRMM_USER}', 'admin@example.com', '${TRMM_PASS}') if not User.objects.filter(username='${TRMM_USER}').exists() else 0;" | python manage.py shell | ||||
| } | ||||
|  | ||||
| if [ "$1" = 'tactical-init-dev' ]; then | ||||
|  | ||||
|   # make directories if they don't exist | ||||
|   mkdir -p "${TACTICAL_DIR}/tmp" | ||||
|  | ||||
|   test -f "${TACTICAL_READY_FILE}" && rm "${TACTICAL_READY_FILE}" | ||||
|  | ||||
|   # setup Python virtual env and install dependencies | ||||
|   ! test -e "${VIRTUAL_ENV}" && python -m venv --copies ${VIRTUAL_ENV} | ||||
|   "${VIRTUAL_ENV}"/bin/pip install --no-cache-dir -r /requirements.txt | ||||
|  | ||||
|   django_setup | ||||
|  | ||||
|   # create .env file for frontend | ||||
|   webenv="$(cat << EOF | ||||
| PROD_URL = "${HTTP_PROTOCOL}://${API_HOST}" | ||||
| DEV_URL = "${HTTP_PROTOCOL}://${API_HOST}" | ||||
| APP_URL = https://${APP_HOST} | ||||
| EOF | ||||
| )" | ||||
|   echo "${webenv}" | tee ${WORKSPACE_DIR}/web/.env > /dev/null | ||||
|  | ||||
|   # chown everything to tactical user | ||||
|   chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${WORKSPACE_DIR}" | ||||
|   chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${TACTICAL_DIR}" | ||||
|  | ||||
|   # create install ready file | ||||
|   su -c "echo 'tactical-init' > ${TACTICAL_READY_FILE}" "${TACTICAL_USER}" | ||||
| fi | ||||
|  | ||||
| if [ "$1" = 'tactical-api' ]; then | ||||
|   cp "${WORKSPACE_DIR}"/api/tacticalrmm/core/goinstaller/bin/goversioninfo /usr/local/bin/goversioninfo | ||||
|   chmod +x /usr/local/bin/goversioninfo | ||||
|    | ||||
|   check_tactical_ready | ||||
|   "${VIRTUAL_ENV}"/bin/python manage.py runserver 0.0.0.0:"${API_PORT}" | ||||
| fi | ||||
|  | ||||
| if [ "$1" = 'tactical-celery-dev' ]; then | ||||
|   check_tactical_ready | ||||
|   "${VIRTUAL_ENV}"/bin/celery -A tacticalrmm worker -l debug | ||||
| fi | ||||
|  | ||||
| if [ "$1" = 'tactical-celerybeat-dev' ]; then | ||||
|   check_tactical_ready | ||||
|   test -f "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid" && rm "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid" | ||||
|   "${VIRTUAL_ENV}"/bin/celery -A tacticalrmm beat -l debug | ||||
| fi | ||||
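The readiness checks in `django_setup` use bash's built-in `/dev/tcp` redirection instead of an external tool. Extracted into a standalone helper, the same idiom looks like this (function name and example host/port are illustrative only):

```bash
# standalone sketch of the /dev/tcp wait loop used above (requires bash)
wait_for_port() {
  local host="$1" port="$2"
  until (echo > "/dev/tcp/${host}/${port}") &> /dev/null; do
    echo "waiting for ${host}:${port} to accept connections..."
    sleep 5
  done
}

wait_for_port tactical-postgres 5432   # e.g. the postgres alias from docker-compose.yml
```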
							
								
								
									
46  .devcontainer/requirements.txt  Normal file
							| @@ -0,0 +1,46 @@ | ||||
| # To ensure app dependencies are ported from your virtual environment/host machine into your container, run 'pip freeze > requirements.txt' in the terminal to overwrite this file | ||||
| amqp==5.0.5 | ||||
| asgiref==3.3.1 | ||||
| asyncio-nats-client==0.11.4 | ||||
| billiard==3.6.3.0 | ||||
| celery==5.0.5 | ||||
| certifi==2020.12.5 | ||||
| cffi==1.14.5 | ||||
| chardet==4.0.0 | ||||
| cryptography==3.4.4 | ||||
| decorator==4.4.2 | ||||
| Django==3.1.6 | ||||
| django-cors-headers==3.7.0 | ||||
| django-rest-knox==4.1.0 | ||||
| djangorestframework==3.12.2 | ||||
| future==0.18.2 | ||||
| kombu==5.0.2 | ||||
| loguru==0.5.3 | ||||
| msgpack==1.0.2 | ||||
| packaging==20.8 | ||||
| psycopg2-binary==2.8.6 | ||||
| pycparser==2.20 | ||||
| pycryptodome==3.10.1 | ||||
| pyotp==2.6.0 | ||||
| pyparsing==2.4.7 | ||||
| pytz==2021.1 | ||||
| qrcode==6.1 | ||||
| redis==3.5.3 | ||||
| requests==2.25.1 | ||||
| six==1.15.0 | ||||
| sqlparse==0.4.1 | ||||
| twilio==6.52.0 | ||||
| urllib3==1.26.3 | ||||
| validators==0.18.2 | ||||
| vine==5.0.0 | ||||
| websockets==8.1 | ||||
| zipp==3.4.0 | ||||
| black | ||||
| Werkzeug | ||||
| django-extensions | ||||
| coverage | ||||
| coveralls | ||||
| model_bakery | ||||
| mkdocs | ||||
| mkdocs-material | ||||
| pymdown-extensions | ||||
| @@ -1,5 +1,25 @@ | ||||
| .git | ||||
| .cache | ||||
| **/*.env | ||||
| **/env | ||||
| **/__pycache__ | ||||
| **/.classpath | ||||
| **/.dockerignore | ||||
| **/.env | ||||
| **/.git | ||||
| **/.gitignore | ||||
| **/.project | ||||
| **/.settings | ||||
| **/.toolstarget | ||||
| **/.vs | ||||
| **/.vscode | ||||
| **/*.*proj.user | ||||
| **/*.dbmdl | ||||
| **/*.jfm | ||||
| **/azds.yaml | ||||
| **/charts | ||||
| **/docker-compose* | ||||
| **/Dockerfile* | ||||
| **/node_modules | ||||
| **/npm-debug.log | ||||
| **/obj | ||||
| **/secrets.dev.yaml | ||||
| **/values.dev.yaml | ||||
| **/env | ||||
| README.md | ||||
|   | ||||
							
								
								
									
12  .github/FUNDING.yml  vendored  Normal file
							| @@ -0,0 +1,12 @@ | ||||
| # These are supported funding model platforms | ||||
|  | ||||
| github: wh1te909 | ||||
| patreon: # Replace with a single Patreon username | ||||
| open_collective: # Replace with a single Open Collective username | ||||
| ko_fi: # Replace with a single Ko-fi username | ||||
| tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel | ||||
| community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry | ||||
| liberapay: # Replace with a single Liberapay username | ||||
| issuehunt: # Replace with a single IssueHunt username | ||||
| otechie: # Replace with a single Otechie username | ||||
| custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] | ||||
							
								
								
									
10  .github/workflows/docker-build-push.yml  vendored
							| @@ -57,16 +57,6 @@ jobs: | ||||
|           platforms: linux/amd64 | ||||
|           tags: tacticalrmm/tactical-nats:${{ steps.prep.outputs.version }},tacticalrmm/tactical-nats:latest | ||||
|            | ||||
|       - name: Build and Push Tactical Salt Image | ||||
|         uses: docker/build-push-action@v2 | ||||
|         with: | ||||
|           context: . | ||||
|           push: true | ||||
|           pull: true | ||||
|           file: ./docker/containers/tactical-salt/dockerfile | ||||
|           platforms: linux/amd64 | ||||
|           tags: tacticalrmm/tactical-salt:${{ steps.prep.outputs.version }},tacticalrmm/tactical-salt:latest | ||||
|            | ||||
|       - name: Build and Push Tactical Frontend Image | ||||
|         uses: docker/build-push-action@v2 | ||||
|         with: | ||||
|   | ||||
							
								
								
									
14  .vscode/launch.json  vendored
							| @@ -14,6 +14,20 @@ | ||||
|                 "0.0.0.0:8000" | ||||
|             ], | ||||
|             "django": true | ||||
|         }, | ||||
|         { | ||||
|             "name": "Django: Docker Remote Attach", | ||||
|             "type": "python", | ||||
|             "request": "attach", | ||||
|             "port": 5678, | ||||
|             "host": "localhost", | ||||
|             "preLaunchTask": "docker debug", | ||||
|             "pathMappings": [ | ||||
|                 { | ||||
|                     "localRoot": "${workspaceFolder}/api/tacticalrmm", | ||||
|                     "remoteRoot": "/workspace/api/tacticalrmm" | ||||
|                 } | ||||
|             ] | ||||
|         } | ||||
|     ] | ||||
| } | ||||
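The new "Django: Docker Remote Attach" configuration attaches to the debugpy listener that `docker-compose.debug.yml` publishes on port 5678; its `preLaunchTask` refers to the `docker debug` task defined in `.vscode/tasks.json` below. A quick reachability check before attaching (assumes netcat is installed on the host):

```bash
nc -zv localhost 5678   # should report the port open once the api-dev debug container is running
```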
							
								
								
									
19  .vscode/settings.json  vendored
							| @@ -41,4 +41,23 @@ | ||||
|             "**/*.zip": true | ||||
|         }, | ||||
|     }, | ||||
|     "go.useLanguageServer": true, | ||||
|     "[go]": { | ||||
|         "editor.formatOnSave": true, | ||||
|         "editor.codeActionsOnSave": { | ||||
|             "source.organizeImports": false, | ||||
|         }, | ||||
|         "editor.snippetSuggestions": "none", | ||||
|     }, | ||||
|     "[go.mod]": { | ||||
|         "editor.formatOnSave": true, | ||||
|         "editor.codeActionsOnSave": { | ||||
|             "source.organizeImports": true, | ||||
|         }, | ||||
|     }, | ||||
|     "gopls": { | ||||
|         "usePlaceholders": true, | ||||
|         "completeUnimported": true, | ||||
|         "staticcheck": true, | ||||
|     } | ||||
| } | ||||
							
								
								
									
23  .vscode/tasks.json  vendored  Normal file
							| @@ -0,0 +1,23 @@ | ||||
| { | ||||
|     // See https://go.microsoft.com/fwlink/?LinkId=733558 | ||||
|     // for the documentation about the tasks.json format | ||||
|     "version": "2.0.0", | ||||
|     "tasks": [ | ||||
|         { | ||||
|             "label": "docker debug", | ||||
|             "type": "shell", | ||||
|             "command": "docker-compose", | ||||
|             "args": [ | ||||
|                 "-p", | ||||
|                 "trmm", | ||||
|                 "-f", | ||||
|                 ".devcontainer/docker-compose.yml", | ||||
|                 "-f", | ||||
|                 ".devcontainer/docker-compose.debug.yml", | ||||
|                 "up", | ||||
|                 "-d", | ||||
|                 "--build" | ||||
|             ] | ||||
|         } | ||||
|     ] | ||||
| } | ||||
| @@ -6,7 +6,7 @@ | ||||
| [](https://github.com/python/black) | ||||
|  | ||||
| Tactical RMM is a remote monitoring & management tool for Windows computers, built with Django and Vue.\ | ||||
| It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang, as well as the [SaltStack](https://github.com/saltstack/salt) api and [MeshCentral](https://github.com/Ylianst/MeshCentral) | ||||
| It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral) | ||||
|  | ||||
| # [LIVE DEMO](https://rmm.xlawgaming.com/) | ||||
| Demo database resets every hour. Alot of features are disabled for obvious reasons due to the nature of this app. | ||||
| @@ -36,7 +36,7 @@ Demo database resets every hour. Alot of features are disabled for obvious reaso | ||||
| ## Installation | ||||
|  | ||||
| ### Requirements | ||||
| - VPS with 4GB ram (an install script is provided for Ubuntu Server 20.04) | ||||
| - VPS with 2GB ram (an install script is provided for Ubuntu Server 20.04 / Debian 10) | ||||
| - A domain you own with at least 3 subdomains | ||||
| - Google Authenticator app (2 factor is NOT optional) | ||||
|  | ||||
| @@ -62,7 +62,6 @@ sudo ufw default allow outgoing | ||||
| sudo ufw allow ssh | ||||
| sudo ufw allow http | ||||
| sudo ufw allow https | ||||
| sudo ufw allow proto tcp from any to any port 4505,4506 | ||||
| sudo ufw allow proto tcp from any to any port 4222 | ||||
| sudo ufw enable && sudo ufw reload | ||||
| ``` | ||||
|   | ||||
| @@ -1,457 +0,0 @@ | ||||
| from __future__ import absolute_import | ||||
| import psutil | ||||
| import os | ||||
| import datetime | ||||
| import zlib | ||||
| import json | ||||
| import base64 | ||||
| import wmi | ||||
| import win32evtlog | ||||
| import win32con | ||||
| import win32evtlogutil | ||||
| import winerror | ||||
| from time import sleep | ||||
| import requests | ||||
| import subprocess | ||||
| import random | ||||
| import platform | ||||
|  | ||||
| ARCH = "64" if platform.machine().endswith("64") else "32" | ||||
| PROGRAM_DIR = os.path.join(os.environ["ProgramFiles"], "TacticalAgent") | ||||
| TAC_RMM = os.path.join(PROGRAM_DIR, "tacticalrmm.exe") | ||||
| NSSM = os.path.join(PROGRAM_DIR, "nssm.exe" if ARCH == "64" else "nssm-x86.exe") | ||||
| TEMP_DIR = os.path.join(os.environ["WINDIR"], "Temp") | ||||
| SYS_DRIVE = os.environ["SystemDrive"] | ||||
| PY_BIN = os.path.join(SYS_DRIVE, "\\salt", "bin", "python.exe") | ||||
| SALT_CALL = os.path.join(SYS_DRIVE, "\\salt", "salt-call.bat") | ||||
|  | ||||
|  | ||||
| def get_services(): | ||||
|     # see https://github.com/wh1te909/tacticalrmm/issues/38 | ||||
|     # for why I am manually implementing the svc.as_dict() method of psutil | ||||
|     ret = [] | ||||
|     for svc in psutil.win_service_iter(): | ||||
|         i = {} | ||||
|         try: | ||||
|             i["display_name"] = svc.display_name() | ||||
|             i["binpath"] = svc.binpath() | ||||
|             i["username"] = svc.username() | ||||
|             i["start_type"] = svc.start_type() | ||||
|             i["status"] = svc.status() | ||||
|             i["pid"] = svc.pid() | ||||
|             i["name"] = svc.name() | ||||
|             i["description"] = svc.description() | ||||
|         except Exception: | ||||
|             continue | ||||
|         else: | ||||
|             ret.append(i) | ||||
|  | ||||
|     return ret | ||||
|  | ||||
|  | ||||
| def run_python_script(filename, timeout, script_type="userdefined"): | ||||
|     # no longer used in agent version 0.11.0 | ||||
|     file_path = os.path.join(TEMP_DIR, filename) | ||||
|  | ||||
|     if os.path.exists(file_path): | ||||
|         try: | ||||
|             os.remove(file_path) | ||||
|         except: | ||||
|             pass | ||||
|  | ||||
|     if script_type == "userdefined": | ||||
|         __salt__["cp.get_file"](f"salt://scripts/userdefined/{filename}", file_path) | ||||
|     else: | ||||
|         __salt__["cp.get_file"](f"salt://scripts/{filename}", file_path) | ||||
|  | ||||
|     return __salt__["cmd.run_all"](f"{PY_BIN} {file_path}", timeout=timeout) | ||||
|  | ||||
|  | ||||
| def run_script(filepath, filename, shell, timeout, args=[], bg=False): | ||||
|     if shell == "powershell" or shell == "cmd": | ||||
|         if args: | ||||
|             return __salt__["cmd.script"]( | ||||
|                 source=filepath, | ||||
|                 args=" ".join(map(lambda x: f'"{x}"', args)), | ||||
|                 shell=shell, | ||||
|                 timeout=timeout, | ||||
|                 bg=bg, | ||||
|             ) | ||||
|         else: | ||||
|             return __salt__["cmd.script"]( | ||||
|                 source=filepath, shell=shell, timeout=timeout, bg=bg | ||||
|             ) | ||||
|  | ||||
|     elif shell == "python": | ||||
|         file_path = os.path.join(TEMP_DIR, filename) | ||||
|  | ||||
|         if os.path.exists(file_path): | ||||
|             try: | ||||
|                 os.remove(file_path) | ||||
|             except: | ||||
|                 pass | ||||
|  | ||||
|         __salt__["cp.get_file"](filepath, file_path) | ||||
|  | ||||
|         salt_cmd = "cmd.run_bg" if bg else "cmd.run_all" | ||||
|  | ||||
|         if args: | ||||
|             a = " ".join(map(lambda x: f'"{x}"', args)) | ||||
|             cmd = f"{PY_BIN} {file_path} {a}" | ||||
|             return __salt__[salt_cmd](cmd, timeout=timeout) | ||||
|         else: | ||||
|             return __salt__[salt_cmd](f"{PY_BIN} {file_path}", timeout=timeout) | ||||
|  | ||||
|  | ||||
| def uninstall_agent(): | ||||
|     remove_exe = os.path.join(PROGRAM_DIR, "unins000.exe") | ||||
|     __salt__["cmd.run_bg"]([remove_exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"]) | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| def update_salt(): | ||||
|     for p in psutil.process_iter(): | ||||
|         with p.oneshot(): | ||||
|             if p.name() == "tacticalrmm.exe" and "updatesalt" in p.cmdline(): | ||||
|                 return "running" | ||||
|  | ||||
|     from subprocess import Popen, PIPE | ||||
|  | ||||
|     CREATE_NEW_PROCESS_GROUP = 0x00000200 | ||||
|     DETACHED_PROCESS = 0x00000008 | ||||
|     cmd = [TAC_RMM, "-m", "updatesalt"] | ||||
|     p = Popen( | ||||
|         cmd, | ||||
|         stdin=PIPE, | ||||
|         stdout=PIPE, | ||||
|         stderr=PIPE, | ||||
|         close_fds=True, | ||||
|         creationflags=DETACHED_PROCESS | CREATE_NEW_PROCESS_GROUP, | ||||
|     ) | ||||
|     return p.pid | ||||
|  | ||||
|  | ||||
| def run_manual_checks(): | ||||
|     __salt__["cmd.run_bg"]([TAC_RMM, "-m", "runchecks"]) | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| def install_updates(): | ||||
|     for p in psutil.process_iter(): | ||||
|         with p.oneshot(): | ||||
|             if p.name() == "tacticalrmm.exe" and "winupdater" in p.cmdline(): | ||||
|                 return "running" | ||||
|  | ||||
|     return __salt__["cmd.run_bg"]([TAC_RMM, "-m", "winupdater"]) | ||||
|  | ||||
|  | ||||
| def _wait_for_service(svc, status, retries=10): | ||||
|     attempts = 0 | ||||
|     while 1: | ||||
|         try: | ||||
|             service = psutil.win_service_get(svc) | ||||
|         except psutil.NoSuchProcess: | ||||
|             stat = "fail" | ||||
|             attempts += 1 | ||||
|             sleep(5) | ||||
|         else: | ||||
|             stat = service.status() | ||||
|             if stat != status: | ||||
|                 attempts += 1 | ||||
|                 sleep(5) | ||||
|             else: | ||||
|                 attempts = 0 | ||||
|  | ||||
|         if attempts == 0 or attempts > retries: | ||||
|             break | ||||
|  | ||||
|     return stat | ||||
|  | ||||
|  | ||||
| def agent_update_v2(inno, url): | ||||
|     # make sure another instance of the update is not running | ||||
|     # this function spawns 2 instances of itself (because we call it twice with salt run_bg) | ||||
|     # so if more than 2 running, don't continue as an update is already running | ||||
|     count = 0 | ||||
|     for p in psutil.process_iter(): | ||||
|         try: | ||||
|             with p.oneshot(): | ||||
|                 if "win_agent.agent_update_v2" in p.cmdline(): | ||||
|                     count += 1 | ||||
|         except Exception: | ||||
|             continue | ||||
|  | ||||
|     if count > 2: | ||||
|         return "already running" | ||||
|  | ||||
|     sleep(random.randint(1, 20))  # don't flood the rmm | ||||
|  | ||||
|     exe = os.path.join(TEMP_DIR, inno) | ||||
|  | ||||
|     if os.path.exists(exe): | ||||
|         try: | ||||
|             os.remove(exe) | ||||
|         except: | ||||
|             pass | ||||
|  | ||||
|     try: | ||||
|         r = requests.get(url, stream=True, timeout=600) | ||||
|     except Exception: | ||||
|         return "failed" | ||||
|  | ||||
|     if r.status_code != 200: | ||||
|         return "failed" | ||||
|  | ||||
|     with open(exe, "wb") as f: | ||||
|         for chunk in r.iter_content(chunk_size=1024): | ||||
|             if chunk: | ||||
|                 f.write(chunk) | ||||
|     del r | ||||
|  | ||||
|     ret = subprocess.run([exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"], timeout=120) | ||||
|  | ||||
|     tac = _wait_for_service(svc="tacticalagent", status="running") | ||||
|     if tac != "running": | ||||
|         subprocess.run([NSSM, "start", "tacticalagent"], timeout=30) | ||||
|  | ||||
|     chk = _wait_for_service(svc="checkrunner", status="running") | ||||
|     if chk != "running": | ||||
|         subprocess.run([NSSM, "start", "checkrunner"], timeout=30) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| def do_agent_update_v2(inno, url): | ||||
|     return __salt__["cmd.run_bg"]( | ||||
|         [ | ||||
|             SALT_CALL, | ||||
|             "win_agent.agent_update_v2", | ||||
|             f"inno={inno}", | ||||
|             f"url={url}", | ||||
|             "--local", | ||||
|         ] | ||||
|     ) | ||||
|  | ||||
|  | ||||
| def agent_update(version, url): | ||||
|     # make sure another instance of the update is not running | ||||
|     # this function spawns 2 instances of itself so if more than 2 running, | ||||
|     # don't continue as an update is already running | ||||
|     count = 0 | ||||
|     for p in psutil.process_iter(): | ||||
|         try: | ||||
|             with p.oneshot(): | ||||
|                 if "win_agent.agent_update" in p.cmdline(): | ||||
|                     count += 1 | ||||
|         except Exception: | ||||
|             continue | ||||
|  | ||||
|     if count > 2: | ||||
|         return "already running" | ||||
|  | ||||
|     sleep(random.randint(1, 60))  # don't flood the rmm | ||||
|     try: | ||||
|         r = requests.get(url, stream=True, timeout=600) | ||||
|     except Exception: | ||||
|         return "failed" | ||||
|  | ||||
|     if r.status_code != 200: | ||||
|         return "failed" | ||||
|  | ||||
|     exe = os.path.join(TEMP_DIR, f"winagent-v{version}.exe") | ||||
|  | ||||
|     with open(exe, "wb") as f: | ||||
|         for chunk in r.iter_content(chunk_size=1024): | ||||
|             if chunk: | ||||
|                 f.write(chunk) | ||||
|     del r | ||||
|  | ||||
|     services = ("tacticalagent", "checkrunner") | ||||
|  | ||||
|     for svc in services: | ||||
|         subprocess.run([NSSM, "stop", svc], timeout=120) | ||||
|  | ||||
|     sleep(10) | ||||
|     r = subprocess.run([exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"], timeout=300) | ||||
|     sleep(30) | ||||
|  | ||||
|     for svc in services: | ||||
|         subprocess.run([NSSM, "start", svc], timeout=120) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| def do_agent_update(version, url): | ||||
|     return __salt__["cmd.run_bg"]( | ||||
|         [ | ||||
|             SALT_CALL, | ||||
|             "win_agent.agent_update", | ||||
|             f"version={version}", | ||||
|             f"url={url}", | ||||
|             "--local", | ||||
|         ] | ||||
|     ) | ||||
|  | ||||
|  | ||||
| class SystemDetail: | ||||
|     def __init__(self): | ||||
|         self.c = wmi.WMI() | ||||
|         self.comp_sys_prod = self.c.Win32_ComputerSystemProduct() | ||||
|         self.comp_sys = self.c.Win32_ComputerSystem() | ||||
|         self.memory = self.c.Win32_PhysicalMemory() | ||||
|         self.os = self.c.Win32_OperatingSystem() | ||||
|         self.base_board = self.c.Win32_BaseBoard() | ||||
|         self.bios = self.c.Win32_BIOS() | ||||
|         self.disk = self.c.Win32_DiskDrive() | ||||
|         self.network_adapter = self.c.Win32_NetworkAdapter() | ||||
|         self.network_config = self.c.Win32_NetworkAdapterConfiguration() | ||||
|         self.desktop_monitor = self.c.Win32_DesktopMonitor() | ||||
|         self.cpu = self.c.Win32_Processor() | ||||
|         self.usb = self.c.Win32_USBController() | ||||
|  | ||||
|     def get_all(self, obj): | ||||
|         ret = [] | ||||
|         for i in obj: | ||||
|             tmp = [ | ||||
|                 {j: getattr(i, j)} | ||||
|                 for j in list(i.properties) | ||||
|                 if getattr(i, j) is not None | ||||
|             ] | ||||
|             ret.append(tmp) | ||||
|  | ||||
|         return ret | ||||
|  | ||||
|  | ||||
| def system_info(): | ||||
|     info = SystemDetail() | ||||
|     return { | ||||
|         "comp_sys_prod": info.get_all(info.comp_sys_prod), | ||||
|         "comp_sys": info.get_all(info.comp_sys), | ||||
|         "mem": info.get_all(info.memory), | ||||
|         "os": info.get_all(info.os), | ||||
|         "base_board": info.get_all(info.base_board), | ||||
|         "bios": info.get_all(info.bios), | ||||
|         "disk": info.get_all(info.disk), | ||||
|         "network_adapter": info.get_all(info.network_adapter), | ||||
|         "network_config": info.get_all(info.network_config), | ||||
|         "desktop_monitor": info.get_all(info.desktop_monitor), | ||||
|         "cpu": info.get_all(info.cpu), | ||||
|         "usb": info.get_all(info.usb), | ||||
|     } | ||||
|  | ||||
|  | ||||
| def local_sys_info(): | ||||
|     return __salt__["cmd.run_bg"]([TAC_RMM, "-m", "sysinfo"]) | ||||
|  | ||||
|  | ||||
| def get_procs(): | ||||
|     ret = [] | ||||
|  | ||||
|     # setup | ||||
|     for proc in psutil.process_iter(): | ||||
|         with proc.oneshot(): | ||||
|             proc.cpu_percent(interval=None) | ||||
|  | ||||
|     # need time for psutil to record cpu percent | ||||
|     sleep(1) | ||||
|  | ||||
|     for c, proc in enumerate(psutil.process_iter(), 1): | ||||
|         x = {} | ||||
|         with proc.oneshot(): | ||||
|             if proc.pid == 0 or not proc.name(): | ||||
|                 continue | ||||
|  | ||||
|             x["name"] = proc.name() | ||||
|             x["cpu_percent"] = proc.cpu_percent(interval=None) / psutil.cpu_count() | ||||
|             x["memory_percent"] = proc.memory_percent() | ||||
|             x["pid"] = proc.pid | ||||
|             x["ppid"] = proc.ppid() | ||||
|             x["status"] = proc.status() | ||||
|             x["username"] = proc.username() | ||||
|             x["id"] = c | ||||
|  | ||||
|         ret.append(x) | ||||
|  | ||||
|     return ret | ||||
|  | ||||
|  | ||||
| def _compress_json(j): | ||||
|     return { | ||||
|         "wineventlog": base64.b64encode( | ||||
|             zlib.compress(json.dumps(j).encode("utf-8", errors="ignore")) | ||||
|         ).decode("ascii", errors="ignore") | ||||
|     } | ||||
|  | ||||
|  | ||||
| def get_eventlog(logtype, last_n_days): | ||||
|  | ||||
|     start_time = datetime.datetime.now() - datetime.timedelta(days=last_n_days) | ||||
|     flags = win32evtlog.EVENTLOG_BACKWARDS_READ | win32evtlog.EVENTLOG_SEQUENTIAL_READ | ||||
|  | ||||
|     status_dict = { | ||||
|         win32con.EVENTLOG_AUDIT_FAILURE: "AUDIT_FAILURE", | ||||
|         win32con.EVENTLOG_AUDIT_SUCCESS: "AUDIT_SUCCESS", | ||||
|         win32con.EVENTLOG_INFORMATION_TYPE: "INFO", | ||||
|         win32con.EVENTLOG_WARNING_TYPE: "WARNING", | ||||
|         win32con.EVENTLOG_ERROR_TYPE: "ERROR", | ||||
|         0: "INFO", | ||||
|     } | ||||
|  | ||||
|     computer = "localhost" | ||||
|     hand = win32evtlog.OpenEventLog(computer, logtype) | ||||
|     total = win32evtlog.GetNumberOfEventLogRecords(hand) | ||||
|     log = [] | ||||
|     uid = 0 | ||||
|     done = False | ||||
|  | ||||
|     try: | ||||
|         while 1: | ||||
|             events = win32evtlog.ReadEventLog(hand, flags, 0) | ||||
|             for ev_obj in events: | ||||
|  | ||||
|                 uid += 1 | ||||
|                 # return once total number of events reach or we'll be stuck in an infinite loop | ||||
|                 if uid >= total: | ||||
|                     done = True | ||||
|                     break | ||||
|  | ||||
|                 the_time = ev_obj.TimeGenerated.Format() | ||||
|                 time_obj = datetime.datetime.strptime(the_time, "%c") | ||||
|                 if time_obj < start_time: | ||||
|                     done = True | ||||
|                     break | ||||
|  | ||||
|                 computer = str(ev_obj.ComputerName) | ||||
|                 src = str(ev_obj.SourceName) | ||||
|                 evt_type = str(status_dict[ev_obj.EventType]) | ||||
|                 evt_id = str(winerror.HRESULT_CODE(ev_obj.EventID)) | ||||
|                 evt_category = str(ev_obj.EventCategory) | ||||
|                 record = str(ev_obj.RecordNumber) | ||||
|                 msg = ( | ||||
|                     str(win32evtlogutil.SafeFormatMessage(ev_obj, logtype)) | ||||
|                     .replace("<", "") | ||||
|                     .replace(">", "") | ||||
|                 ) | ||||
|  | ||||
|                 event_dict = { | ||||
|                     "computer": computer, | ||||
|                     "source": src, | ||||
|                     "eventType": evt_type, | ||||
|                     "eventID": evt_id, | ||||
|                     "eventCategory": evt_category, | ||||
|                     "message": msg, | ||||
|                     "time": the_time, | ||||
|                     "record": record, | ||||
|                     "uid": uid, | ||||
|                 } | ||||
|  | ||||
|                 log.append(event_dict) | ||||
|  | ||||
|             if done: | ||||
|                 break | ||||
|  | ||||
|     except Exception: | ||||
|         pass | ||||
|  | ||||
|     win32evtlog.CloseEventLog(hand) | ||||
|     return _compress_json(log) | ||||
| @@ -6,28 +6,28 @@ from django.db import migrations, models | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('accounts', '0002_auto_20200810_0544'), | ||||
|         ("accounts", "0002_auto_20200810_0544"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='user', | ||||
|             name='created_by', | ||||
|             model_name="user", | ||||
|             name="created_by", | ||||
|             field=models.CharField(blank=True, max_length=100, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='user', | ||||
|             name='created_time', | ||||
|             model_name="user", | ||||
|             name="created_time", | ||||
|             field=models.DateTimeField(auto_now_add=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='user', | ||||
|             name='modified_by', | ||||
|             model_name="user", | ||||
|             name="modified_by", | ||||
|             field=models.CharField(blank=True, max_length=100, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='user', | ||||
|             name='modified_time', | ||||
|             model_name="user", | ||||
|             name="modified_time", | ||||
|             field=models.DateTimeField(auto_now=True, null=True), | ||||
|         ), | ||||
|     ] | ||||
|   | ||||
| @@ -6,24 +6,24 @@ from django.db import migrations | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('accounts', '0003_auto_20200922_1344'), | ||||
|         ("accounts", "0003_auto_20200922_1344"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RemoveField( | ||||
|             model_name='user', | ||||
|             name='created_by', | ||||
|             model_name="user", | ||||
|             name="created_by", | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='user', | ||||
|             name='created_time', | ||||
|             model_name="user", | ||||
|             name="created_time", | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='user', | ||||
|             name='modified_by', | ||||
|             model_name="user", | ||||
|             name="modified_by", | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='user', | ||||
|             name='modified_time', | ||||
|             model_name="user", | ||||
|             name="modified_time", | ||||
|         ), | ||||
|     ] | ||||
|   | ||||
| @@ -6,28 +6,28 @@ from django.db import migrations, models | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('accounts', '0004_auto_20201002_1257'), | ||||
|         ("accounts", "0004_auto_20201002_1257"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='user', | ||||
|             name='created_by', | ||||
|             model_name="user", | ||||
|             name="created_by", | ||||
|             field=models.CharField(blank=True, max_length=100, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='user', | ||||
|             name='created_time', | ||||
|             model_name="user", | ||||
|             name="created_time", | ||||
|             field=models.DateTimeField(auto_now_add=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='user', | ||||
|             name='modified_by', | ||||
|             model_name="user", | ||||
|             name="modified_by", | ||||
|             field=models.CharField(blank=True, max_length=100, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='user', | ||||
|             name='modified_time', | ||||
|             model_name="user", | ||||
|             name="modified_time", | ||||
|             field=models.DateTimeField(auto_now=True, null=True), | ||||
|         ), | ||||
|     ] | ||||
|   | ||||
| @@ -6,13 +6,13 @@ from django.db import migrations, models | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('accounts', '0007_update_agent_primary_key'), | ||||
|         ("accounts", "0007_update_agent_primary_key"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='user', | ||||
|             name='dark_mode', | ||||
|             model_name="user", | ||||
|             name="dark_mode", | ||||
|             field=models.BooleanField(default=True), | ||||
|         ), | ||||
|     ] | ||||
|   | ||||
| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 3.1.4 on 2020-12-10 17:00 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("accounts", "0008_user_dark_mode"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="user", | ||||
|             name="show_community_scripts", | ||||
|             field=models.BooleanField(default=True), | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,26 @@ | ||||
| # Generated by Django 3.1.4 on 2021-01-14 01:23 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("accounts", "0009_user_show_community_scripts"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="user", | ||||
|             name="agent_dblclick_action", | ||||
|             field=models.CharField( | ||||
|                 choices=[ | ||||
|                     ("editagent", "Edit Agent"), | ||||
|                     ("takecontrol", "Take Control"), | ||||
|                     ("remotebg", "Remote Background"), | ||||
|                 ], | ||||
|                 default="editagent", | ||||
|                 max_length=50, | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,26 @@ | ||||
| # Generated by Django 3.1.5 on 2021-01-18 09:40 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("accounts", "0010_user_agent_dblclick_action"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="user", | ||||
|             name="default_agent_tbl_tab", | ||||
|             field=models.CharField( | ||||
|                 choices=[ | ||||
|                     ("server", "Servers"), | ||||
|                     ("workstation", "Workstations"), | ||||
|                     ("mixed", "Mixed"), | ||||
|                 ], | ||||
|                 default="server", | ||||
|                 max_length=50, | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
| @@ -3,11 +3,30 @@ from django.contrib.auth.models import AbstractUser | ||||
|  | ||||
| from logs.models import BaseAuditModel | ||||
|  | ||||
| AGENT_DBLCLICK_CHOICES = [ | ||||
|     ("editagent", "Edit Agent"), | ||||
|     ("takecontrol", "Take Control"), | ||||
|     ("remotebg", "Remote Background"), | ||||
| ] | ||||
|  | ||||
| AGENT_TBL_TAB_CHOICES = [ | ||||
|     ("server", "Servers"), | ||||
|     ("workstation", "Workstations"), | ||||
|     ("mixed", "Mixed"), | ||||
| ] | ||||
|  | ||||
|  | ||||
| class User(AbstractUser, BaseAuditModel): | ||||
|     is_active = models.BooleanField(default=True) | ||||
|     totp_key = models.CharField(max_length=50, null=True, blank=True) | ||||
|     dark_mode = models.BooleanField(default=True) | ||||
|     show_community_scripts = models.BooleanField(default=True) | ||||
|     agent_dblclick_action = models.CharField( | ||||
|         max_length=50, choices=AGENT_DBLCLICK_CHOICES, default="editagent" | ||||
|     ) | ||||
|     default_agent_tbl_tab = models.CharField( | ||||
|         max_length=50, choices=AGENT_TBL_TAB_CHOICES, default="server" | ||||
|     ) | ||||
|  | ||||
|     agent = models.OneToOneField( | ||||
|         "agents.Agent", | ||||
|   | ||||
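The new UI-preference fields on User behave like any other model fields; a minimal sketch of setting them from a Django shell, assuming a user named "john" exists (the username is only an example):

    from accounts.models import User

    user = User.objects.get(username="john")
    user.agent_dblclick_action = "remotebg"  # must be one of AGENT_DBLCLICK_CHOICES
    user.default_agent_tbl_tab = "mixed"     # must be one of AGENT_TBL_TAB_CHOICES
    user.save(update_fields=["agent_dblclick_action", "default_agent_tbl_tab"])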
| @@ -155,6 +155,33 @@ class GetUpdateDeleteUser(TacticalTestCase): | ||||
|  | ||||
|         self.check_not_authenticated("put", url) | ||||
|  | ||||
|     @override_settings(ROOT_USER="john") | ||||
|     def test_put_root_user(self): | ||||
|         url = f"/accounts/{self.john.pk}/users/" | ||||
|         data = { | ||||
|             "id": self.john.pk, | ||||
|             "username": "john", | ||||
|             "email": "johndoe@xlawgaming.com", | ||||
|             "first_name": "John", | ||||
|             "last_name": "Doe", | ||||
|         } | ||||
|         r = self.client.put(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|  | ||||
|     @override_settings(ROOT_USER="john") | ||||
|     def test_put_not_root_user(self): | ||||
|         url = f"/accounts/{self.john.pk}/users/" | ||||
|         data = { | ||||
|             "id": self.john.pk, | ||||
|             "username": "john", | ||||
|             "email": "johndoe@xlawgaming.com", | ||||
|             "first_name": "John", | ||||
|             "last_name": "Doe", | ||||
|         } | ||||
|         self.client.force_authenticate(user=self.alice) | ||||
|         r = self.client.put(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 400) | ||||
|  | ||||
|     def test_delete(self): | ||||
|         url = f"/accounts/{self.john.pk}/users/" | ||||
|         r = self.client.delete(url) | ||||
| @@ -166,6 +193,19 @@ class GetUpdateDeleteUser(TacticalTestCase): | ||||
|  | ||||
|         self.check_not_authenticated("delete", url) | ||||
|  | ||||
|     @override_settings(ROOT_USER="john") | ||||
|     def test_delete_root_user(self): | ||||
|         url = f"/accounts/{self.john.pk}/users/" | ||||
|         r = self.client.delete(url) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|  | ||||
|     @override_settings(ROOT_USER="john") | ||||
|     def test_delete_non_root_user(self): | ||||
|         url = f"/accounts/{self.john.pk}/users/" | ||||
|         self.client.force_authenticate(user=self.alice) | ||||
|         r = self.client.delete(url) | ||||
|         self.assertEqual(r.status_code, 400) | ||||
|  | ||||
|  | ||||
| class TestUserAction(TacticalTestCase): | ||||
|     def setUp(self): | ||||
| @@ -184,6 +224,21 @@ class TestUserAction(TacticalTestCase): | ||||
|  | ||||
|         self.check_not_authenticated("post", url) | ||||
|  | ||||
|     @override_settings(ROOT_USER="john") | ||||
|     def test_post_root_user(self): | ||||
|         url = "/accounts/users/reset/" | ||||
|         data = {"id": self.john.pk, "password": "3ASDjh2345kJA!@#)#@__123"} | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|  | ||||
|     @override_settings(ROOT_USER="john") | ||||
|     def test_post_non_root_user(self): | ||||
|         url = "/accounts/users/reset/" | ||||
|         data = {"id": self.john.pk, "password": "3ASDjh2345kJA!@#)#@__123"} | ||||
|         self.client.force_authenticate(user=self.alice) | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 400) | ||||
|  | ||||
|     def test_put(self): | ||||
|         url = "/accounts/users/reset/" | ||||
|         data = {"id": self.john.pk} | ||||
| @@ -195,12 +250,42 @@ class TestUserAction(TacticalTestCase): | ||||
|  | ||||
|         self.check_not_authenticated("put", url) | ||||
|  | ||||
|     def test_darkmode(self): | ||||
|     @override_settings(ROOT_USER="john") | ||||
|     def test_put_root_user(self): | ||||
|         url = "/accounts/users/reset/" | ||||
|         data = {"id": self.john.pk} | ||||
|         r = self.client.put(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|  | ||||
|         user = User.objects.get(pk=self.john.pk) | ||||
|         self.assertEqual(user.totp_key, "") | ||||
|  | ||||
|     @override_settings(ROOT_USER="john") | ||||
|     def test_put_non_root_user(self): | ||||
|         url = "/accounts/users/reset/" | ||||
|         data = {"id": self.john.pk} | ||||
|         self.client.force_authenticate(user=self.alice) | ||||
|         r = self.client.put(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 400) | ||||
|  | ||||
|     def test_user_ui(self): | ||||
|         url = "/accounts/users/ui/" | ||||
|         data = {"dark_mode": False} | ||||
|         r = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|  | ||||
|         data = {"show_community_scripts": True} | ||||
|         r = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|  | ||||
|         data = { | ||||
|             "userui": True, | ||||
|             "agent_dblclick_action": "editagent", | ||||
|             "default_agent_tbl_tab": "mixed", | ||||
|         } | ||||
|         r = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|  | ||||
|         self.check_not_authenticated("patch", url) | ||||
|  | ||||
|  | ||||
|   | ||||
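These tests rely on a ROOT_USER setting; in a real deployment it would simply be declared in the Django settings module, for example (file name and value are assumptions):

    # local_settings.py
    ROOT_USER = "john"  # this account cannot be modified or deleted from the UI by other users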
| @@ -60,7 +60,7 @@ class LoginView(KnoxLoginView): | ||||
|  | ||||
|         if settings.DEBUG and token == "sekret": | ||||
|             valid = True | ||||
|         elif totp.verify(token, valid_window=1): | ||||
|         elif totp.verify(token, valid_window=10): | ||||
|             valid = True | ||||
|  | ||||
|         if valid: | ||||
| @@ -108,6 +108,13 @@ class GetUpdateDeleteUser(APIView): | ||||
|     def put(self, request, pk): | ||||
|         user = get_object_or_404(User, pk=pk) | ||||
|  | ||||
|         if ( | ||||
|             hasattr(settings, "ROOT_USER") | ||||
|             and request.user != user | ||||
|             and user.username == settings.ROOT_USER | ||||
|         ): | ||||
|             return notify_error("The root user cannot be modified from the UI") | ||||
|  | ||||
|         serializer = UserSerializer(instance=user, data=request.data, partial=True) | ||||
|         serializer.is_valid(raise_exception=True) | ||||
|         serializer.save() | ||||
| @@ -115,7 +122,15 @@ class GetUpdateDeleteUser(APIView): | ||||
|         return Response("ok") | ||||
|  | ||||
|     def delete(self, request, pk): | ||||
|         get_object_or_404(User, pk=pk).delete() | ||||
|         user = get_object_or_404(User, pk=pk) | ||||
|         if ( | ||||
|             hasattr(settings, "ROOT_USER") | ||||
|             and request.user != user | ||||
|             and user.username == settings.ROOT_USER | ||||
|         ): | ||||
|             return notify_error("The root user cannot be deleted from the UI") | ||||
|  | ||||
|         user.delete() | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
| @@ -124,8 +139,14 @@ class UserActions(APIView): | ||||
|  | ||||
|     # reset password | ||||
|     def post(self, request): | ||||
|  | ||||
|         user = get_object_or_404(User, pk=request.data["id"]) | ||||
|         if ( | ||||
|             hasattr(settings, "ROOT_USER") | ||||
|             and request.user != user | ||||
|             and user.username == settings.ROOT_USER | ||||
|         ): | ||||
|             return notify_error("The root user cannot be modified from the UI") | ||||
|  | ||||
|         user.set_password(request.data["password"]) | ||||
|         user.save() | ||||
|  | ||||
| @@ -133,8 +154,14 @@ class UserActions(APIView): | ||||
|  | ||||
|     # reset two factor token | ||||
|     def put(self, request): | ||||
|  | ||||
|         user = get_object_or_404(User, pk=request.data["id"]) | ||||
|         if ( | ||||
|             hasattr(settings, "ROOT_USER") | ||||
|             and request.user != user | ||||
|             and user.username == settings.ROOT_USER | ||||
|         ): | ||||
|             return notify_error("The root user cannot be modified from the UI") | ||||
|  | ||||
|         user.totp_key = "" | ||||
|         user.save() | ||||
|  | ||||
| @@ -161,6 +188,18 @@ class TOTPSetup(APIView): | ||||
| class UserUI(APIView): | ||||
|     def patch(self, request): | ||||
|         user = request.user | ||||
|         user.dark_mode = request.data["dark_mode"] | ||||
|         user.save(update_fields=["dark_mode"]) | ||||
|         return Response("ok") | ||||
|  | ||||
|         if "dark_mode" in request.data.keys(): | ||||
|             user.dark_mode = request.data["dark_mode"] | ||||
|             user.save(update_fields=["dark_mode"]) | ||||
|  | ||||
|         if "show_community_scripts" in request.data.keys(): | ||||
|             user.show_community_scripts = request.data["show_community_scripts"] | ||||
|             user.save(update_fields=["show_community_scripts"]) | ||||
|  | ||||
|         if "userui" in request.data.keys(): | ||||
|             user.agent_dblclick_action = request.data["agent_dblclick_action"] | ||||
|             user.default_agent_tbl_tab = request.data["default_agent_tbl_tab"] | ||||
|             user.save(update_fields=["agent_dblclick_action", "default_agent_tbl_tab"]) | ||||
|  | ||||
|         return Response("ok") | ||||
|   | ||||
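A minimal sketch of exercising the reworked UserUI endpoint with the DRF test client; the payload keys mirror the three branches in UserUI.patch, and the authenticated user object is hypothetical:

    from rest_framework.test import APIClient

    client = APIClient()
    client.force_authenticate(user=some_user)  # hypothetical authenticated user

    # each key is handled independently, so partial payloads are fine
    client.patch("/accounts/users/ui/", {"dark_mode": False}, format="json")
    client.patch("/accounts/users/ui/", {"show_community_scripts": True}, format="json")
    client.patch(
        "/accounts/users/ui/",
        {
            "userui": True,
            "agent_dblclick_action": "editagent",
            "default_agent_tbl_tab": "mixed",
        },
        format="json",
    )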
| @@ -1,8 +1,7 @@ | ||||
| from django.contrib import admin | ||||
|  | ||||
| from .models import Agent, AgentOutage, RecoveryAction, Note | ||||
| from .models import Agent, RecoveryAction, Note | ||||
|  | ||||
| admin.site.register(Agent) | ||||
| admin.site.register(AgentOutage) | ||||
| admin.site.register(RecoveryAction) | ||||
| admin.site.register(Note) | ||||
|   | ||||
| @@ -3,19 +3,20 @@ import string | ||||
| import os | ||||
| import json | ||||
|  | ||||
| from model_bakery.recipe import Recipe, seq | ||||
| from model_bakery.recipe import Recipe, foreign_key | ||||
| from itertools import cycle | ||||
| from django.utils import timezone as djangotime | ||||
| from django.conf import settings | ||||
|  | ||||
| from .models import Agent | ||||
|  | ||||
|  | ||||
| def generate_agent_id(hostname): | ||||
|     rand = "".join(random.choice(string.ascii_letters) for _ in range(35)) | ||||
|     return f"{rand}-{hostname}" | ||||
|  | ||||
|  | ||||
| site = Recipe("clients.Site") | ||||
|  | ||||
|  | ||||
| def get_wmi_data(): | ||||
|     with open( | ||||
|         os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/wmi_python_agent.json") | ||||
| @@ -24,9 +25,10 @@ def get_wmi_data(): | ||||
|  | ||||
|  | ||||
| agent = Recipe( | ||||
|     Agent, | ||||
|     "agents.Agent", | ||||
|     site=foreign_key(site), | ||||
|     hostname="DESKTOP-TEST123", | ||||
|     version="1.1.1", | ||||
|     version="1.3.0", | ||||
|     monitoring_type=cycle(["workstation", "server"]), | ||||
|     salt_id=generate_agent_id("DESKTOP-TEST123"), | ||||
|     agent_id="71AHC-AA813-HH1BC-AAHH5-00013|DESKTOP-TEST123", | ||||
|   | ||||
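With the recipe now keyed by string and the site attached via foreign_key, tests can create agents without the removed client/site fields; a small sketch assuming model_bakery's standard recipe lookup:

    from model_bakery import baker

    # creates an Agent plus a related Site through the foreign_key(site) recipe
    agent = baker.make_recipe("agents.agent")
    assert agent.hostname == "DESKTOP-TEST123"
    assert agent.version == "1.3.0"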
| @@ -7,14 +7,20 @@ import django.db.models.deletion | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('clients', '0006_deployment'), | ||||
|         ('agents', '0020_auto_20201025_2129'), | ||||
|         ("clients", "0006_deployment"), | ||||
|         ("agents", "0020_auto_20201025_2129"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='agent', | ||||
|             name='site_link', | ||||
|             field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='agents', to='clients.site'), | ||||
|             model_name="agent", | ||||
|             name="site_link", | ||||
|             field=models.ForeignKey( | ||||
|                 blank=True, | ||||
|                 null=True, | ||||
|                 on_delete=django.db.models.deletion.SET_NULL, | ||||
|                 related_name="agents", | ||||
|                 to="clients.site", | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
|   | ||||
| @@ -6,16 +6,16 @@ from django.db import migrations | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('agents', '0022_update_site_primary_key'), | ||||
|         ("agents", "0022_update_site_primary_key"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RemoveField( | ||||
|             model_name='agent', | ||||
|             name='client', | ||||
|             model_name="agent", | ||||
|             name="client", | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='agent', | ||||
|             name='site', | ||||
|             model_name="agent", | ||||
|             name="site", | ||||
|         ), | ||||
|     ] | ||||
|   | ||||
| @@ -6,13 +6,13 @@ from django.db import migrations | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('agents', '0023_auto_20201101_2312'), | ||||
|         ("agents", "0023_auto_20201101_2312"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RenameField( | ||||
|             model_name='agent', | ||||
|             old_name='site_link', | ||||
|             new_name='site', | ||||
|             model_name="agent", | ||||
|             old_name="site_link", | ||||
|             new_name="site", | ||||
|         ), | ||||
|     ] | ||||
|   | ||||
| @@ -6,13 +6,22 @@ from django.db import migrations, models | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('agents', '0024_auto_20201101_2319'), | ||||
|         ("agents", "0024_auto_20201101_2319"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name='recoveryaction', | ||||
|             name='mode', | ||||
|             field=models.CharField(choices=[('salt', 'Salt'), ('mesh', 'Mesh'), ('command', 'Command'), ('rpc', 'Nats RPC')], default='mesh', max_length=50), | ||||
|             model_name="recoveryaction", | ||||
|             name="mode", | ||||
|             field=models.CharField( | ||||
|                 choices=[ | ||||
|                     ("salt", "Salt"), | ||||
|                     ("mesh", "Mesh"), | ||||
|                     ("command", "Command"), | ||||
|                     ("rpc", "Nats RPC"), | ||||
|                 ], | ||||
|                 default="mesh", | ||||
|                 max_length=50, | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
|   | ||||
| @@ -6,13 +6,23 @@ from django.db import migrations, models | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('agents', '0025_auto_20201122_0407'), | ||||
|         ("agents", "0025_auto_20201122_0407"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name='recoveryaction', | ||||
|             name='mode', | ||||
|             field=models.CharField(choices=[('salt', 'Salt'), ('mesh', 'Mesh'), ('command', 'Command'), ('rpc', 'Nats RPC'), ('checkrunner', 'Checkrunner')], default='mesh', max_length=50), | ||||
|             model_name="recoveryaction", | ||||
|             name="mode", | ||||
|             field=models.CharField( | ||||
|                 choices=[ | ||||
|                     ("salt", "Salt"), | ||||
|                     ("mesh", "Mesh"), | ||||
|                     ("command", "Command"), | ||||
|                     ("rpc", "Nats RPC"), | ||||
|                     ("checkrunner", "Checkrunner"), | ||||
|                 ], | ||||
|                 default="mesh", | ||||
|                 max_length=50, | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
|   | ||||
| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 3.1.4 on 2021-01-29 21:11 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('agents', '0026_auto_20201125_2334'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='agent', | ||||
|             name='overdue_dashboard_alert', | ||||
|             field=models.BooleanField(default=False), | ||||
|         ), | ||||
|     ] | ||||
api/tacticalrmm/agents/migrations/0028_auto_20210206_1534.py (new file, 23 lines)
| @@ -0,0 +1,23 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-06 15:34 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('agents', '0027_agent_overdue_dashboard_alert'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='agentoutage', | ||||
|             name='outage_email_sent_time', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='agentoutage', | ||||
|             name='outage_sms_sent_time', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|     ] | ||||
api/tacticalrmm/agents/migrations/0029_delete_agentoutage.py (new file, 16 lines)
| @@ -0,0 +1,16 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-10 21:56 | ||||
|  | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('agents', '0028_auto_20210206_1534'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.DeleteModel( | ||||
|             name='AgentOutage', | ||||
|         ), | ||||
|     ] | ||||
api/tacticalrmm/agents/migrations/0030_agent_offline_time.py (new file, 18 lines)
| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 3.1.6 on 2021-02-16 08:50 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('agents', '0029_delete_agentoutage'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='agent', | ||||
|             name='offline_time', | ||||
|             field=models.PositiveIntegerField(default=4), | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,4 +1,3 @@ | ||||
| import requests | ||||
| import time | ||||
| import base64 | ||||
| from Crypto.Cipher import AES | ||||
| @@ -9,7 +8,10 @@ import validators | ||||
| import msgpack | ||||
| import re | ||||
| from collections import Counter | ||||
| from typing import List, Union, Any | ||||
| from loguru import logger | ||||
| import asyncio | ||||
|  | ||||
| from packaging import version as pyver | ||||
| from distutils.version import LooseVersion | ||||
| from nats.aio.client import Client as NATS | ||||
| @@ -18,6 +20,7 @@ from nats.aio.errors import ErrTimeout | ||||
| from django.db import models | ||||
| from django.conf import settings | ||||
| from django.utils import timezone as djangotime | ||||
| from alerts.models import AlertTemplate | ||||
|  | ||||
| from core.models import CoreSettings, TZ_CHOICES | ||||
| from logs.models import BaseAuditModel | ||||
| @@ -50,6 +53,8 @@ class Agent(BaseAuditModel): | ||||
|     mesh_node_id = models.CharField(null=True, blank=True, max_length=255) | ||||
|     overdue_email_alert = models.BooleanField(default=False) | ||||
|     overdue_text_alert = models.BooleanField(default=False) | ||||
|     overdue_dashboard_alert = models.BooleanField(default=False) | ||||
|     offline_time = models.PositiveIntegerField(default=4) | ||||
|     overdue_time = models.PositiveIntegerField(default=30) | ||||
|     check_interval = models.PositiveIntegerField(default=120) | ||||
|     needs_reboot = models.BooleanField(default=False) | ||||
| @@ -75,6 +80,24 @@ class Agent(BaseAuditModel): | ||||
|         on_delete=models.SET_NULL, | ||||
|     ) | ||||
|  | ||||
|     def save(self, *args, **kwargs): | ||||
|  | ||||
|         # get old agent if exists | ||||
|         old_agent = type(self).objects.get(pk=self.pk) if self.pk else None | ||||
|         super(BaseAuditModel, self).save(*args, **kwargs) | ||||
|  | ||||
|         # check whether a new agent has been created, | ||||
|         # or the policy has changed on the agent, | ||||
|         # or the site has changed on the agent; if so, regenerate policies | ||||
|         if ( | ||||
|             not old_agent | ||||
|             or old_agent | ||||
|             and old_agent.policy != self.policy | ||||
|             or old_agent.site != self.site | ||||
|         ): | ||||
|             self.generate_checks_from_policies() | ||||
|             self.generate_tasks_from_policies() | ||||
|  | ||||
|     def __str__(self): | ||||
|         return self.hostname | ||||
|  | ||||
| @@ -117,14 +140,6 @@ class Agent(BaseAuditModel): | ||||
|             return settings.DL_32 | ||||
|         return None | ||||
|  | ||||
|     @property | ||||
|     def winsalt_dl(self): | ||||
|         if self.arch == "64": | ||||
|             return settings.SALT_64 | ||||
|         elif self.arch == "32": | ||||
|             return settings.SALT_32 | ||||
|         return None | ||||
|  | ||||
|     @property | ||||
|     def win_inno_exe(self): | ||||
|         if self.arch == "64": | ||||
| @@ -135,7 +150,7 @@ class Agent(BaseAuditModel): | ||||
|  | ||||
|     @property | ||||
|     def status(self): | ||||
|         offline = djangotime.now() - djangotime.timedelta(minutes=6) | ||||
|         offline = djangotime.now() - djangotime.timedelta(minutes=self.offline_time) | ||||
|         overdue = djangotime.now() - djangotime.timedelta(minutes=self.overdue_time) | ||||
|  | ||||
|         if self.last_seen is not None: | ||||
| @@ -164,13 +179,11 @@ class Agent(BaseAuditModel): | ||||
|                 elif i.status == "failing": | ||||
|                     failing += 1 | ||||
|  | ||||
|         has_failing_checks = True if failing > 0 else False | ||||
|  | ||||
|         ret = { | ||||
|             "total": total, | ||||
|             "passing": passing, | ||||
|             "failing": failing, | ||||
|             "has_failing_checks": has_failing_checks, | ||||
|             "has_failing_checks": failing > 0, | ||||
|         } | ||||
|         return ret | ||||
|  | ||||
| @@ -258,6 +271,63 @@ class Agent(BaseAuditModel): | ||||
|         except: | ||||
|             return ["unknown disk"] | ||||
|  | ||||
|     def run_script( | ||||
|         self, | ||||
|         scriptpk: int, | ||||
|         args: List[str] = [], | ||||
|         timeout: int = 120, | ||||
|         full: bool = False, | ||||
|         wait: bool = False, | ||||
|         run_on_any: bool = False, | ||||
|     ) -> Any: | ||||
|  | ||||
|         from scripts.models import Script | ||||
|  | ||||
|         script = Script.objects.get(pk=scriptpk) | ||||
|         data = { | ||||
|             "func": "runscriptfull" if full else "runscript", | ||||
|             "timeout": timeout, | ||||
|             "script_args": args, | ||||
|             "payload": { | ||||
|                 "code": script.code, | ||||
|                 "shell": script.shell, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|         running_agent = self | ||||
|         if run_on_any: | ||||
|             nats_ping = {"func": "ping", "timeout": 1} | ||||
|  | ||||
|             # try on self first | ||||
|             r = asyncio.run(self.nats_cmd(nats_ping)) | ||||
|  | ||||
|             if r == "pong": | ||||
|                 running_agent = self | ||||
|             else: | ||||
|                 online = [ | ||||
|                     agent | ||||
|                     for agent in Agent.objects.only( | ||||
|                         "pk", "last_seen", "overdue_time", "offline_time" | ||||
|                     ) | ||||
|                     if agent.status == "online" | ||||
|                 ] | ||||
|  | ||||
|                 for agent in online: | ||||
|                     r = asyncio.run(agent.nats_cmd(nats_ping)) | ||||
|                     if r == "pong": | ||||
|                         running_agent = agent | ||||
|                         break | ||||
|  | ||||
|                 if running_agent.pk == self.pk: | ||||
|                     return "Unable to find an online agent" | ||||
|  | ||||
|         if wait: | ||||
|             return asyncio.run(running_agent.nats_cmd(data, timeout=timeout, wait=True)) | ||||
|         else: | ||||
|             asyncio.run(running_agent.nats_cmd(data, wait=False)) | ||||
|  | ||||
|         return "ok" | ||||
|  | ||||
|     # auto approves updates | ||||
|     def approve_updates(self): | ||||
|         patch_policy = self.get_patch_policy() | ||||
| @@ -384,14 +454,114 @@ class Agent(BaseAuditModel): | ||||
|  | ||||
|         return patch_policy | ||||
|  | ||||
|     # clear is used to delete managed policy checks from agent | ||||
|     # parent_checks specifies a list of checks to delete from agent with matching parent_check field | ||||
|     def generate_checks_from_policies(self, clear=False): | ||||
|         from automation.models import Policy | ||||
|     def get_approved_update_guids(self) -> List[str]: | ||||
|         return list( | ||||
|             self.winupdates.filter(action="approve", installed=False).values_list( | ||||
|                 "guid", flat=True | ||||
|             ) | ||||
|         ) | ||||
|  | ||||
|         # Clear agent checks managed by policy | ||||
|         if clear: | ||||
|             self.agentchecks.filter(managed_by_policy=True).delete() | ||||
|     # returns alert template assigned in the following order: policy, site, client, global | ||||
|     # will return None if nothing is found | ||||
|     def get_alert_template(self) -> Union[AlertTemplate, None]: | ||||
|  | ||||
|         site = self.site | ||||
|         client = self.client | ||||
|         core = CoreSettings.objects.first() | ||||
|  | ||||
|         templates = list() | ||||
|         # check if alert template is on a policy assigned to agent | ||||
|         if ( | ||||
|             self.policy | ||||
|             and self.policy.alert_template | ||||
|             and self.policy.alert_template.is_active | ||||
|         ): | ||||
|             templates.append(self.policy.alert_template) | ||||
|  | ||||
|         # check if policy with alert template is assigned to the site | ||||
|         elif ( | ||||
|             self.monitoring_type == "server" | ||||
|             and site.server_policy | ||||
|             and site.server_policy.alert_template | ||||
|             and site.server_policy.alert_template.is_active | ||||
|         ): | ||||
|             templates.append(site.server_policy.alert_template) | ||||
|         elif ( | ||||
|             self.monitoring_type == "workstation" | ||||
|             and site.workstation_policy | ||||
|             and site.workstation_policy.alert_template | ||||
|             and site.workstation_policy.alert_template.is_active | ||||
|         ): | ||||
|             templates.append(site.workstation_policy.alert_template) | ||||
|  | ||||
|         # check if alert template is assigned to site | ||||
|         elif site.alert_template and site.alert_template.is_active: | ||||
|             templates.append(site.alert_template) | ||||
|  | ||||
|         # check if policy with alert template is assigned to the client | ||||
|         elif ( | ||||
|             self.monitoring_type == "server" | ||||
|             and client.server_policy | ||||
|             and client.server_policy.alert_template | ||||
|             and client.server_policy.alert_template.is_active | ||||
|         ): | ||||
|             templates.append(client.server_policy.alert_template) | ||||
|         elif ( | ||||
|             self.monitoring_type == "workstation" | ||||
|             and client.workstation_policy | ||||
|             and client.workstation_policy.alert_template | ||||
|             and client.workstation_policy.alert_template.is_active | ||||
|         ): | ||||
|             templates.append(client.workstation_policy.alert_template) | ||||
|  | ||||
|         # check if alert template is on client and return | ||||
|         elif client.alert_template and client.alert_template.is_active: | ||||
|             templates.append(client.alert_template) | ||||
|  | ||||
|         # check if alert template is applied globally and return | ||||
|         elif core.alert_template and core.alert_template.is_active: | ||||
|             templates.append(core.alert_template) | ||||
|  | ||||
|         # check if a core (global) policy with an active alert template is assigned for servers or workstations | ||||
|         elif ( | ||||
|             self.monitoring_type == "server" | ||||
|             and core.server_policy | ||||
|             and core.server_policy.alert_template | ||||
|             and core.server_policy.alert_template.is_active | ||||
|         ): | ||||
|             templates.append(core.server_policy.alert_template) | ||||
|         elif ( | ||||
|             self.monitoring_type == "workstation" | ||||
|             and core.workstation_policy | ||||
|             and core.workstation_policy.alert_template | ||||
|             and core.workstation_policy.alert_template.is_active | ||||
|         ): | ||||
|             templates.append(core.workstation_policy.alert_template) | ||||
|  | ||||
|         # go through the templates and return the first one that isn't excluded | ||||
|         for template in templates: | ||||
|             # check if client, site, or agent has been excluded from template | ||||
|             if ( | ||||
|                 client.pk in template.excluded_clients.all() | ||||
|                 or site.pk in template.excluded_sites.all() | ||||
|                 or self.pk in template.excluded_agents.all() | ||||
|             ): | ||||
|                 continue | ||||
|  | ||||
|             # see if template is excluding desktops | ||||
|             if ( | ||||
|                 self.monitoring_type == "workstation" | ||||
|                 and not template.agent_include_desktops | ||||
|             ): | ||||
|                 continue | ||||
|             else: | ||||
|                 return template | ||||
|  | ||||
|         # no alert templates found or agent has been excluded | ||||
|         return None | ||||
|  | ||||
|     def generate_checks_from_policies(self): | ||||
|         from automation.models import Policy | ||||
|  | ||||
|         # Clear agent checks that have overriden_by_policy set | ||||
|         self.agentchecks.update(overriden_by_policy=False) | ||||
| @@ -399,17 +569,9 @@ class Agent(BaseAuditModel): | ||||
|         # Generate checks based on policies | ||||
|         Policy.generate_policy_checks(self) | ||||
|  | ||||
|     # clear is used to delete managed policy tasks from agent | ||||
|     # parent_tasks specifies a list of tasks to delete from agent with matching parent_task field | ||||
|     def generate_tasks_from_policies(self, clear=False): | ||||
|         from autotasks.tasks import delete_win_task_schedule | ||||
|     def generate_tasks_from_policies(self): | ||||
|         from automation.models import Policy | ||||
|  | ||||
|         # Clear agent tasks managed by policy | ||||
|         if clear: | ||||
|             for task in self.autotasks.filter(managed_by_policy=True): | ||||
|                 delete_win_task_schedule.delay(task.pk) | ||||
|  | ||||
|         # Generate tasks based on policies | ||||
|         Policy.generate_policy_tasks(self) | ||||
|  | ||||
| @@ -468,77 +630,6 @@ class Agent(BaseAuditModel): | ||||
|             await nc.flush() | ||||
|             await nc.close() | ||||
|  | ||||
|     def salt_api_cmd(self, **kwargs): | ||||
|  | ||||
|         # salt should always timeout first before the requests' timeout | ||||
|         try: | ||||
|             timeout = kwargs["timeout"] | ||||
|         except KeyError: | ||||
|             # default timeout | ||||
|             timeout = 15 | ||||
|             salt_timeout = 12 | ||||
|         else: | ||||
|             if timeout < 8: | ||||
|                 timeout = 8 | ||||
|                 salt_timeout = 5 | ||||
|             else: | ||||
|                 salt_timeout = timeout - 3 | ||||
|  | ||||
|         json = { | ||||
|             "client": "local", | ||||
|             "tgt": self.salt_id, | ||||
|             "fun": kwargs["func"], | ||||
|             "timeout": salt_timeout, | ||||
|             "username": settings.SALT_USERNAME, | ||||
|             "password": settings.SALT_PASSWORD, | ||||
|             "eauth": "pam", | ||||
|         } | ||||
|  | ||||
|         if "arg" in kwargs: | ||||
|             json.update({"arg": kwargs["arg"]}) | ||||
|         if "kwargs" in kwargs: | ||||
|             json.update({"kwarg": kwargs["kwargs"]}) | ||||
|  | ||||
|         try: | ||||
|             resp = requests.post( | ||||
|                 f"http://{settings.SALT_HOST}:8123/run", | ||||
|                 json=[json], | ||||
|                 timeout=timeout, | ||||
|             ) | ||||
|         except Exception: | ||||
|             return "timeout" | ||||
|  | ||||
|         try: | ||||
|             ret = resp.json()["return"][0][self.salt_id] | ||||
|         except Exception as e: | ||||
|             logger.error(f"{self.salt_id}: {e}") | ||||
|             return "error" | ||||
|         else: | ||||
|             return ret | ||||
|  | ||||
|     def salt_api_async(self, **kwargs): | ||||
|  | ||||
|         json = { | ||||
|             "client": "local_async", | ||||
|             "tgt": self.salt_id, | ||||
|             "fun": kwargs["func"], | ||||
|             "username": settings.SALT_USERNAME, | ||||
|             "password": settings.SALT_PASSWORD, | ||||
|             "eauth": "pam", | ||||
|         } | ||||
|  | ||||
|         if "arg" in kwargs: | ||||
|             json.update({"arg": kwargs["arg"]}) | ||||
|         if "kwargs" in kwargs: | ||||
|             json.update({"kwarg": kwargs["kwargs"]}) | ||||
|  | ||||
|         try: | ||||
|             resp = requests.post(f"http://{settings.SALT_HOST}:8123/run", json=[json]) | ||||
|         except Exception: | ||||
|             return "timeout" | ||||
|  | ||||
|         return resp | ||||
|  | ||||
|     @staticmethod | ||||
|     def serialize(agent): | ||||
|         # serializes the agent and returns json | ||||
| @@ -549,32 +640,6 @@ class Agent(BaseAuditModel): | ||||
|         del ret["client"] | ||||
|         return ret | ||||
|  | ||||
|     @staticmethod | ||||
|     def salt_batch_async(**kwargs): | ||||
|         assert isinstance(kwargs["minions"], list) | ||||
|  | ||||
|         json = { | ||||
|             "client": "local_async", | ||||
|             "tgt_type": "list", | ||||
|             "tgt": kwargs["minions"], | ||||
|             "fun": kwargs["func"], | ||||
|             "username": settings.SALT_USERNAME, | ||||
|             "password": settings.SALT_PASSWORD, | ||||
|             "eauth": "pam", | ||||
|         } | ||||
|  | ||||
|         if "arg" in kwargs: | ||||
|             json.update({"arg": kwargs["arg"]}) | ||||
|         if "kwargs" in kwargs: | ||||
|             json.update({"kwarg": kwargs["kwargs"]}) | ||||
|  | ||||
|         try: | ||||
|             resp = requests.post(f"http://{settings.SALT_HOST}:8123/run", json=[json]) | ||||
|         except Exception: | ||||
|             return "timeout" | ||||
|  | ||||
|         return resp | ||||
|  | ||||
|     def delete_superseded_updates(self): | ||||
|         try: | ||||
|             pks = []  # list of pks to delete | ||||
| @@ -627,73 +692,210 @@ class Agent(BaseAuditModel): | ||||
|                 elif action.details["action"] == "taskdelete": | ||||
|                     delete_win_task_schedule.delay(task_id, pending_action=action.id) | ||||
|  | ||||
|     # for clearing duplicate pending actions on agent | ||||
|     def remove_matching_pending_task_actions(self, task_id): | ||||
|         # remove any other pending actions on agent with same task_id | ||||
|         for action in self.pendingactions.exclude(status="completed"): | ||||
|             if action.details["task_id"] == task_id: | ||||
|                 action.delete() | ||||
|  | ||||
| class AgentOutage(models.Model): | ||||
|     agent = models.ForeignKey( | ||||
|         Agent, | ||||
|         related_name="agentoutages", | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         on_delete=models.CASCADE, | ||||
|     ) | ||||
|     outage_time = models.DateTimeField(auto_now_add=True) | ||||
|     recovery_time = models.DateTimeField(null=True, blank=True) | ||||
|     outage_email_sent = models.BooleanField(default=False) | ||||
|     outage_sms_sent = models.BooleanField(default=False) | ||||
|     recovery_email_sent = models.BooleanField(default=False) | ||||
|     recovery_sms_sent = models.BooleanField(default=False) | ||||
|     def handle_alert(self, checkin: bool = False) -> None: | ||||
|         from alerts.models import Alert | ||||
|         from agents.tasks import ( | ||||
|             agent_recovery_email_task, | ||||
|             agent_recovery_sms_task, | ||||
|             agent_outage_email_task, | ||||
|             agent_outage_sms_task, | ||||
|         ) | ||||
|  | ||||
|     @property | ||||
|     def is_active(self): | ||||
|         return False if self.recovery_time else True | ||||
|         # return if agent is in maintenance mode | ||||
|         if self.maintenance_mode: | ||||
|             return | ||||
|  | ||||
|         alert_template = self.get_alert_template() | ||||
|  | ||||
|         # called when agent is back online | ||||
|         if checkin: | ||||
|             if Alert.objects.filter(agent=self, resolved=False).exists(): | ||||
|  | ||||
|                 # resolve alert if exists | ||||
|                 alert = Alert.objects.get(agent=self, resolved=False) | ||||
|                 alert.resolve() | ||||
|  | ||||
|                 # check if a resolved notification should be emailed | ||||
|                 if ( | ||||
|                     not alert.resolved_email_sent | ||||
|                     and alert_template | ||||
|                     and alert_template.agent_email_on_resolved | ||||
|                     or self.overdue_email_alert | ||||
|                 ): | ||||
|                     agent_recovery_email_task.delay(pk=alert.pk) | ||||
|  | ||||
|                 # check if a resolved notification should be texted | ||||
|                 if ( | ||||
|                     not alert.resolved_sms_sent | ||||
|                     and alert_template | ||||
|                     and alert_template.agent_text_on_resolved | ||||
|                     or self.overdue_text_alert | ||||
|                 ): | ||||
|                     agent_recovery_sms_task.delay(pk=alert.pk) | ||||
|  | ||||
|                 # check if any scripts should be run | ||||
|                 if ( | ||||
|                     not alert.resolved_action_run | ||||
|                     and alert_template | ||||
|                     and alert_template.resolved_action | ||||
|                 ): | ||||
|                     r = self.run_script( | ||||
|                         scriptpk=alert_template.resolved_action.pk, | ||||
|                         args=alert_template.resolved_action_args, | ||||
|                         timeout=alert_template.resolved_action_timeout, | ||||
|                         wait=True, | ||||
|                         full=True, | ||||
|                         run_on_any=True, | ||||
|                     ) | ||||
|  | ||||
|                     # command was successful | ||||
|                     if type(r) == dict: | ||||
|                         alert.resolved_action_retcode = r["retcode"] | ||||
|                         alert.resolved_action_stdout = r["stdout"] | ||||
|                         alert.resolved_action_stderr = r["stderr"] | ||||
|                         alert.resolved_action_execution_time = "{:.4f}".format( | ||||
|                             r["execution_time"] | ||||
|                         ) | ||||
|                         alert.resolved_action_run = djangotime.now() | ||||
|                         alert.save() | ||||
|                     else: | ||||
|                         logger.error( | ||||
|                             f"Resolved action: {alert_template.resolved_action} failed to run on any agent for {self.hostname} resolved outage" | ||||
|                         ) | ||||
|  | ||||
|         # called when agent is offline | ||||
|         else: | ||||
|             # create an alert if one hasn't been created yet | ||||
|             if not Alert.objects.filter(agent=self, resolved=False).exists(): | ||||
|  | ||||
|                 alert = Alert.create_availability_alert(self) | ||||
|  | ||||
|                 # add a null check history to allow gaps in graph | ||||
|                 for check in self.agentchecks.all(): | ||||
|                     check.add_check_history(None) | ||||
|             else: | ||||
|                 alert = Alert.objects.get(agent=self, resolved=False) | ||||
|  | ||||
|             # create dashboard alert if enabled | ||||
|             if ( | ||||
|                 alert_template | ||||
|                 and alert_template.agent_always_alert | ||||
|                 or self.overdue_dashboard_alert | ||||
|             ): | ||||
|                 alert.hidden = False | ||||
|                 alert.save() | ||||
|  | ||||
|             # send email alert if enabled | ||||
|             if ( | ||||
|                 not alert.email_sent | ||||
|                 and alert_template | ||||
|                 and alert_template.agent_always_email | ||||
|                 or self.overdue_email_alert | ||||
|             ): | ||||
|                 agent_outage_email_task.delay( | ||||
|                     pk=alert.pk, | ||||
|                     alert_interval=alert_template.check_periodic_alert_days | ||||
|                     if alert_template | ||||
|                     else None, | ||||
|                 ) | ||||
|  | ||||
|             # send text message if enabled | ||||
|             if ( | ||||
|                 not alert.sms_sent | ||||
|                 and alert_template | ||||
|                 and alert_template.agent_always_text | ||||
|                 or self.overdue_text_alert | ||||
|             ): | ||||
|                 agent_outage_sms_task.delay( | ||||
|                     pk=alert.pk, | ||||
|                     alert_interval=alert_template.check_periodic_alert_days | ||||
|                     if alert_template | ||||
|                     else None, | ||||
|                 ) | ||||
|  | ||||
|             # check if any scripts should be run | ||||
|             if not alert.action_run and alert_template and alert_template.action: | ||||
|                 r = self.run_script( | ||||
|                     scriptpk=alert_template.action.pk, | ||||
|                     args=alert_template.action_args, | ||||
|                     timeout=alert_template.action_timeout, | ||||
|                     wait=True, | ||||
|                     full=True, | ||||
|                     run_on_any=True, | ||||
|                 ) | ||||
|  | ||||
|                 # command was successful | ||||
|                 if type(r) == dict: | ||||
|                     alert.action_retcode = r["retcode"] | ||||
|                     alert.action_stdout = r["stdout"] | ||||
|                     alert.action_stderr = r["stderr"] | ||||
|                     alert.action_execution_time = "{:.4f}".format(r["execution_time"]) | ||||
|                     alert.action_run = djangotime.now() | ||||
|                     alert.save() | ||||
|                 else: | ||||
|                     logger.error( | ||||
|                         f"Failure action: {alert_template.action.name} failed to run on any agent for {self.hostname} outage" | ||||
|                     ) | ||||
|  | ||||
|     def send_outage_email(self): | ||||
|         from core.models import CoreSettings | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         alert_template = self.get_alert_template() | ||||
|         CORE.send_mail( | ||||
|             f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data overdue", | ||||
|             f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue", | ||||
|             ( | ||||
|                 f"Data has not been received from client {self.agent.client.name}, " | ||||
|                 f"site {self.agent.site.name}, " | ||||
|                 f"agent {self.agent.hostname} " | ||||
|                 f"Data has not been received from client {self.client.name}, " | ||||
|                 f"site {self.site.name}, " | ||||
|                 f"agent {self.hostname} " | ||||
|                 "within the expected time." | ||||
|             ), | ||||
|             alert_template=alert_template, | ||||
|         ) | ||||
|  | ||||
|     def send_recovery_email(self): | ||||
|         from core.models import CoreSettings | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         alert_template = self.get_alert_template() | ||||
|         CORE.send_mail( | ||||
|             f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data received", | ||||
|             f"{self.client.name}, {self.site.name}, {self.hostname} - data received", | ||||
|             ( | ||||
|                 f"Data has been received from client {self.agent.client.name}, " | ||||
|                 f"site {self.agent.site.name}, " | ||||
|                 f"agent {self.agent.hostname} " | ||||
|                 f"Data has been received from client {self.client.name}, " | ||||
|                 f"site {self.site.name}, " | ||||
|                 f"agent {self.hostname} " | ||||
|                 "after an interruption in data transmission." | ||||
|             ), | ||||
|             alert_template=alert_template, | ||||
|         ) | ||||
|  | ||||
|     def send_outage_sms(self): | ||||
|         from core.models import CoreSettings | ||||
|  | ||||
|         alert_template = self.get_alert_template() | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         CORE.send_sms( | ||||
|             f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data overdue" | ||||
|             f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue", | ||||
|             alert_template=alert_template, | ||||
|         ) | ||||
|  | ||||
|     def send_recovery_sms(self): | ||||
|         from core.models import CoreSettings | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         alert_template = self.get_alert_template() | ||||
|         CORE.send_sms( | ||||
|             f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data received" | ||||
|             f"{self.client.name}, {self.site.name}, {self.hostname} - data received", | ||||
|             alert_template=alert_template, | ||||
|         ) | ||||
|  | ||||
|     def __str__(self): | ||||
|         return self.agent.hostname | ||||
|  | ||||
|  | ||||
| RECOVERY_CHOICES = [ | ||||
|     ("salt", "Salt"), | ||||
|   | ||||
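The run_script helper added above is the same call handle_alert uses for failure/resolved actions; a rough usage sketch, assuming a Script with pk=1 exists and NATS is reachable from the worker (both assumptions, not part of this diff):

    from agents.models import Agent

    agent = Agent.objects.first()
    r = agent.run_script(
        scriptpk=1,           # hypothetical script primary key
        args=["-Verbose"],    # forwarded to the agent as script_args
        timeout=120,
        full=True,            # "runscriptfull" returns retcode/stdout/stderr/execution_time
        wait=True,            # block for the NATS reply instead of fire-and-forget
        run_on_any=True,      # fall back to another online agent if this one is offline
    )
    if isinstance(r, dict):
        print(r["retcode"], r["stdout"])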
| @@ -34,6 +34,17 @@ class AgentSerializer(serializers.ModelSerializer): | ||||
|         ] | ||||
|  | ||||
|  | ||||
| class AgentOverdueActionSerializer(serializers.ModelSerializer): | ||||
|     class Meta: | ||||
|         model = Agent | ||||
|         fields = [ | ||||
|             "pk", | ||||
|             "overdue_email_alert", | ||||
|             "overdue_text_alert", | ||||
|             "overdue_dashboard_alert", | ||||
|         ] | ||||
|  | ||||
|  | ||||
| class AgentTableSerializer(serializers.ModelSerializer): | ||||
|     patches_pending = serializers.ReadOnlyField(source="has_patches_pending") | ||||
|     pending_actions = serializers.SerializerMethodField() | ||||
| @@ -42,17 +53,31 @@ class AgentTableSerializer(serializers.ModelSerializer): | ||||
|     last_seen = serializers.SerializerMethodField() | ||||
|     client_name = serializers.ReadOnlyField(source="client.name") | ||||
|     site_name = serializers.ReadOnlyField(source="site.name") | ||||
|     logged_username = serializers.SerializerMethodField() | ||||
|     italic = serializers.SerializerMethodField() | ||||
|     policy = serializers.ReadOnlyField(source="policy.id") | ||||
|  | ||||
|     def get_pending_actions(self, obj): | ||||
|         return obj.pendingactions.filter(status="pending").count() | ||||
|  | ||||
|     def get_last_seen(self, obj): | ||||
|     def get_last_seen(self, obj) -> float: | ||||
|         if obj.time_zone is not None: | ||||
|             agent_tz = pytz.timezone(obj.time_zone) | ||||
|         else: | ||||
|             agent_tz = self.context["default_tz"] | ||||
|  | ||||
|         return obj.last_seen.astimezone(agent_tz).strftime("%m %d %Y %H:%M:%S") | ||||
|         return obj.last_seen.astimezone(agent_tz).timestamp() | ||||
|  | ||||
|     def get_logged_username(self, obj) -> str: | ||||
|         if obj.logged_in_username == "None" and obj.status == "online": | ||||
|             return obj.last_logged_in_user | ||||
|         elif obj.logged_in_username != "None": | ||||
|             return obj.logged_in_username | ||||
|         else: | ||||
|             return "-" | ||||
|  | ||||
|     def get_italic(self, obj) -> bool: | ||||
|         return obj.logged_in_username == "None" and obj.status == "online" | ||||
|  | ||||
|     class Meta: | ||||
|         model = Agent | ||||
| @@ -70,12 +95,14 @@ class AgentTableSerializer(serializers.ModelSerializer): | ||||
|             "status", | ||||
|             "overdue_text_alert", | ||||
|             "overdue_email_alert", | ||||
|             "overdue_dashboard_alert", | ||||
|             "last_seen", | ||||
|             "boot_time", | ||||
|             "checks", | ||||
|             "logged_in_username", | ||||
|             "last_logged_in_user", | ||||
|             "maintenance_mode", | ||||
|             "logged_username", | ||||
|             "italic", | ||||
|             "policy", | ||||
|         ] | ||||
|         depth = 2 | ||||
|  | ||||
| @@ -101,10 +128,12 @@ class AgentEditSerializer(serializers.ModelSerializer): | ||||
|             "timezone", | ||||
|             "check_interval", | ||||
|             "overdue_time", | ||||
|             "offline_time", | ||||
|             "overdue_text_alert", | ||||
|             "overdue_email_alert", | ||||
|             "all_timezones", | ||||
|             "winupdatepolicy", | ||||
|             "policy", | ||||
|         ] | ||||
|  | ||||
|  | ||||
|   | ||||
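Since get_last_seen now emits a POSIX timestamp instead of a pre-formatted string, consumers are expected to localize it themselves; a small sketch of converting such a value back to the old display format (timestamp and timezone are example values):

    import datetime as dt
    import pytz

    ts = 1613468400.0                       # example value from the serializer
    tz = pytz.timezone("America/New_York")  # hypothetical display timezone
    print(dt.datetime.fromtimestamp(ts, tz).strftime("%m %d %Y %H:%M:%S"))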
| @@ -2,321 +2,259 @@ import asyncio | ||||
| from loguru import logger | ||||
| from time import sleep | ||||
| import random | ||||
| import requests | ||||
| from packaging import version as pyver | ||||
| from typing import List, Union | ||||
| import datetime as dt | ||||
|  | ||||
| from django.utils import timezone as djangotime | ||||
| from django.conf import settings | ||||
| from scripts.models import Script | ||||
|  | ||||
| from tacticalrmm.celery import app | ||||
| from agents.models import Agent, AgentOutage | ||||
| from agents.models import Agent | ||||
| from core.models import CoreSettings | ||||
| from logs.models import PendingAction | ||||
|  | ||||
| logger.configure(**settings.LOG_CONFIG) | ||||
|  | ||||
| OLD_64_PY_AGENT = "https://github.com/wh1te909/winagent/releases/download/v0.11.2/winagent-v0.11.2.exe" | ||||
| OLD_32_PY_AGENT = "https://github.com/wh1te909/winagent/releases/download/v0.11.2/winagent-v0.11.2-x86.exe" | ||||
|  | ||||
| def agent_update(pk: int) -> str: | ||||
|     agent = Agent.objects.get(pk=pk) | ||||
|  | ||||
|     if pyver.parse(agent.version) <= pyver.parse("1.1.11"): | ||||
|         logger.warning( | ||||
|             f"{agent.hostname} v{agent.version} is running an unsupported version. Refusing to auto update." | ||||
|         ) | ||||
|         return "not supported" | ||||
|  | ||||
|     # skip if we can't determine the arch | ||||
|     if agent.arch is None: | ||||
|         logger.warning( | ||||
|             f"Unable to determine arch on {agent.hostname}. Skipping agent update." | ||||
|         ) | ||||
|         return "noarch" | ||||
|  | ||||
|     # sqlite was removed in 1.4.0 to get rid of the cgo dependency | ||||
|     # 1.3.0 has a migration func to move from sqlite to the win registry, so force an upgrade to 1.3.0 first if the agent is older | ||||
|     if pyver.parse(agent.version) >= pyver.parse("1.3.0"): | ||||
|         version = settings.LATEST_AGENT_VER | ||||
|         url = agent.winagent_dl | ||||
|         inno = agent.win_inno_exe | ||||
|     else: | ||||
|         version = "1.3.0" | ||||
|         inno = ( | ||||
|             "winagent-v1.3.0.exe" if agent.arch == "64" else "winagent-v1.3.0-x86.exe" | ||||
|         ) | ||||
|         url = f"https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/{inno}" | ||||
|  | ||||
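|     # drop any stale pending update action so only the newest update is queued | ||||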
|     if agent.pendingactions.filter( | ||||
|         action_type="agentupdate", status="pending" | ||||
|     ).exists(): | ||||
|         agent.pendingactions.filter( | ||||
|             action_type="agentupdate", status="pending" | ||||
|         ).delete() | ||||
|  | ||||
|     PendingAction.objects.create( | ||||
|         agent=agent, | ||||
|         action_type="agentupdate", | ||||
|         details={ | ||||
|             "url": url, | ||||
|             "version": version, | ||||
|             "inno": inno, | ||||
|         }, | ||||
|     ) | ||||
|  | ||||
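|     # fire-and-forget: tell the agent over NATS to download and run the new installer | ||||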
|     nats_data = { | ||||
|         "func": "agentupdate", | ||||
|         "payload": { | ||||
|             "url": url, | ||||
|             "version": version, | ||||
|             "inno": inno, | ||||
|         }, | ||||
|     } | ||||
|     asyncio.run(agent.nats_cmd(nats_data, wait=False)) | ||||
|     return "created" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def send_agent_update_task(pks, version): | ||||
|     assert isinstance(pks, list) | ||||
|  | ||||
|     q = Agent.objects.filter(pk__in=pks) | ||||
|     agents = [i.pk for i in q if pyver.parse(i.version) < pyver.parse(version)] | ||||
|  | ||||
|     chunks = (agents[i : i + 30] for i in range(0, len(agents), 30)) | ||||
|  | ||||
| def send_agent_update_task(pks: List[int]) -> None: | ||||
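|     # process agents in chunks of 30, with a short pause between agents and a few seconds between chunks | ||||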
|     chunks = (pks[i : i + 30] for i in range(0, len(pks), 30)) | ||||
|     for chunk in chunks: | ||||
|         for pk in chunk: | ||||
|             agent = Agent.objects.get(pk=pk) | ||||
|  | ||||
|             # skip if we can't determine the arch | ||||
|             if agent.arch is None: | ||||
|                 logger.warning( | ||||
|                     f"Unable to determine arch on {agent.salt_id}. Skipping." | ||||
|                 ) | ||||
|                 continue | ||||
|  | ||||
|             # golang agent only backwards compatible with py agent 0.11.2 | ||||
|             # force an upgrade to the latest python agent if version < 0.11.2 | ||||
|             if pyver.parse(agent.version) < pyver.parse("0.11.2"): | ||||
|                 url = OLD_64_PY_AGENT if agent.arch == "64" else OLD_32_PY_AGENT | ||||
|                 inno = ( | ||||
|                     "winagent-v0.11.2.exe" | ||||
|                     if agent.arch == "64" | ||||
|                     else "winagent-v0.11.2-x86.exe" | ||||
|                 ) | ||||
|             else: | ||||
|                 url = agent.winagent_dl | ||||
|                 inno = agent.win_inno_exe | ||||
|  | ||||
|             if agent.has_nats: | ||||
|                 if agent.pendingactions.filter( | ||||
|                     action_type="agentupdate", status="pending" | ||||
|                 ).exists(): | ||||
|                     action = agent.pendingactions.filter( | ||||
|                         action_type="agentupdate", status="pending" | ||||
|                     ).last() | ||||
|                     if pyver.parse(action.details["version"]) < pyver.parse( | ||||
|                         settings.LATEST_AGENT_VER | ||||
|                     ): | ||||
|                         action.delete() | ||||
|                     else: | ||||
|                         continue | ||||
|  | ||||
|                 PendingAction.objects.create( | ||||
|                     agent=agent, | ||||
|                     action_type="agentupdate", | ||||
|                     details={ | ||||
|                         "url": agent.winagent_dl, | ||||
|                         "version": settings.LATEST_AGENT_VER, | ||||
|                         "inno": agent.win_inno_exe, | ||||
|                     }, | ||||
|                 ) | ||||
|             # TODO | ||||
|             # Salt is deprecated, remove this once salt is gone | ||||
|             else: | ||||
|                 r = agent.salt_api_async( | ||||
|                     func="win_agent.do_agent_update_v2", | ||||
|                     kwargs={ | ||||
|                         "inno": inno, | ||||
|                         "url": url, | ||||
|                     }, | ||||
|                 ) | ||||
|         sleep(5) | ||||
|             agent_update(pk) | ||||
|             sleep(0.05) | ||||
|         sleep(4) | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def auto_self_agent_update_task(): | ||||
| def auto_self_agent_update_task() -> None: | ||||
|     core = CoreSettings.objects.first() | ||||
|     if not core.agent_auto_update: | ||||
|         logger.info("Agent auto update is disabled. Skipping.") | ||||
|         return | ||||
|  | ||||
|     q = Agent.objects.only("pk", "version") | ||||
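|     # only queue agents that are below the latest released agent version | ||||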
|     agents = [ | ||||
|     pks: List[int] = [ | ||||
|         i.pk | ||||
|         for i in q | ||||
|         if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER) | ||||
|     ] | ||||
|  | ||||
|     chunks = (agents[i : i + 30] for i in range(0, len(agents), 30)) | ||||
|  | ||||
|     chunks = (pks[i : i + 30] for i in range(0, len(pks), 30)) | ||||
|     for chunk in chunks: | ||||
|         for pk in chunk: | ||||
|             agent = Agent.objects.get(pk=pk) | ||||
|  | ||||
|             # skip if we can't determine the arch | ||||
|             if agent.arch is None: | ||||
|                 logger.warning( | ||||
|                     f"Unable to determine arch on {agent.salt_id}. Skipping." | ||||
|                 ) | ||||
|                 continue | ||||
|  | ||||
|             # golang agent only backwards compatible with py agent 0.11.2 | ||||
|             # force an upgrade to the latest python agent if version < 0.11.2 | ||||
|             if pyver.parse(agent.version) < pyver.parse("0.11.2"): | ||||
|                 url = OLD_64_PY_AGENT if agent.arch == "64" else OLD_32_PY_AGENT | ||||
|                 inno = ( | ||||
|                     "winagent-v0.11.2.exe" | ||||
|                     if agent.arch == "64" | ||||
|                     else "winagent-v0.11.2-x86.exe" | ||||
|                 ) | ||||
|             else: | ||||
|                 url = agent.winagent_dl | ||||
|                 inno = agent.win_inno_exe | ||||
|  | ||||
|             if agent.has_nats: | ||||
|                 if agent.pendingactions.filter( | ||||
|                     action_type="agentupdate", status="pending" | ||||
|                 ).exists(): | ||||
|                     action = agent.pendingactions.filter( | ||||
|                         action_type="agentupdate", status="pending" | ||||
|                     ).last() | ||||
|                     if pyver.parse(action.details["version"]) < pyver.parse( | ||||
|                         settings.LATEST_AGENT_VER | ||||
|                     ): | ||||
|                         action.delete() | ||||
|                     else: | ||||
|                         continue | ||||
|  | ||||
|                 PendingAction.objects.create( | ||||
|                     agent=agent, | ||||
|                     action_type="agentupdate", | ||||
|                     details={ | ||||
|                         "url": agent.winagent_dl, | ||||
|                         "version": settings.LATEST_AGENT_VER, | ||||
|                         "inno": agent.win_inno_exe, | ||||
|                     }, | ||||
|                 ) | ||||
|             # TODO | ||||
|             # Salt is deprecated, remove this once salt is gone | ||||
|             else: | ||||
|                 r = agent.salt_api_async( | ||||
|                     func="win_agent.do_agent_update_v2", | ||||
|                     kwargs={ | ||||
|                         "inno": inno, | ||||
|                         "url": url, | ||||
|                     }, | ||||
|                 ) | ||||
|         sleep(5) | ||||
|             agent_update(pk) | ||||
|             sleep(0.05) | ||||
|         sleep(4) | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def sync_sysinfo_task(): | ||||
|     agents = Agent.objects.all() | ||||
|     online = [ | ||||
|         i | ||||
|         for i in agents | ||||
|         if pyver.parse(i.version) >= pyver.parse("1.1.3") and i.status == "online" | ||||
|     ] | ||||
|     for agent in online: | ||||
|         asyncio.run(agent.nats_cmd({"func": "sync"}, wait=False)) | ||||
| def agent_outage_email_task(pk: int, alert_interval: Union[float, None] = None) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|  | ||||
| @app.task | ||||
| def sync_salt_modules_task(pk): | ||||
|     agent = Agent.objects.get(pk=pk) | ||||
|     r = agent.salt_api_cmd(timeout=35, func="saltutil.sync_modules") | ||||
|     # successful sync with new/changed files: {'return': [{'MINION-15': ['modules.get_eventlog', 'modules.win_agent', 'etc...']}]} | ||||
|     # successful sync with no new/changed files: {'return': [{'MINION-15': []}]} | ||||
|     if r == "timeout" or r == "error": | ||||
|         return f"Unable to sync modules {agent.salt_id}" | ||||
|  | ||||
|     return f"Successfully synced salt modules on {agent.hostname}" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def batch_sync_modules_task(): | ||||
|     # sync modules, split into chunks of 50 agents to not overload salt | ||||
|     agents = Agent.objects.all() | ||||
|     online = [i.salt_id for i in agents] | ||||
|     chunks = (online[i : i + 50] for i in range(0, len(online), 50)) | ||||
|     for chunk in chunks: | ||||
|         Agent.salt_batch_async(minions=chunk, func="saltutil.sync_modules") | ||||
|         sleep(10) | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def uninstall_agent_task(salt_id, has_nats): | ||||
|     attempts = 0 | ||||
|     error = False | ||||
|  | ||||
|     if not has_nats: | ||||
|         while 1: | ||||
|             try: | ||||
|  | ||||
|                 r = requests.post( | ||||
|                     f"http://{settings.SALT_HOST}:8123/run", | ||||
|                     json=[ | ||||
|                         { | ||||
|                             "client": "local", | ||||
|                             "tgt": salt_id, | ||||
|                             "fun": "win_agent.uninstall_agent", | ||||
|                             "timeout": 8, | ||||
|                             "username": settings.SALT_USERNAME, | ||||
|                             "password": settings.SALT_PASSWORD, | ||||
|                             "eauth": "pam", | ||||
|                         } | ||||
|                     ], | ||||
|                     timeout=10, | ||||
|                 ) | ||||
|                 ret = r.json()["return"][0][salt_id] | ||||
|             except Exception: | ||||
|                 attempts += 1 | ||||
|             else: | ||||
|                 if ret != "ok": | ||||
|                     attempts += 1 | ||||
|                 else: | ||||
|                     attempts = 0 | ||||
|  | ||||
|             if attempts >= 10: | ||||
|                 error = True | ||||
|                 break | ||||
|             elif attempts == 0: | ||||
|                 break | ||||
|  | ||||
|     if error: | ||||
|         logger.error(f"{salt_id} uninstall failed") | ||||
|     if not alert.email_sent: | ||||
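|         # first notification for this outage; the random delay presumably staggers emails when many agents go down at once | ||||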
|         sleep(random.randint(1, 15)) | ||||
|         alert.agent.send_outage_email() | ||||
|         alert.email_sent = djangotime.now() | ||||
|         alert.save(update_fields=["email_sent"]) | ||||
|     else: | ||||
|         logger.info(f"{salt_id} was successfully uninstalled") | ||||
|  | ||||
|     try: | ||||
|         r = requests.post( | ||||
|             f"http://{settings.SALT_HOST}:8123/run", | ||||
|             json=[ | ||||
|                 { | ||||
|                     "client": "wheel", | ||||
|                     "fun": "key.delete", | ||||
|                     "match": salt_id, | ||||
|                     "username": settings.SALT_USERNAME, | ||||
|                     "password": settings.SALT_PASSWORD, | ||||
|                     "eauth": "pam", | ||||
|                 } | ||||
|             ], | ||||
|             timeout=30, | ||||
|         ) | ||||
|     except Exception: | ||||
|         logger.error(f"{salt_id} unable to remove salt-key") | ||||
|         if alert_interval: | ||||
|             # send an email only if the last email sent is older than the alert interval | ||||
|             delta = djangotime.now() - dt.timedelta(days=alert_interval) | ||||
|             if alert.email_sent < delta: | ||||
|                 sleep(random.randint(1, 10)) | ||||
|                 alert.agent.send_outage_email() | ||||
|                 alert.email_sent = djangotime.now() | ||||
|                 alert.save(update_fields=["email_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def agent_outage_email_task(pk): | ||||
| def agent_recovery_email_task(pk: int) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     sleep(random.randint(1, 15)) | ||||
|     outage = AgentOutage.objects.get(pk=pk) | ||||
|     outage.send_outage_email() | ||||
|     outage.outage_email_sent = True | ||||
|     outage.save(update_fields=["outage_email_sent"]) | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|     alert.agent.send_recovery_email() | ||||
|     alert.resolved_email_sent = djangotime.now() | ||||
|     alert.save(update_fields=["resolved_email_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def agent_recovery_email_task(pk): | ||||
|     sleep(random.randint(1, 15)) | ||||
|     outage = AgentOutage.objects.get(pk=pk) | ||||
|     outage.send_recovery_email() | ||||
|     outage.recovery_email_sent = True | ||||
|     outage.save(update_fields=["recovery_email_sent"]) | ||||
| def agent_outage_sms_task(pk: int, alert_interval: Union[float, None] = None) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|  | ||||
|     if not alert.sms_sent: | ||||
|         sleep(random.randint(1, 15)) | ||||
|         alert.agent.send_outage_sms() | ||||
|         alert.sms_sent = djangotime.now() | ||||
|         alert.save(update_fields=["sms_sent"]) | ||||
|     else: | ||||
|         if alert_interval: | ||||
|             # send an SMS only if the last SMS sent is older than the alert interval | ||||
|             delta = djangotime.now() - dt.timedelta(days=alert_interval) | ||||
|             if alert.sms_sent < delta: | ||||
|                 sleep(random.randint(1, 10)) | ||||
|                 alert.agent.send_outage_sms() | ||||
|                 alert.sms_sent = djangotime.now() | ||||
|                 alert.save(update_fields=["sms_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def agent_outage_sms_task(pk): | ||||
| def agent_recovery_sms_task(pk: int) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     sleep(random.randint(1, 3)) | ||||
|     outage = AgentOutage.objects.get(pk=pk) | ||||
|     outage.send_outage_sms() | ||||
|     outage.outage_sms_sent = True | ||||
|     outage.save(update_fields=["outage_sms_sent"]) | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|     alert.agent.send_recovery_sms() | ||||
|     alert.resolved_sms_sent = djangotime.now() | ||||
|     alert.save(update_fields=["resolved_sms_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def agent_recovery_sms_task(pk): | ||||
|     sleep(random.randint(1, 3)) | ||||
|     outage = AgentOutage.objects.get(pk=pk) | ||||
|     outage.send_recovery_sms() | ||||
|     outage.recovery_sms_sent = True | ||||
|     outage.save(update_fields=["recovery_sms_sent"]) | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def agent_outages_task(): | ||||
| def agent_outages_task() -> None: | ||||
|     agents = Agent.objects.only( | ||||
|         "pk", "last_seen", "overdue_time", "overdue_email_alert", "overdue_text_alert" | ||||
|         "pk", | ||||
|         "last_seen", | ||||
|         "offline_time", | ||||
|         "overdue_time", | ||||
|         "overdue_email_alert", | ||||
|         "overdue_text_alert", | ||||
|         "overdue_dashboard_alert", | ||||
|     ) | ||||
|  | ||||
|     for agent in agents: | ||||
|         if agent.overdue_email_alert or agent.overdue_text_alert: | ||||
|             if agent.status == "overdue": | ||||
|                 outages = AgentOutage.objects.filter(agent=agent) | ||||
|                 if outages and outages.last().is_active: | ||||
|                     continue | ||||
|  | ||||
|                 outage = AgentOutage(agent=agent) | ||||
|                 outage.save() | ||||
|  | ||||
|                 if agent.overdue_email_alert and not agent.maintenance_mode: | ||||
|                     agent_outage_email_task.delay(pk=outage.pk) | ||||
|  | ||||
|                 if agent.overdue_text_alert and not agent.maintenance_mode: | ||||
|                     agent_outage_sms_task.delay(pk=outage.pk) | ||||
|         if agent.status == "overdue": | ||||
|             agent.handle_alert() | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def handle_agent_recovery_task(pk: int) -> None: | ||||
|     sleep(10) | ||||
|     from agents.models import RecoveryAction | ||||
|  | ||||
|     action = RecoveryAction.objects.get(pk=pk) | ||||
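|     # "command" recoveries run a custom shell command; all other modes use the agent's built-in recover handler | ||||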
|     if action.mode == "command": | ||||
|         data = {"func": "recoverycmd", "recoverycommand": action.command} | ||||
|     else: | ||||
|         data = {"func": "recover", "payload": {"mode": action.mode}} | ||||
|  | ||||
|     asyncio.run(action.agent.nats_cmd(data, wait=False)) | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def run_script_email_results_task( | ||||
|     agentpk: int, scriptpk: int, nats_timeout: int, emails: List[str] | ||||
| ): | ||||
|     agent = Agent.objects.get(pk=agentpk) | ||||
|     script = Script.objects.get(pk=scriptpk) | ||||
|     r = agent.run_script(scriptpk=script.pk, full=True, timeout=nats_timeout, wait=True) | ||||
|     if r == "timeout": | ||||
|         logger.error(f"{agent.hostname} timed out running script.") | ||||
|         return | ||||
|  | ||||
|     CORE = CoreSettings.objects.first() | ||||
|     subject = f"{agent.hostname} {script.name} Results" | ||||
|     exec_time = "{:.4f}".format(r["execution_time"]) | ||||
|     body = ( | ||||
|         subject | ||||
|         + f"\nReturn code: {r['retcode']}\nExecution time: {exec_time} seconds\nStdout: {r['stdout']}\nStderr: {r['stderr']}" | ||||
|     ) | ||||
|  | ||||
|     import smtplib | ||||
|     from email.message import EmailMessage | ||||
|  | ||||
|     msg = EmailMessage() | ||||
|     msg["Subject"] = subject | ||||
|     msg["From"] = CORE.smtp_from_email | ||||
|  | ||||
|     if emails: | ||||
|         msg["To"] = ", ".join(emails) | ||||
|     else: | ||||
|         msg["To"] = ", ".join(CORE.email_alert_recipients) | ||||
|  | ||||
|     msg.set_content(body) | ||||
|  | ||||
|     try: | ||||
|         with smtplib.SMTP(CORE.smtp_host, CORE.smtp_port, timeout=20) as server: | ||||
|             if CORE.smtp_requires_auth: | ||||
|                 server.ehlo() | ||||
|                 server.starttls() | ||||
|                 server.login(CORE.smtp_host_user, CORE.smtp_host_password) | ||||
|                 server.send_message(msg) | ||||
|                 server.quit() | ||||
|             else: | ||||
|                 server.send_message(msg) | ||||
|                 server.quit() | ||||
|     except Exception as e: | ||||
|         logger.error(e) | ||||
|   | ||||
| @@ -4,21 +4,19 @@ from unittest.mock import patch | ||||
|  | ||||
| from model_bakery import baker | ||||
| from itertools import cycle | ||||
| from typing import List | ||||
| from packaging import version as pyver | ||||
|  | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.utils import timezone as djangotime | ||||
|  | ||||
| from logs.models import PendingAction | ||||
|  | ||||
| from tacticalrmm.test import TacticalTestCase | ||||
| from .serializers import AgentSerializer | ||||
| from winupdate.serializers import WinUpdatePolicySerializer | ||||
| from .models import Agent | ||||
| from .tasks import ( | ||||
|     auto_self_agent_update_task, | ||||
|     sync_salt_modules_task, | ||||
|     batch_sync_modules_task, | ||||
|     OLD_64_PY_AGENT, | ||||
|     OLD_32_PY_AGENT, | ||||
| ) | ||||
| from .tasks import auto_self_agent_update_task | ||||
| from winupdate.models import WinUpdatePolicy | ||||
|  | ||||
|  | ||||
| @@ -69,12 +67,34 @@ class TestAgentViews(TacticalTestCase): | ||||
|     @patch("agents.tasks.send_agent_update_task.delay") | ||||
|     def test_update_agents(self, mock_task): | ||||
|         url = "/agents/updateagents/" | ||||
|         data = {"pks": [1, 2, 3, 5, 10], "version": "0.11.1"} | ||||
|         baker.make_recipe( | ||||
|             "agents.agent", | ||||
|             operating_system="Windows 10 Pro, 64 bit (build 19041.450)", | ||||
|             version=settings.LATEST_AGENT_VER, | ||||
|             _quantity=15, | ||||
|         ) | ||||
|         baker.make_recipe( | ||||
|             "agents.agent", | ||||
|             operating_system="Windows 10 Pro, 64 bit (build 19041.450)", | ||||
|             version="1.3.0", | ||||
|             _quantity=15, | ||||
|         ) | ||||
|  | ||||
|         pks: List[int] = list( | ||||
|             Agent.objects.only("pk", "version").values_list("pk", flat=True) | ||||
|         ) | ||||
|  | ||||
|         data = {"pks": pks} | ||||
|         expected: List[int] = [ | ||||
|             i.pk | ||||
|             for i in Agent.objects.only("pk", "version") | ||||
|             if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER) | ||||
|         ] | ||||
|  | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|  | ||||
|         mock_task.assert_called_with(pks=data["pks"], version=data["version"]) | ||||
|         mock_task.assert_called_with(pks=expected) | ||||
|  | ||||
|         self.check_not_authenticated("post", url) | ||||
|  | ||||
| @@ -109,9 +129,8 @@ class TestAgentViews(TacticalTestCase): | ||||
|         self.check_not_authenticated("get", url) | ||||
|  | ||||
|     @patch("agents.models.Agent.nats_cmd") | ||||
|     @patch("agents.tasks.uninstall_agent_task.delay") | ||||
|     @patch("agents.views.reload_nats") | ||||
|     def test_uninstall(self, reload_nats, mock_task, nats_cmd): | ||||
|     def test_uninstall(self, reload_nats, nats_cmd): | ||||
|         url = "/agents/uninstall/" | ||||
|         data = {"pk": self.agent.pk} | ||||
|  | ||||
| @@ -120,13 +139,18 @@ class TestAgentViews(TacticalTestCase): | ||||
|  | ||||
|         nats_cmd.assert_called_with({"func": "uninstall"}, wait=False) | ||||
|         reload_nats.assert_called_once() | ||||
|         mock_task.assert_called_with(self.agent.salt_id, True) | ||||
|  | ||||
|         self.check_not_authenticated("delete", url) | ||||
|  | ||||
|     @patch("agents.models.Agent.nats_cmd") | ||||
|     def test_get_processes(self, mock_ret): | ||||
|         url = f"/agents/{self.agent.pk}/getprocs/" | ||||
|         agent_old = baker.make_recipe("agents.online_agent", version="1.1.12") | ||||
|         url_old = f"/agents/{agent_old.pk}/getprocs/" | ||||
|         r = self.client.get(url_old) | ||||
|         self.assertEqual(r.status_code, 400) | ||||
|  | ||||
|         agent = baker.make_recipe("agents.online_agent", version="1.2.0") | ||||
|         url = f"/agents/{agent.pk}/getprocs/" | ||||
|  | ||||
|         with open( | ||||
|             os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/procs.json") | ||||
| @@ -136,9 +160,7 @@ class TestAgentViews(TacticalTestCase): | ||||
|         r = self.client.get(url) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         assert any(i["name"] == "Registry" for i in mock_ret.return_value) | ||||
|         assert any( | ||||
|             i["memory_percent"] == 0.004843281375620747 for i in mock_ret.return_value | ||||
|         ) | ||||
|         assert any(i["membytes"] == 434655234324 for i in mock_ret.return_value) | ||||
|  | ||||
|         mock_ret.return_value = "timeout" | ||||
|         r = self.client.get(url) | ||||
| @@ -165,18 +187,44 @@ class TestAgentViews(TacticalTestCase): | ||||
|         self.check_not_authenticated("get", url) | ||||
|  | ||||
|     @patch("agents.models.Agent.nats_cmd") | ||||
|     def test_get_event_log(self, mock_ret): | ||||
|         url = f"/agents/{self.agent.pk}/geteventlog/Application/30/" | ||||
|     def test_get_event_log(self, nats_cmd): | ||||
|         url = f"/agents/{self.agent.pk}/geteventlog/Application/22/" | ||||
|  | ||||
|         with open( | ||||
|             os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/appeventlog.json") | ||||
|         ) as f: | ||||
|             mock_ret.return_value = json.load(f) | ||||
|             nats_cmd.return_value = json.load(f) | ||||
|  | ||||
|         r = self.client.get(url) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         nats_cmd.assert_called_with( | ||||
|             { | ||||
|                 "func": "eventlog", | ||||
|                 "timeout": 30, | ||||
|                 "payload": { | ||||
|                     "logname": "Application", | ||||
|                     "days": str(22), | ||||
|                 }, | ||||
|             }, | ||||
|             timeout=32, | ||||
|         ) | ||||
|  | ||||
|         mock_ret.return_value = "timeout" | ||||
|         url = f"/agents/{self.agent.pk}/geteventlog/Security/6/" | ||||
|         r = self.client.get(url) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         nats_cmd.assert_called_with( | ||||
|             { | ||||
|                 "func": "eventlog", | ||||
|                 "timeout": 180, | ||||
|                 "payload": { | ||||
|                     "logname": "Security", | ||||
|                     "days": str(6), | ||||
|                 }, | ||||
|             }, | ||||
|             timeout=182, | ||||
|         ) | ||||
|  | ||||
|         nats_cmd.return_value = "timeout" | ||||
|         r = self.client.get(url) | ||||
|         self.assertEqual(r.status_code, 400) | ||||
|  | ||||
| @@ -311,7 +359,6 @@ class TestAgentViews(TacticalTestCase): | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertIn("rdp", r.json()["cmd"]) | ||||
|         self.assertNotIn("power", r.json()["cmd"]) | ||||
|         self.assertNotIn("ping", r.json()["cmd"]) | ||||
|  | ||||
|         data.update({"ping": 1, "power": 1}) | ||||
|         r = self.client.post(url, data, format="json") | ||||
| @@ -330,7 +377,7 @@ class TestAgentViews(TacticalTestCase): | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|  | ||||
|         data["mode"] = "salt" | ||||
|         data["mode"] = "mesh" | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 400) | ||||
|         self.assertIn("pending", r.json()) | ||||
| @@ -350,7 +397,7 @@ class TestAgentViews(TacticalTestCase): | ||||
|  | ||||
|         self.agent.version = "0.9.4" | ||||
|         self.agent.save(update_fields=["version"]) | ||||
|         data["mode"] = "salt" | ||||
|         data["mode"] = "mesh" | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 400) | ||||
|         self.assertIn("0.9.5", r.json()) | ||||
| @@ -384,6 +431,7 @@ class TestAgentViews(TacticalTestCase): | ||||
|             "site": site.id, | ||||
|             "monitoring_type": "workstation", | ||||
|             "description": "asjdk234andasd", | ||||
|             "offline_time": 4, | ||||
|             "overdue_time": 300, | ||||
|             "check_interval": 60, | ||||
|             "overdue_email_alert": True, | ||||
| @@ -482,42 +530,20 @@ class TestAgentViews(TacticalTestCase): | ||||
|     def test_overdue_action(self): | ||||
|         url = "/agents/overdueaction/" | ||||
|  | ||||
|         payload = {"pk": self.agent.pk, "alertType": "email", "action": "enabled"} | ||||
|         payload = {"pk": self.agent.pk, "overdue_email_alert": True} | ||||
|         r = self.client.post(url, payload, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         agent = Agent.objects.get(pk=self.agent.pk) | ||||
|         self.assertTrue(agent.overdue_email_alert) | ||||
|         self.assertEqual(self.agent.hostname, r.data) | ||||
|  | ||||
|         payload.update({"alertType": "email", "action": "disabled"}) | ||||
|         r = self.client.post(url, payload, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         agent = Agent.objects.get(pk=self.agent.pk) | ||||
|         self.assertFalse(agent.overdue_email_alert) | ||||
|         self.assertEqual(self.agent.hostname, r.data) | ||||
|  | ||||
|         payload.update({"alertType": "text", "action": "enabled"}) | ||||
|         r = self.client.post(url, payload, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         agent = Agent.objects.get(pk=self.agent.pk) | ||||
|         self.assertTrue(agent.overdue_text_alert) | ||||
|         self.assertEqual(self.agent.hostname, r.data) | ||||
|  | ||||
|         payload.update({"alertType": "text", "action": "disabled"}) | ||||
|         payload = {"pk": self.agent.pk, "overdue_text_alert": False} | ||||
|         r = self.client.post(url, payload, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         agent = Agent.objects.get(pk=self.agent.pk) | ||||
|         self.assertFalse(agent.overdue_text_alert) | ||||
|         self.assertEqual(self.agent.hostname, r.data) | ||||
|  | ||||
|         payload.update({"alertType": "email", "action": "523423"}) | ||||
|         r = self.client.post(url, payload, format="json") | ||||
|         self.assertEqual(r.status_code, 400) | ||||
|  | ||||
|         payload.update({"alertType": "text", "action": "asdasd3434asdasd"}) | ||||
|         r = self.client.post(url, payload, format="json") | ||||
|         self.assertEqual(r.status_code, 400) | ||||
|  | ||||
|         self.check_not_authenticated("post", url) | ||||
|  | ||||
|     def test_list_agents_no_detail(self): | ||||
| @@ -538,7 +564,7 @@ class TestAgentViews(TacticalTestCase): | ||||
|  | ||||
|         self.check_not_authenticated("get", url) | ||||
|  | ||||
|     @patch("winupdate.tasks.bulk_check_for_updates_task.delay") | ||||
|     """ @patch("winupdate.tasks.bulk_check_for_updates_task.delay") | ||||
|     @patch("scripts.tasks.handle_bulk_script_task.delay") | ||||
|     @patch("scripts.tasks.handle_bulk_command_task.delay") | ||||
|     @patch("agents.models.Agent.salt_batch_async") | ||||
| @@ -549,6 +575,7 @@ class TestAgentViews(TacticalTestCase): | ||||
|  | ||||
|         payload = { | ||||
|             "mode": "command", | ||||
|             "monType": "all", | ||||
|             "target": "agents", | ||||
|             "client": None, | ||||
|             "site": None, | ||||
| @@ -566,6 +593,7 @@ class TestAgentViews(TacticalTestCase): | ||||
|  | ||||
|         payload = { | ||||
|             "mode": "command", | ||||
|             "monType": "servers", | ||||
|             "target": "agents", | ||||
|             "client": None, | ||||
|             "site": None, | ||||
| @@ -580,12 +608,11 @@ class TestAgentViews(TacticalTestCase): | ||||
|  | ||||
|         payload = { | ||||
|             "mode": "command", | ||||
|             "monType": "workstations", | ||||
|             "target": "client", | ||||
|             "client": self.agent.client.id, | ||||
|             "site": None, | ||||
|             "agentPKs": [ | ||||
|                 self.agent.pk, | ||||
|             ], | ||||
|             "agentPKs": [], | ||||
|             "cmd": "gpupdate /force", | ||||
|             "timeout": 300, | ||||
|             "shell": "cmd", | ||||
| @@ -597,6 +624,7 @@ class TestAgentViews(TacticalTestCase): | ||||
|  | ||||
|         payload = { | ||||
|             "mode": "command", | ||||
|             "monType": "all", | ||||
|             "target": "client", | ||||
|             "client": self.agent.client.id, | ||||
|             "site": self.agent.site.id, | ||||
| @@ -614,6 +642,7 @@ class TestAgentViews(TacticalTestCase): | ||||
|  | ||||
|         payload = { | ||||
|             "mode": "scan", | ||||
|             "monType": "all", | ||||
|             "target": "agents", | ||||
|             "client": None, | ||||
|             "site": None, | ||||
| @@ -627,6 +656,7 @@ class TestAgentViews(TacticalTestCase): | ||||
|  | ||||
|         payload = { | ||||
|             "mode": "install", | ||||
|             "monType": "all", | ||||
|             "target": "client", | ||||
|             "client": self.agent.client.id, | ||||
|             "site": None, | ||||
| @@ -648,7 +678,7 @@ class TestAgentViews(TacticalTestCase): | ||||
|  | ||||
|         # TODO mock the script | ||||
|  | ||||
|         self.check_not_authenticated("post", url) | ||||
|         self.check_not_authenticated("post", url) """ | ||||
|  | ||||
|     @patch("agents.models.Agent.nats_cmd") | ||||
|     def test_recover_mesh(self, nats_cmd): | ||||
| @@ -675,6 +705,7 @@ class TestAgentViews(TacticalTestCase): | ||||
| class TestAgentViewsNew(TacticalTestCase): | ||||
|     def setUp(self): | ||||
|         self.authenticate() | ||||
|         self.setup_coresettings() | ||||
|  | ||||
|     def test_agent_counts(self): | ||||
|         url = "/agents/agent_counts/" | ||||
| @@ -685,15 +716,12 @@ class TestAgentViewsNew(TacticalTestCase): | ||||
|             monitoring_type=cycle(["server", "workstation"]), | ||||
|             _quantity=6, | ||||
|         ) | ||||
|         agents = baker.make_recipe( | ||||
|         baker.make_recipe( | ||||
|             "agents.overdue_agent", | ||||
|             monitoring_type=cycle(["server", "workstation"]), | ||||
|             _quantity=6, | ||||
|         ) | ||||
|  | ||||
|         # make an AgentOutage for every overdue agent | ||||
|         baker.make("agents.AgentOutage", agent=cycle(agents), _quantity=6) | ||||
|  | ||||
|         # returned data should be this | ||||
|         data = { | ||||
|             "total_server_count": 6, | ||||
| @@ -750,142 +778,102 @@ class TestAgentTasks(TacticalTestCase): | ||||
|         self.authenticate() | ||||
|         self.setup_coresettings() | ||||
|  | ||||
|     @patch("agents.models.Agent.salt_api_cmd") | ||||
|     def test_sync_salt_modules_task(self, salt_api_cmd): | ||||
|         self.agent = baker.make_recipe("agents.agent") | ||||
|         salt_api_cmd.return_value = {"return": [{f"{self.agent.salt_id}": []}]} | ||||
|         ret = sync_salt_modules_task.s(self.agent.pk).apply() | ||||
|         salt_api_cmd.assert_called_with(timeout=35, func="saltutil.sync_modules") | ||||
|     @patch("agents.models.Agent.nats_cmd") | ||||
|     def test_agent_update(self, nats_cmd): | ||||
|         from agents.tasks import agent_update | ||||
|  | ||||
|         agent_noarch = baker.make_recipe( | ||||
|             "agents.agent", | ||||
|             operating_system="Error getting OS", | ||||
|             version=settings.LATEST_AGENT_VER, | ||||
|         ) | ||||
|         r = agent_update(agent_noarch.pk) | ||||
|         self.assertEqual(r, "noarch") | ||||
|  | ||||
|         agent_1111 = baker.make_recipe( | ||||
|             "agents.agent", | ||||
|             operating_system="Windows 10 Pro, 64 bit (build 19041.450)", | ||||
|             version="1.1.11", | ||||
|         ) | ||||
|         r = agent_update(agent_1111.pk) | ||||
|         self.assertEqual(r, "not supported") | ||||
|  | ||||
|         agent64_1112 = baker.make_recipe( | ||||
|             "agents.agent", | ||||
|             operating_system="Windows 10 Pro, 64 bit (build 19041.450)", | ||||
|             version="1.1.12", | ||||
|         ) | ||||
|  | ||||
|         r = agent_update(agent64_1112.pk) | ||||
|         self.assertEqual(r, "created") | ||||
|         action = PendingAction.objects.get(agent__pk=agent64_1112.pk) | ||||
|         self.assertEqual(action.action_type, "agentupdate") | ||||
|         self.assertEqual(action.status, "pending") | ||||
|         self.assertEqual( | ||||
|             action.details["url"], | ||||
|             "https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/winagent-v1.3.0.exe", | ||||
|         ) | ||||
|         self.assertEqual( | ||||
|             ret.result, f"Successfully synced salt modules on {self.agent.hostname}" | ||||
|         ) | ||||
|         self.assertEqual(ret.status, "SUCCESS") | ||||
|  | ||||
|         salt_api_cmd.return_value = "timeout" | ||||
|         ret = sync_salt_modules_task.s(self.agent.pk).apply() | ||||
|         self.assertEqual(ret.result, f"Unable to sync modules {self.agent.salt_id}") | ||||
|  | ||||
|         salt_api_cmd.return_value = "error" | ||||
|         ret = sync_salt_modules_task.s(self.agent.pk).apply() | ||||
|         self.assertEqual(ret.result, f"Unable to sync modules {self.agent.salt_id}") | ||||
|  | ||||
|     @patch("agents.models.Agent.salt_batch_async", return_value=None) | ||||
|     @patch("agents.tasks.sleep", return_value=None) | ||||
|     def test_batch_sync_modules_task(self, mock_sleep, salt_batch_async): | ||||
|         # chunks of 50, should run 4 times | ||||
|         baker.make_recipe( | ||||
|             "agents.online_agent", last_seen=djangotime.now(), _quantity=60 | ||||
|         ) | ||||
|         self.assertEqual(action.details["inno"], "winagent-v1.3.0.exe") | ||||
|         self.assertEqual(action.details["version"], "1.3.0") | ||||
|         nats_cmd.assert_called_with( | ||||
|             { | ||||
|                 "func": "agentupdate", | ||||
|                 "payload": { | ||||
|                     "url": "https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/winagent-v1.3.0.exe", | ||||
|                     "version": "1.3.0", | ||||
|                     "inno": "winagent-v1.3.0.exe", | ||||
|                 }, | ||||
|             }, | ||||
|             wait=False, | ||||
|         ) | ||||
|         baker.make_recipe( | ||||
|             "agents.overdue_agent", | ||||
|             last_seen=djangotime.now() - djangotime.timedelta(minutes=9), | ||||
|             _quantity=115, | ||||
|         ) | ||||
|         ret = batch_sync_modules_task.s().apply() | ||||
|         self.assertEqual(salt_batch_async.call_count, 4) | ||||
|         self.assertEqual(ret.status, "SUCCESS") | ||||
|  | ||||
|     @patch("agents.models.Agent.salt_api_async") | ||||
|     @patch("agents.tasks.sleep", return_value=None) | ||||
|     def test_auto_self_agent_update_task(self, mock_sleep, salt_api_async): | ||||
|         # test 64bit golang agent | ||||
|         self.agent64 = baker.make_recipe( | ||||
|         agent_64_130 = baker.make_recipe( | ||||
|             "agents.agent", | ||||
|             operating_system="Windows 10 Pro, 64 bit (build 19041.450)", | ||||
|             version="1.0.0", | ||||
|             version="1.3.0", | ||||
|         ) | ||||
|         salt_api_async.return_value = True | ||||
|         ret = auto_self_agent_update_task.s().apply() | ||||
|         salt_api_async.assert_called_with( | ||||
|             func="win_agent.do_agent_update_v2", | ||||
|             kwargs={ | ||||
|                 "inno": f"winagent-v{settings.LATEST_AGENT_VER}.exe", | ||||
|                 "url": settings.DL_64, | ||||
|         nats_cmd.return_value = "ok" | ||||
|         r = agent_update(agent_64_130.pk) | ||||
|         self.assertEqual(r, "created") | ||||
|         nats_cmd.assert_called_with( | ||||
|             { | ||||
|                 "func": "agentupdate", | ||||
|                 "payload": { | ||||
|                     "url": settings.DL_64, | ||||
|                     "version": settings.LATEST_AGENT_VER, | ||||
|                     "inno": f"winagent-v{settings.LATEST_AGENT_VER}.exe", | ||||
|                 }, | ||||
|             }, | ||||
|             wait=False, | ||||
|         ) | ||||
|         self.assertEqual(ret.status, "SUCCESS") | ||||
|         self.agent64.delete() | ||||
|         salt_api_async.reset_mock() | ||||
|         action = PendingAction.objects.get(agent__pk=agent_64_130.pk) | ||||
|         self.assertEqual(action.action_type, "agentupdate") | ||||
|         self.assertEqual(action.status, "pending") | ||||
|  | ||||
|         # test 32bit golang agent | ||||
|         self.agent32 = baker.make_recipe( | ||||
|             "agents.agent", | ||||
|             operating_system="Windows 7 Professional, 32 bit (build 7601.24544)", | ||||
|             version="1.0.0", | ||||
|         ) | ||||
|         salt_api_async.return_value = True | ||||
|         ret = auto_self_agent_update_task.s().apply() | ||||
|         salt_api_async.assert_called_with( | ||||
|             func="win_agent.do_agent_update_v2", | ||||
|             kwargs={ | ||||
|                 "inno": f"winagent-v{settings.LATEST_AGENT_VER}-x86.exe", | ||||
|                 "url": settings.DL_32, | ||||
|             }, | ||||
|         ) | ||||
|         self.assertEqual(ret.status, "SUCCESS") | ||||
|         self.agent32.delete() | ||||
|         salt_api_async.reset_mock() | ||||
|  | ||||
|         # test agent that has a null os field | ||||
|         self.agentNone = baker.make_recipe( | ||||
|             "agents.agent", | ||||
|             operating_system=None, | ||||
|             version="1.0.0", | ||||
|         ) | ||||
|         ret = auto_self_agent_update_task.s().apply() | ||||
|         salt_api_async.assert_not_called() | ||||
|         self.agentNone.delete() | ||||
|         salt_api_async.reset_mock() | ||||
|  | ||||
|         # test auto update disabled in global settings | ||||
|         self.agent64 = baker.make_recipe( | ||||
|     @patch("agents.tasks.agent_update") | ||||
|     @patch("agents.tasks.sleep", return_value=None) | ||||
|     def test_auto_self_agent_update_task(self, mock_sleep, agent_update): | ||||
|         baker.make_recipe( | ||||
|             "agents.agent", | ||||
|             operating_system="Windows 10 Pro, 64 bit (build 19041.450)", | ||||
|             version="1.0.0", | ||||
|             version=settings.LATEST_AGENT_VER, | ||||
|             _quantity=23, | ||||
|         ) | ||||
|         baker.make_recipe( | ||||
|             "agents.agent", | ||||
|             operating_system="Windows 10 Pro, 64 bit (build 19041.450)", | ||||
|             version="1.3.0", | ||||
|             _quantity=33, | ||||
|         ) | ||||
|  | ||||
|         self.coresettings.agent_auto_update = False | ||||
|         self.coresettings.save(update_fields=["agent_auto_update"]) | ||||
|         ret = auto_self_agent_update_task.s().apply() | ||||
|         salt_api_async.assert_not_called() | ||||
|  | ||||
|         # reset core settings | ||||
|         self.agent64.delete() | ||||
|         salt_api_async.reset_mock() | ||||
|         r = auto_self_agent_update_task.s().apply() | ||||
|         self.assertEqual(agent_update.call_count, 0) | ||||
|  | ||||
|         self.coresettings.agent_auto_update = True | ||||
|         self.coresettings.save(update_fields=["agent_auto_update"]) | ||||
|  | ||||
|         # test 64bit python agent | ||||
|         self.agent64py = baker.make_recipe( | ||||
|             "agents.agent", | ||||
|             operating_system="Windows 10 Pro, 64 bit (build 19041.450)", | ||||
|             version="0.11.1", | ||||
|         ) | ||||
|         salt_api_async.return_value = True | ||||
|         ret = auto_self_agent_update_task.s().apply() | ||||
|         salt_api_async.assert_called_with( | ||||
|             func="win_agent.do_agent_update_v2", | ||||
|             kwargs={ | ||||
|                 "inno": "winagent-v0.11.2.exe", | ||||
|                 "url": OLD_64_PY_AGENT, | ||||
|             }, | ||||
|         ) | ||||
|         self.assertEqual(ret.status, "SUCCESS") | ||||
|         self.agent64py.delete() | ||||
|         salt_api_async.reset_mock() | ||||
|  | ||||
|         # test 32bit python agent | ||||
|         self.agent32py = baker.make_recipe( | ||||
|             "agents.agent", | ||||
|             operating_system="Windows 7 Professional, 32 bit (build 7601.24544)", | ||||
|             version="0.11.1", | ||||
|         ) | ||||
|         salt_api_async.return_value = True | ||||
|         ret = auto_self_agent_update_task.s().apply() | ||||
|         salt_api_async.assert_called_with( | ||||
|             func="win_agent.do_agent_update_v2", | ||||
|             kwargs={ | ||||
|                 "inno": "winagent-v0.11.2-x86.exe", | ||||
|                 "url": OLD_32_PY_AGENT, | ||||
|             }, | ||||
|         ) | ||||
|         self.assertEqual(ret.status, "SUCCESS") | ||||
|         r = auto_self_agent_update_task.s().apply() | ||||
|         self.assertEqual(agent_update.call_count, 33) | ||||
|   | ||||
| @@ -7,6 +7,7 @@ import random | ||||
| import string | ||||
| import datetime as dt | ||||
| from packaging import version as pyver | ||||
| from typing import List | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.shortcuts import get_object_or_404 | ||||
| @@ -17,7 +18,7 @@ from rest_framework.views import APIView | ||||
| from rest_framework.response import Response | ||||
| from rest_framework import status, generics | ||||
|  | ||||
| from .models import Agent, AgentOutage, RecoveryAction, Note | ||||
| from .models import Agent, RecoveryAction, Note | ||||
| from core.models import CoreSettings | ||||
| from scripts.models import Script | ||||
| from logs.models import AuditLog, PendingAction | ||||
| @@ -29,11 +30,15 @@ from .serializers import ( | ||||
|     AgentEditSerializer, | ||||
|     NoteSerializer, | ||||
|     NotesSerializer, | ||||
|     AgentOverdueActionSerializer, | ||||
| ) | ||||
| from winupdate.serializers import WinUpdatePolicySerializer | ||||
|  | ||||
| from .tasks import uninstall_agent_task, send_agent_update_task | ||||
| from winupdate.tasks import bulk_check_for_updates_task | ||||
| from .tasks import ( | ||||
|     send_agent_update_task, | ||||
|     run_script_email_results_task, | ||||
| ) | ||||
| from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task | ||||
| from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task | ||||
|  | ||||
| from tacticalrmm.utils import notify_error, reload_nats | ||||
| @@ -54,9 +59,13 @@ def get_agent_versions(request): | ||||
|  | ||||
| @api_view(["POST"]) | ||||
| def update_agents(request): | ||||
|     pks = request.data["pks"] | ||||
|     version = request.data["version"] | ||||
|     send_agent_update_task.delay(pks=pks, version=version) | ||||
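|     # filter server side so only agents below the latest version get queued | ||||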
|     q = Agent.objects.filter(pk__in=request.data["pks"]).only("pk", "version") | ||||
|     pks: List[int] = [ | ||||
|         i.pk | ||||
|         for i in q | ||||
|         if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER) | ||||
|     ] | ||||
|     send_agent_update_task.delay(pks=pks) | ||||
|     return Response("ok") | ||||
|  | ||||
|  | ||||
| @@ -68,10 +77,6 @@ def ping(request, pk): | ||||
|         r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=5)) | ||||
|         if r == "pong": | ||||
|             status = "online" | ||||
|     else: | ||||
|         r = agent.salt_api_cmd(timeout=5, func="test.ping") | ||||
|         if isinstance(r, bool) and r: | ||||
|             status = "online" | ||||
|  | ||||
|     return Response({"name": agent.hostname, "status": status}) | ||||
|  | ||||
| @@ -82,13 +87,9 @@ def uninstall(request): | ||||
|     if agent.has_nats: | ||||
|         asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False)) | ||||
|  | ||||
|     salt_id = agent.salt_id | ||||
|     name = agent.hostname | ||||
|     has_nats = agent.has_nats | ||||
|     agent.delete() | ||||
|     reload_nats() | ||||
|  | ||||
|     uninstall_agent_task.delay(salt_id, has_nats) | ||||
|     return Response(f"{name} will now be uninstalled.") | ||||
|  | ||||
|  | ||||
| @@ -96,22 +97,17 @@ def uninstall(request): | ||||
| def edit_agent(request): | ||||
|     agent = get_object_or_404(Agent, pk=request.data["id"]) | ||||
|  | ||||
|     old_site = agent.site.pk | ||||
|     a_serializer = AgentSerializer(instance=agent, data=request.data, partial=True) | ||||
|     a_serializer.is_valid(raise_exception=True) | ||||
|     a_serializer.save() | ||||
|  | ||||
|     policy = agent.winupdatepolicy.get() | ||||
|     p_serializer = WinUpdatePolicySerializer( | ||||
|         instance=policy, data=request.data["winupdatepolicy"][0] | ||||
|     ) | ||||
|     p_serializer.is_valid(raise_exception=True) | ||||
|     p_serializer.save() | ||||
|  | ||||
|     # check if site changed and initiate generating correct policies | ||||
|     if old_site != request.data["site"]: | ||||
|         agent.generate_checks_from_policies(clear=True) | ||||
|         agent.generate_tasks_from_policies(clear=True) | ||||
|     if "winupdatepolicy" in request.data.keys(): | ||||
|         policy = agent.winupdatepolicy.get() | ||||
|         p_serializer = WinUpdatePolicySerializer( | ||||
|             instance=policy, data=request.data["winupdatepolicy"][0] | ||||
|         ) | ||||
|         p_serializer.is_valid(raise_exception=True) | ||||
|         p_serializer.save() | ||||
|  | ||||
|     return Response("ok") | ||||
|  | ||||
| @@ -155,12 +151,12 @@ def agent_detail(request, pk): | ||||
| @api_view() | ||||
| def get_processes(request, pk): | ||||
|     agent = get_object_or_404(Agent, pk=pk) | ||||
|     if not agent.has_nats: | ||||
|         return notify_error("Requires agent version 1.1.0 or greater") | ||||
|     if pyver.parse(agent.version) < pyver.parse("1.2.0"): | ||||
|         return notify_error("Requires agent version 1.2.0 or greater") | ||||
|  | ||||
|     r = asyncio.run(agent.nats_cmd(data={"func": "procs"}, timeout=5)) | ||||
|     if r == "timeout": | ||||
|         return notify_error("Unable to contact the agent") | ||||
|  | ||||
|     return Response(r) | ||||
|  | ||||
|  | ||||
| @@ -187,15 +183,16 @@ def get_event_log(request, pk, logtype, days): | ||||
|     agent = get_object_or_404(Agent, pk=pk) | ||||
|     if not agent.has_nats: | ||||
|         return notify_error("Requires agent version 1.1.0 or greater") | ||||
|     timeout = 180 if logtype == "Security" else 30 | ||||
|     data = { | ||||
|         "func": "eventlog", | ||||
|         "timeout": 30, | ||||
|         "timeout": timeout, | ||||
|         "payload": { | ||||
|             "logname": logtype, | ||||
|             "days": str(days), | ||||
|         }, | ||||
|     } | ||||
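|     # give the NATS request a small buffer beyond the agent-side timeout | ||||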
|     r = asyncio.run(agent.nats_cmd(data, timeout=32)) | ||||
|     r = asyncio.run(agent.nats_cmd(data, timeout=timeout + 2)) | ||||
|     if r == "timeout": | ||||
|         return notify_error("Unable to contact the agent") | ||||
|  | ||||
| @@ -246,6 +243,7 @@ class AgentsTableList(generics.ListAPIView): | ||||
|             "overdue_text_alert", | ||||
|             "overdue_email_alert", | ||||
|             "overdue_time", | ||||
|             "offline_time", | ||||
|             "last_seen", | ||||
|             "boot_time", | ||||
|             "logged_in_username", | ||||
| @@ -294,6 +292,7 @@ def by_client(request, clientpk): | ||||
|             "overdue_text_alert", | ||||
|             "overdue_email_alert", | ||||
|             "overdue_time", | ||||
|             "offline_time", | ||||
|             "last_seen", | ||||
|             "boot_time", | ||||
|             "logged_in_username", | ||||
| @@ -323,6 +322,7 @@ def by_site(request, sitepk): | ||||
|             "overdue_text_alert", | ||||
|             "overdue_email_alert", | ||||
|             "overdue_time", | ||||
|             "offline_time", | ||||
|             "last_seen", | ||||
|             "boot_time", | ||||
|             "logged_in_username", | ||||
| @@ -337,26 +337,12 @@ def by_site(request, sitepk): | ||||
|  | ||||
| @api_view(["POST"]) | ||||
| def overdue_action(request): | ||||
|     pk = request.data["pk"] | ||||
|     alert_type = request.data["alertType"] | ||||
|     action = request.data["action"] | ||||
|     agent = get_object_or_404(Agent, pk=pk) | ||||
|     if alert_type == "email" and action == "enabled": | ||||
|         agent.overdue_email_alert = True | ||||
|         agent.save(update_fields=["overdue_email_alert"]) | ||||
|     elif alert_type == "email" and action == "disabled": | ||||
|         agent.overdue_email_alert = False | ||||
|         agent.save(update_fields=["overdue_email_alert"]) | ||||
|     elif alert_type == "text" and action == "enabled": | ||||
|         agent.overdue_text_alert = True | ||||
|         agent.save(update_fields=["overdue_text_alert"]) | ||||
|     elif alert_type == "text" and action == "disabled": | ||||
|         agent.overdue_text_alert = False | ||||
|         agent.save(update_fields=["overdue_text_alert"]) | ||||
|     else: | ||||
|         return Response( | ||||
|             {"error": "Something went wrong"}, status=status.HTTP_400_BAD_REQUEST | ||||
|         ) | ||||
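|     # validation is delegated to the serializer; partial=True lets the client send any subset of the overdue alert fields | ||||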
|     agent = get_object_or_404(Agent, pk=request.data["pk"]) | ||||
|     serializer = AgentOverdueActionSerializer( | ||||
|         instance=agent, data=request.data, partial=True | ||||
|     ) | ||||
|     serializer.is_valid(raise_exception=True) | ||||
|     serializer.save() | ||||
|     return Response(agent.hostname) | ||||
|  | ||||
|  | ||||
| @@ -477,7 +463,7 @@ def install_agent(request): | ||||
|             f"GOARCH={goarch}", | ||||
|             go_bin, | ||||
|             "build", | ||||
|             f"-ldflags=\"-X 'main.Inno={inno}'", | ||||
|             f"-ldflags=\"-s -w -X 'main.Inno={inno}'", | ||||
|             f"-X 'main.Api={api}'", | ||||
|             f"-X 'main.Client={client_id}'", | ||||
|             f"-X 'main.Site={site_id}'", | ||||
| @@ -575,12 +561,10 @@ def install_agent(request): | ||||
|             "/VERYSILENT", | ||||
|             "/SUPPRESSMSGBOXES", | ||||
|             "&&", | ||||
|             "timeout", | ||||
|             "/t", | ||||
|             "10", | ||||
|             "/nobreak", | ||||
|             ">", | ||||
|             "NUL", | ||||
|             "ping", | ||||
|             "127.0.0.1", | ||||
|             "-n", | ||||
|             "5", | ||||
|             "&&", | ||||
|             r'"C:\Program Files\TacticalAgent\tacticalrmm.exe"', | ||||
|             "-m", | ||||
| @@ -607,8 +591,6 @@ def install_agent(request): | ||||
|         resp = { | ||||
|             "cmd": " ".join(str(i) for i in cmd), | ||||
|             "url": download_url, | ||||
|             "salt64": settings.SALT_64, | ||||
|             "salt32": settings.SALT_32, | ||||
|         } | ||||
|  | ||||
|         return Response(resp) | ||||
| @@ -669,17 +651,12 @@ def recover(request): | ||||
|         return notify_error("Only available in agent version greater than 0.9.5") | ||||
|  | ||||
|     if not agent.has_nats: | ||||
|         if mode == "tacagent" or mode == "checkrunner" or mode == "rpc": | ||||
|         if mode == "tacagent" or mode == "rpc": | ||||
|             return notify_error("Requires agent version 1.1.0 or greater") | ||||
|  | ||||
|     # attempt a realtime recovery if supported, otherwise fall back to old recovery method | ||||
|     if agent.has_nats: | ||||
|         if ( | ||||
|             mode == "tacagent" | ||||
|             or mode == "checkrunner" | ||||
|             or mode == "salt" | ||||
|             or mode == "mesh" | ||||
|         ): | ||||
|         if mode == "tacagent" or mode == "mesh": | ||||
|             data = {"func": "recover", "payload": {"mode": mode}} | ||||
|             r = asyncio.run(agent.nats_cmd(data, timeout=10)) | ||||
|             if r == "ok": | ||||
| @@ -725,22 +702,27 @@ def run_script(request): | ||||
|         script=script.name, | ||||
|     ) | ||||
|  | ||||
|     data = { | ||||
|         "func": "runscript", | ||||
|         "timeout": request.data["timeout"], | ||||
|         "script_args": request.data["args"], | ||||
|         "payload": { | ||||
|             "code": script.code, | ||||
|             "shell": script.shell, | ||||
|         }, | ||||
|     } | ||||
|  | ||||
|     if output == "wait": | ||||
|         r = asyncio.run(agent.nats_cmd(data, timeout=req_timeout)) | ||||
|         r = agent.run_script(scriptpk=script.pk, timeout=req_timeout, wait=True) | ||||
|         return Response(r) | ||||
|  | ||||
|     elif output == "email": | ||||
|         if not pyver.parse(agent.version) >= pyver.parse("1.1.12"): | ||||
|             return notify_error("Requires agent version 1.1.12 or greater") | ||||
|  | ||||
|         emails = ( | ||||
|             [] if request.data["emailmode"] == "default" else request.data["emails"] | ||||
|         ) | ||||
|         run_script_email_results_task.delay( | ||||
|             agentpk=agent.pk, | ||||
|             scriptpk=script.pk, | ||||
|             nats_timeout=req_timeout, | ||||
|             emails=emails, | ||||
|         ) | ||||
|     else: | ||||
|         asyncio.run(agent.nats_cmd(data, wait=False)) | ||||
|         return Response(f"{script.name} will now be run on {agent.hostname}") | ||||
|         agent.run_script(scriptpk=script.pk, timeout=req_timeout) | ||||
|  | ||||
|     return Response(f"{script.name} will now be run on {agent.hostname}") | ||||
|  | ||||
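To recap the three output modes the rewritten run_script view handles, a hedged usage sketch using only the calls that appear in the hunk above (the timeout value is illustrative):

# Illustrative only -- mirrors the branches in run_script() above
req_timeout = 90  # assumed example value

# output == "wait": block until the script result comes back
result = agent.run_script(scriptpk=script.pk, timeout=req_timeout, wait=True)

# output == "email": queue a task that runs the script and emails the results
run_script_email_results_task.delay(
    agentpk=agent.pk, scriptpk=script.pk, nats_timeout=req_timeout, emails=[]
)

# any other output: fire and forget
agent.run_script(scriptpk=script.pk, timeout=req_timeout)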
|  | ||||
| @api_view() | ||||
| @@ -821,12 +803,16 @@ def bulk(request): | ||||
|     elif request.data["target"] == "agents": | ||||
|         q = Agent.objects.filter(pk__in=request.data["agentPKs"]) | ||||
|     elif request.data["target"] == "all": | ||||
|         q = Agent.objects.all() | ||||
|         q = Agent.objects.only("pk", "monitoring_type") | ||||
|     else: | ||||
|         return notify_error("Something went wrong") | ||||
|  | ||||
|     minions = [agent.salt_id for agent in q] | ||||
|     agents = [agent.pk for agent in q] | ||||
|     if request.data["monType"] == "servers": | ||||
|         q = q.filter(monitoring_type="server") | ||||
|     elif request.data["monType"] == "workstations": | ||||
|         q = q.filter(monitoring_type="workstation") | ||||
|  | ||||
|     agents: List[int] = [agent.pk for agent in q] | ||||
|  | ||||
|     AuditLog.audit_bulk_action(request.user, request.data["mode"], request.data) | ||||
|  | ||||
| @@ -844,14 +830,12 @@ def bulk(request): | ||||
|         return Response(f"{script.name} will now be run on {len(agents)} agents") | ||||
|  | ||||
|     elif request.data["mode"] == "install": | ||||
|         r = Agent.salt_batch_async(minions=minions, func="win_agent.install_updates") | ||||
|         if r == "timeout": | ||||
|             return notify_error("Salt API not running") | ||||
|         bulk_install_updates_task.delay(agents) | ||||
|         return Response( | ||||
|             f"Pending updates will now be installed on {len(agents)} agents" | ||||
|         ) | ||||
|     elif request.data["mode"] == "scan": | ||||
|         bulk_check_for_updates_task.delay(minions=minions) | ||||
|         bulk_check_for_updates_task.delay(agents) | ||||
|         return Response(f"Patch status scan will now run on {len(agents)} agents") | ||||
|  | ||||
|     return notify_error("Something went wrong") | ||||
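For reference, the shape of request body the bulk view above expects; field names are taken from the hunk, the values are invented for illustration:

# Illustration only -- example request.data for the bulk view
data = {
    "target": "all",       # or "client", "site", "agents"
    "monType": "servers",  # or "workstations"; anything else means both
    "mode": "scan",        # "install" and "scan" are handled in this hunk
    "agentPKs": [],        # only used when target == "agents"
}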
| @@ -859,20 +843,43 @@ def bulk(request): | ||||
|  | ||||
| @api_view(["POST"]) | ||||
| def agent_counts(request): | ||||
|  | ||||
|     server_offline_count = len( | ||||
|         [ | ||||
|             agent | ||||
|             for agent in Agent.objects.filter(monitoring_type="server").only( | ||||
|                 "pk", | ||||
|                 "last_seen", | ||||
|                 "overdue_time", | ||||
|                 "offline_time", | ||||
|             ) | ||||
|             if not agent.status == "online" | ||||
|         ] | ||||
|     ) | ||||
|  | ||||
|     workstation_offline_count = len( | ||||
|         [ | ||||
|             agent | ||||
|             for agent in Agent.objects.filter(monitoring_type="workstation").only( | ||||
|                 "pk", | ||||
|                 "last_seen", | ||||
|                 "overdue_time", | ||||
|                 "offline_time", | ||||
|             ) | ||||
|             if not agent.status == "online" | ||||
|         ] | ||||
|     ) | ||||
|  | ||||
|     return Response( | ||||
|         { | ||||
|             "total_server_count": Agent.objects.filter( | ||||
|                 monitoring_type="server" | ||||
|             ).count(), | ||||
|             "total_server_offline_count": AgentOutage.objects.filter( | ||||
|                 recovery_time=None, agent__monitoring_type="server" | ||||
|             ).count(), | ||||
|             "total_server_offline_count": server_offline_count, | ||||
|             "total_workstation_count": Agent.objects.filter( | ||||
|                 monitoring_type="workstation" | ||||
|             ).count(), | ||||
|             "total_workstation_offline_count": AgentOutage.objects.filter( | ||||
|                 recovery_time=None, agent__monitoring_type="workstation" | ||||
|             ).count(), | ||||
|             "total_workstation_offline_count": workstation_offline_count, | ||||
|         } | ||||
|     ) | ||||
|  | ||||
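The offline counts are now computed in Python by checking each agent's status against the fields fetched with .only(). The status property itself is not shown in this hunk; a rough sketch of the kind of comparison it presumably performs (an assumption, simplified -- the real property presumably also distinguishes an "overdue" state via overdue_time):

# Rough sketch (assumption, not the repo's exact logic)
from datetime import timedelta

from django.utils import timezone as djangotime


def agent_is_online(agent) -> bool:
    if agent.last_seen is None:
        return False
    cutoff = djangotime.now() - timedelta(minutes=agent.offline_time)
    return agent.last_seen > cutoff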
| @@ -909,4 +916,4 @@ class WMI(APIView): | ||||
|         r = asyncio.run(agent.nats_cmd({"func": "sysinfo"}, timeout=20)) | ||||
|         if r != "ok": | ||||
|             return notify_error("Unable to contact the agent") | ||||
|         return Response("ok") | ||||
|         return Response("ok") | ||||
|   | ||||
| @@ -1,6 +1,7 @@ | ||||
| from django.contrib import admin | ||||
|  | ||||
| from .models import Alert | ||||
| from .models import Alert, AlertTemplate | ||||
|  | ||||
|  | ||||
| admin.site.register(Alert) | ||||
| admin.site.register(AlertTemplate) | ||||
|   | ||||
| @@ -42,4 +42,4 @@ class Migration(migrations.Migration): | ||||
|                 ), | ||||
|             ], | ||||
|         ), | ||||
|     ] | ||||
|     ] | ||||
| @@ -27,4 +27,4 @@ class Migration(migrations.Migration): | ||||
|                 max_length=100, | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
|     ] | ||||
| @@ -7,19 +7,25 @@ import django.db.models.deletion | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('checks', '0010_auto_20200922_1344'), | ||||
|         ('alerts', '0002_auto_20200815_1618'), | ||||
|         ("checks", "0010_auto_20200922_1344"), | ||||
|         ("alerts", "0002_auto_20200815_1618"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='assigned_check', | ||||
|             field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='alert', to='checks.check'), | ||||
|             model_name="alert", | ||||
|             name="assigned_check", | ||||
|             field=models.ForeignKey( | ||||
|                 blank=True, | ||||
|                 null=True, | ||||
|                 on_delete=django.db.models.deletion.CASCADE, | ||||
|                 related_name="alert", | ||||
|                 to="checks.check", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name='alert', | ||||
|             name='alert_time', | ||||
|             model_name="alert", | ||||
|             name="alert_time", | ||||
|             field=models.DateTimeField(auto_now_add=True, null=True), | ||||
|         ), | ||||
|     ] | ||||
|     ] | ||||
							
								
								
									
api/tacticalrmm/alerts/migrations/0004_auto_20210212_1408.py (new file, 172 lines)
| @@ -0,0 +1,172 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-12 14:08 | ||||
|  | ||||
| import django.contrib.postgres.fields | ||||
| from django.db import migrations, models | ||||
| import django.db.models.deletion | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('agents', '0029_delete_agentoutage'), | ||||
|         ('clients', '0008_auto_20201103_1430'), | ||||
|         ('autotasks', '0017_auto_20210210_1512'), | ||||
|         ('scripts', '0005_auto_20201207_1606'), | ||||
|         ('alerts', '0003_auto_20201021_1815'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='action_execution_time', | ||||
|             field=models.CharField(blank=True, max_length=100, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='action_retcode', | ||||
|             field=models.IntegerField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='action_run', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='action_stderr', | ||||
|             field=models.TextField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='action_stdout', | ||||
|             field=models.TextField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='action_timeout', | ||||
|             field=models.PositiveIntegerField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='alert_type', | ||||
|             field=models.CharField(choices=[('availability', 'Availability'), ('check', 'Check'), ('task', 'Task'), ('custom', 'Custom')], default='availability', max_length=20), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='assigned_task', | ||||
|             field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='alert', to='autotasks.automatedtask'), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='email_sent', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='hidden', | ||||
|             field=models.BooleanField(default=False), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='resolved_action_execution_time', | ||||
|             field=models.CharField(blank=True, max_length=100, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='resolved_action_retcode', | ||||
|             field=models.IntegerField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='resolved_action_run', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='resolved_action_stderr', | ||||
|             field=models.TextField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='resolved_action_stdout', | ||||
|             field=models.TextField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='resolved_action_timeout', | ||||
|             field=models.PositiveIntegerField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='resolved_email_sent', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='resolved_on', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='resolved_sms_sent', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='sms_sent', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='snoozed', | ||||
|             field=models.BooleanField(default=False), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name='alert', | ||||
|             name='severity', | ||||
|             field=models.CharField(choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='info', max_length=30), | ||||
|         ), | ||||
|         migrations.CreateModel( | ||||
|             name='AlertTemplate', | ||||
|             fields=[ | ||||
|                 ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), | ||||
|                 ('name', models.CharField(max_length=100)), | ||||
|                 ('is_active', models.BooleanField(default=True)), | ||||
|                 ('action_args', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=255, null=True), blank=True, default=list, null=True, size=None)), | ||||
|                 ('resolved_action_args', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=255, null=True), blank=True, default=list, null=True, size=None)), | ||||
|                 ('email_recipients', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=100), blank=True, default=list, null=True, size=None)), | ||||
|                 ('text_recipients', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=100), blank=True, default=list, null=True, size=None)), | ||||
|                 ('email_from', models.EmailField(blank=True, max_length=254, null=True)), | ||||
|                 ('agent_email_on_resolved', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('agent_text_on_resolved', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('agent_include_desktops', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('agent_always_email', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('agent_always_text', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('agent_always_alert', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('agent_periodic_alert_days', models.PositiveIntegerField(blank=True, default=0, null=True)), | ||||
|                 ('check_email_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)), | ||||
|                 ('check_text_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)), | ||||
|                 ('check_dashboard_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)), | ||||
|                 ('check_email_on_resolved', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('check_text_on_resolved', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('check_always_email', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('check_always_text', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('check_always_alert', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('check_periodic_alert_days', models.PositiveIntegerField(blank=True, default=0, null=True)), | ||||
|                 ('task_email_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)), | ||||
|                 ('task_text_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)), | ||||
|                 ('task_dashboard_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)), | ||||
|                 ('task_email_on_resolved', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('task_text_on_resolved', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('task_always_email', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('task_always_text', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('task_always_alert', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('task_periodic_alert_days', models.PositiveIntegerField(blank=True, default=0, null=True)), | ||||
|                 ('action', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='alert_template', to='scripts.script')), | ||||
|                 ('excluded_agents', models.ManyToManyField(blank=True, related_name='alert_exclusions', to='agents.Agent')), | ||||
|                 ('excluded_clients', models.ManyToManyField(blank=True, related_name='alert_exclusions', to='clients.Client')), | ||||
|                 ('excluded_sites', models.ManyToManyField(blank=True, related_name='alert_exclusions', to='clients.Site')), | ||||
|                 ('resolved_action', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='resolved_alert_template', to='scripts.script')), | ||||
|             ], | ||||
|         ), | ||||
|     ] | ||||
							
								
								
									
api/tacticalrmm/alerts/migrations/0005_auto_20210212_1745.py (new file, 31 lines)
| @@ -0,0 +1,31 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-12 17:45 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('alerts', '0004_auto_20210212_1408'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RemoveField( | ||||
|             model_name='alert', | ||||
|             name='action_timeout', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='alert', | ||||
|             name='resolved_action_timeout', | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alerttemplate', | ||||
|             name='action_timeout', | ||||
|             field=models.PositiveIntegerField(default=15), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alerttemplate', | ||||
|             name='resolved_action_timeout', | ||||
|             field=models.PositiveIntegerField(default=15), | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,5 +1,7 @@ | ||||
| from django.db import models | ||||
|  | ||||
| from django.contrib.postgres.fields import ArrayField | ||||
| from django.db.models.fields import BooleanField, PositiveIntegerField | ||||
| from django.utils import timezone as djangotime | ||||
|  | ||||
| SEVERITY_CHOICES = [ | ||||
|     ("info", "Informational"), | ||||
| @@ -7,6 +9,13 @@ SEVERITY_CHOICES = [ | ||||
|     ("error", "Error"), | ||||
| ] | ||||
|  | ||||
| ALERT_TYPE_CHOICES = [ | ||||
|     ("availability", "Availability"), | ||||
|     ("check", "Check"), | ||||
|     ("task", "Task"), | ||||
|     ("custom", "Custom"), | ||||
| ] | ||||
|  | ||||
|  | ||||
| class Alert(models.Model): | ||||
|     agent = models.ForeignKey( | ||||
| @@ -23,21 +32,253 @@ class Alert(models.Model): | ||||
|         null=True, | ||||
|         blank=True, | ||||
|     ) | ||||
|     assigned_task = models.ForeignKey( | ||||
|         "autotasks.AutomatedTask", | ||||
|         related_name="alert", | ||||
|         on_delete=models.CASCADE, | ||||
|         null=True, | ||||
|         blank=True, | ||||
|     ) | ||||
|     alert_type = models.CharField( | ||||
|         max_length=20, choices=ALERT_TYPE_CHOICES, default="availability" | ||||
|     ) | ||||
|     message = models.TextField(null=True, blank=True) | ||||
|     alert_time = models.DateTimeField(auto_now_add=True, null=True) | ||||
|     alert_time = models.DateTimeField(auto_now_add=True, null=True, blank=True) | ||||
|     snoozed = models.BooleanField(default=False) | ||||
|     snooze_until = models.DateTimeField(null=True, blank=True) | ||||
|     resolved = models.BooleanField(default=False) | ||||
|     severity = models.CharField( | ||||
|         max_length=100, choices=SEVERITY_CHOICES, default="info" | ||||
|     resolved_on = models.DateTimeField(null=True, blank=True) | ||||
|     severity = models.CharField(max_length=30, choices=SEVERITY_CHOICES, default="info") | ||||
|     email_sent = models.DateTimeField(null=True, blank=True) | ||||
|     resolved_email_sent = models.DateTimeField(null=True, blank=True) | ||||
|     sms_sent = models.DateTimeField(null=True, blank=True) | ||||
|     resolved_sms_sent = models.DateTimeField(null=True, blank=True) | ||||
|     hidden = models.BooleanField(default=False) | ||||
|     action_run = models.DateTimeField(null=True, blank=True) | ||||
|     action_stdout = models.TextField(null=True, blank=True) | ||||
|     action_stderr = models.TextField(null=True, blank=True) | ||||
|     action_retcode = models.IntegerField(null=True, blank=True) | ||||
|     action_execution_time = models.CharField(max_length=100, null=True, blank=True) | ||||
|     resolved_action_run = models.DateTimeField(null=True, blank=True) | ||||
|     resolved_action_stdout = models.TextField(null=True, blank=True) | ||||
|     resolved_action_stderr = models.TextField(null=True, blank=True) | ||||
|     resolved_action_retcode = models.IntegerField(null=True, blank=True) | ||||
|     resolved_action_execution_time = models.CharField( | ||||
|         max_length=100, null=True, blank=True | ||||
|     ) | ||||
|  | ||||
|     def __str__(self): | ||||
|         return self.message | ||||
|  | ||||
|     def resolve(self): | ||||
|         self.resolved = True | ||||
|         self.resolved_on = djangotime.now() | ||||
|         self.snoozed = False | ||||
|         self.snooze_until = None | ||||
|         self.save() | ||||
|  | ||||
|     @classmethod | ||||
|     def create_availability_alert(cls, agent): | ||||
|         pass | ||||
|      | ||||
|         if not cls.objects.filter(agent=agent, resolved=False).exists(): | ||||
|             return cls.objects.create( | ||||
|                 agent=agent, | ||||
|                 alert_type="availability", | ||||
|                 severity="error", | ||||
|                 message=f"{agent.hostname} in {agent.client.name}\\{agent.site.name} is Offline.", | ||||
|                 hidden=True, | ||||
|             ) | ||||
|  | ||||
|     @classmethod | ||||
|     def create_check_alert(cls, check): | ||||
|         pass | ||||
|  | ||||
|         if not cls.objects.filter(assigned_check=check, resolved=False).exists(): | ||||
|             return cls.objects.create( | ||||
|                 assigned_check=check, | ||||
|                 alert_type="check", | ||||
|                 severity=check.alert_severity, | ||||
|                 message=f"{check.agent.hostname} has a {check.check_type} check: {check.readable_desc} that failed.", | ||||
|                 hidden=True, | ||||
|             ) | ||||
|  | ||||
|     @classmethod | ||||
|     def create_task_alert(cls, task): | ||||
|  | ||||
|         if not cls.objects.filter(assigned_task=task, resolved=False).exists(): | ||||
|             return cls.objects.create( | ||||
|                 assigned_task=task, | ||||
|                 alert_type="task", | ||||
|                 severity=task.alert_severity, | ||||
|                 message=f"{task.agent.hostname} has task: {task.name} that failed.", | ||||
|                 hidden=True, | ||||
|             ) | ||||
|  | ||||
|     @classmethod | ||||
|     def create_custom_alert(cls, custom): | ||||
|         pass | ||||
|  | ||||
|  | ||||
| class AlertTemplate(models.Model): | ||||
|     name = models.CharField(max_length=100) | ||||
|     is_active = models.BooleanField(default=True) | ||||
|  | ||||
|     action = models.ForeignKey( | ||||
|         "scripts.Script", | ||||
|         related_name="alert_template", | ||||
|         blank=True, | ||||
|         null=True, | ||||
|         on_delete=models.SET_NULL, | ||||
|     ) | ||||
|     action_args = ArrayField( | ||||
|         models.CharField(max_length=255, null=True, blank=True), | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     action_timeout = models.PositiveIntegerField(default=15) | ||||
|     resolved_action = models.ForeignKey( | ||||
|         "scripts.Script", | ||||
|         related_name="resolved_alert_template", | ||||
|         blank=True, | ||||
|         null=True, | ||||
|         on_delete=models.SET_NULL, | ||||
|     ) | ||||
|     resolved_action_args = ArrayField( | ||||
|         models.CharField(max_length=255, null=True, blank=True), | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     resolved_action_timeout = models.PositiveIntegerField(default=15) | ||||
|  | ||||
|     # overrides the global recipients | ||||
|     email_recipients = ArrayField( | ||||
|         models.CharField(max_length=100, blank=True), | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     text_recipients = ArrayField( | ||||
|         models.CharField(max_length=100, blank=True), | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|  | ||||
|     # overrides the from address | ||||
|     email_from = models.EmailField(blank=True, null=True) | ||||
|  | ||||
|     # agent alert settings | ||||
|     agent_email_on_resolved = BooleanField(null=True, blank=True, default=False) | ||||
|     agent_text_on_resolved = BooleanField(null=True, blank=True, default=False) | ||||
|     agent_include_desktops = BooleanField(null=True, blank=True, default=False) | ||||
|     agent_always_email = BooleanField(null=True, blank=True, default=False) | ||||
|     agent_always_text = BooleanField(null=True, blank=True, default=False) | ||||
|     agent_always_alert = BooleanField(null=True, blank=True, default=False) | ||||
|     agent_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0) | ||||
|  | ||||
|     # check alert settings | ||||
|     check_email_alert_severity = ArrayField( | ||||
|         models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES), | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     check_text_alert_severity = ArrayField( | ||||
|         models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES), | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     check_dashboard_alert_severity = ArrayField( | ||||
|         models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES), | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     check_email_on_resolved = BooleanField(null=True, blank=True, default=False) | ||||
|     check_text_on_resolved = BooleanField(null=True, blank=True, default=False) | ||||
|     check_always_email = BooleanField(null=True, blank=True, default=False) | ||||
|     check_always_text = BooleanField(null=True, blank=True, default=False) | ||||
|     check_always_alert = BooleanField(null=True, blank=True, default=False) | ||||
|     check_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0) | ||||
|  | ||||
|     # task alert settings | ||||
|     task_email_alert_severity = ArrayField( | ||||
|         models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES), | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     task_text_alert_severity = ArrayField( | ||||
|         models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES), | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     task_dashboard_alert_severity = ArrayField( | ||||
|         models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES), | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     task_email_on_resolved = BooleanField(null=True, blank=True, default=False) | ||||
|     task_text_on_resolved = BooleanField(null=True, blank=True, default=False) | ||||
|     task_always_email = BooleanField(null=True, blank=True, default=False) | ||||
|     task_always_text = BooleanField(null=True, blank=True, default=False) | ||||
|     task_always_alert = BooleanField(null=True, blank=True, default=False) | ||||
|     task_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0) | ||||
|  | ||||
|     excluded_sites = models.ManyToManyField( | ||||
|         "clients.Site", related_name="alert_exclusions", blank=True | ||||
|     ) | ||||
|     excluded_clients = models.ManyToManyField( | ||||
|         "clients.Client", related_name="alert_exclusions", blank=True | ||||
|     ) | ||||
|     excluded_agents = models.ManyToManyField( | ||||
|         "agents.Agent", related_name="alert_exclusions", blank=True | ||||
|     ) | ||||
|  | ||||
|     def __str__(self): | ||||
|         return self.name | ||||
|  | ||||
|     @property | ||||
|     def has_agent_settings(self) -> bool: | ||||
|         return ( | ||||
|             self.agent_email_on_resolved | ||||
|             or self.agent_text_on_resolved | ||||
|             or self.agent_include_desktops | ||||
|             or self.agent_always_email | ||||
|             or self.agent_always_text | ||||
|             or self.agent_always_alert | ||||
|             or bool(self.agent_periodic_alert_days) | ||||
|         ) | ||||
|  | ||||
|     @property | ||||
|     def has_check_settings(self) -> bool: | ||||
|         return ( | ||||
|             bool(self.check_email_alert_severity) | ||||
|             or bool(self.check_text_alert_severity) | ||||
|             or bool(self.check_dashboard_alert_severity) | ||||
|             or self.check_email_on_resolved | ||||
|             or self.check_text_on_resolved | ||||
|             or self.check_always_email | ||||
|             or self.check_always_text | ||||
|             or self.check_always_alert | ||||
|             or bool(self.check_periodic_alert_days) | ||||
|         ) | ||||
|  | ||||
|     @property | ||||
|     def has_task_settings(self) -> bool: | ||||
|         return ( | ||||
|             bool(self.task_email_alert_severity) | ||||
|             or bool(self.task_text_alert_severity) | ||||
|             or bool(self.task_dashboard_alert_severity) | ||||
|             or self.task_email_on_resolved | ||||
|             or self.task_text_on_resolved | ||||
|             or self.task_always_email | ||||
|             or self.task_always_text | ||||
|             or self.task_always_alert | ||||
|             or bool(self.task_periodic_alert_days) | ||||
|         ) | ||||
|  | ||||
|     @property | ||||
|     def has_core_settings(self) -> bool: | ||||
|         return bool(self.email_from or self.email_recipients or self.text_recipients) | ||||
|  | ||||
|     @property | ||||
|     def is_default_template(self) -> bool: | ||||
|         return self.default_alert_template.exists() | ||||
|   | ||||
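A short, hypothetical usage example of the has_*_settings convenience properties defined above (the template values are made up for illustration):

# Illustration only
template = AlertTemplate.objects.create(
    name="Servers - critical only",
    agent_always_email=True,
    check_email_alert_severity=["error"],
)

template.has_agent_settings   # True  (agent_always_email is set)
template.has_check_settings   # True  (an email severity is selected)
template.has_task_settings    # False (no task options set)
template.has_core_settings    # False (no recipient/from overrides)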
| @@ -1,19 +1,124 @@ | ||||
| from rest_framework.fields import SerializerMethodField | ||||
| from rest_framework.serializers import ( | ||||
|     ModelSerializer, | ||||
|     ReadOnlyField, | ||||
|     DateTimeField, | ||||
| ) | ||||
|  | ||||
| from .models import Alert | ||||
| from clients.serializers import ClientSerializer, SiteSerializer | ||||
| from automation.serializers import PolicySerializer | ||||
|  | ||||
| from tacticalrmm.utils import get_default_timezone | ||||
| from .models import Alert, AlertTemplate | ||||
|  | ||||
|  | ||||
| class AlertSerializer(ModelSerializer): | ||||
|  | ||||
|     hostname = ReadOnlyField(source="agent.hostname") | ||||
|     client = ReadOnlyField(source="agent.client") | ||||
|     site = ReadOnlyField(source="agent.site") | ||||
|     alert_time = DateTimeField(format="iso-8601") | ||||
|     hostname = SerializerMethodField(read_only=True) | ||||
|     client = SerializerMethodField(read_only=True) | ||||
|     site = SerializerMethodField(read_only=True) | ||||
|     alert_time = SerializerMethodField(read_only=True) | ||||
|     resolve_on = SerializerMethodField(read_only=True) | ||||
|     snoozed_until = SerializerMethodField(read_only=True) | ||||
|  | ||||
|     def get_hostname(self, instance): | ||||
|         if instance.alert_type == "availability": | ||||
|             return instance.agent.hostname if instance.agent else "" | ||||
|         elif instance.alert_type == "check": | ||||
|             return ( | ||||
|                 instance.assigned_check.agent.hostname | ||||
|                 if instance.assigned_check | ||||
|                 else "" | ||||
|             ) | ||||
|         elif instance.alert_type == "task": | ||||
|             return ( | ||||
|                 instance.assigned_task.agent.hostname if instance.assigned_task else "" | ||||
|             ) | ||||
|         else: | ||||
|             return "" | ||||
|  | ||||
|     def get_client(self, instance): | ||||
|         if instance.alert_type == "availability": | ||||
|             return instance.agent.client.name if instance.agent else "" | ||||
|         elif instance.alert_type == "check": | ||||
|             return ( | ||||
|                 instance.assigned_check.agent.client.name | ||||
|                 if instance.assigned_check | ||||
|                 else "" | ||||
|             ) | ||||
|         elif instance.alert_type == "task": | ||||
|             return ( | ||||
|                 instance.assigned_task.agent.client.name | ||||
|                 if instance.assigned_task | ||||
|                 else "" | ||||
|             ) | ||||
|         else: | ||||
|             return "" | ||||
|  | ||||
|     def get_site(self, instance): | ||||
|         if instance.alert_type == "availability": | ||||
|             return instance.agent.site.name if instance.agent else "" | ||||
|         elif instance.alert_type == "check": | ||||
|             return ( | ||||
|                 instance.assigned_check.agent.site.name | ||||
|                 if instance.assigned_check | ||||
|                 else "" | ||||
|             ) | ||||
|         elif instance.alert_type == "task": | ||||
|             return ( | ||||
|                 instance.assigned_task.agent.site.name if instance.assigned_task else "" | ||||
|             ) | ||||
|         else: | ||||
|             return "" | ||||
|  | ||||
|     def get_alert_time(self, instance): | ||||
|         if instance.alert_time: | ||||
|             return instance.alert_time.astimezone(get_default_timezone()).timestamp() | ||||
|         else: | ||||
|             return None | ||||
|  | ||||
|     def get_resolve_on(self, instance): | ||||
|         if instance.resolved_on: | ||||
|             return instance.resolved_on.astimezone(get_default_timezone()).timestamp() | ||||
|         else: | ||||
|             return None | ||||
|  | ||||
|     def get_snoozed_until(self, instance): | ||||
|         if instance.snooze_until: | ||||
|             return instance.snooze_until.astimezone(get_default_timezone()).timestamp() | ||||
|         return None | ||||
|  | ||||
|     class Meta: | ||||
|         model = Alert | ||||
|         fields = "__all__" | ||||
|         fields = "__all__" | ||||
|  | ||||
|  | ||||
| class AlertTemplateSerializer(ModelSerializer): | ||||
|     agent_settings = ReadOnlyField(source="has_agent_settings") | ||||
|     check_settings = ReadOnlyField(source="has_check_settings") | ||||
|     task_settings = ReadOnlyField(source="has_task_settings") | ||||
|     core_settings = ReadOnlyField(source="has_core_settings") | ||||
|     default_template = ReadOnlyField(source="is_default_template") | ||||
|     action_name = ReadOnlyField(source="action.name") | ||||
|     resolved_action_name = ReadOnlyField(source="resolved_action.name") | ||||
|     applied_count = SerializerMethodField() | ||||
|  | ||||
|     class Meta: | ||||
|         model = AlertTemplate | ||||
|         fields = "__all__" | ||||
|  | ||||
|     def get_applied_count(self, instance): | ||||
|         count = 0 | ||||
|         count += instance.policies.count() | ||||
|         count += instance.clients.count() | ||||
|         count += instance.sites.count() | ||||
|         return count | ||||
|  | ||||
|  | ||||
| class AlertTemplateRelationSerializer(ModelSerializer): | ||||
|     policies = PolicySerializer(read_only=True, many=True) | ||||
|     clients = ClientSerializer(read_only=True, many=True) | ||||
|     sites = SiteSerializer(read_only=True, many=True) | ||||
|  | ||||
|     class Meta: | ||||
|         model = AlertTemplate | ||||
|         fields = "__all__" | ||||
|   | ||||
							
								
								
									
api/tacticalrmm/alerts/tasks.py (new file, 15 lines)
| @@ -0,0 +1,15 @@ | ||||
| from django.utils import timezone as djangotime | ||||
|  | ||||
| from tacticalrmm.celery import app | ||||
|  | ||||
| from alerts.models import Alert | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def unsnooze_alerts() -> str: | ||||
|  | ||||
|     Alert.objects.filter(snoozed=True, snooze_until__lte=djangotime.now()).update( | ||||
|         snoozed=False, snooze_until=None | ||||
|     ) | ||||
|  | ||||
|     return "ok" | ||||
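For the task above to have any effect it must run periodically. A hedged example of a Celery beat entry that would do so; the schedule name and interval are assumptions, and the real schedule is configured elsewhere in the project:

# Hypothetical beat schedule entry
from celery.schedules import crontab

from tacticalrmm.celery import app

app.conf.beat_schedule = {
    "unsnooze-alerts": {
        "task": "alerts.tasks.unsnooze_alerts",
        "schedule": crontab(minute="*/5"),  # assumed: every 5 minutes
    },
}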
| @@ -1,3 +1,377 @@ | ||||
| from django.test import TestCase | ||||
| from datetime import datetime, timedelta | ||||
| from core.models import CoreSettings | ||||
|  | ||||
| # Create your tests here. | ||||
| from django.utils import timezone as djangotime | ||||
| from tacticalrmm.test import TacticalTestCase | ||||
| from model_bakery import baker, seq | ||||
|  | ||||
| from .models import Alert, AlertTemplate | ||||
| from .serializers import ( | ||||
|     AlertSerializer, | ||||
|     AlertTemplateSerializer, | ||||
|     AlertTemplateRelationSerializer, | ||||
| ) | ||||
|  | ||||
|  | ||||
| class TestAlertsViews(TacticalTestCase): | ||||
|     def setUp(self): | ||||
|         self.authenticate() | ||||
|         self.setup_coresettings() | ||||
|  | ||||
|     def test_get_alerts(self): | ||||
|         url = "/alerts/alerts/" | ||||
|  | ||||
|         # create check, task, and agent to test each serializer function | ||||
|         check = baker.make_recipe("checks.diskspace_check") | ||||
|         task = baker.make("autotasks.AutomatedTask") | ||||
|         agent = baker.make_recipe("agents.agent") | ||||
|         # setup data | ||||
|         alerts = baker.make( | ||||
|             "alerts.Alert", | ||||
|             agent=agent, | ||||
|             alert_time=seq(datetime.now(), timedelta(days=15)), | ||||
|             severity="warning", | ||||
|             _quantity=3, | ||||
|         ) | ||||
|         baker.make( | ||||
|             "alerts.Alert", | ||||
|             assigned_check=check, | ||||
|             alert_time=seq(datetime.now(), timedelta(days=15)), | ||||
|             severity="error", | ||||
|             _quantity=7, | ||||
|         ) | ||||
|         baker.make( | ||||
|             "alerts.Alert", | ||||
|             assigned_task=task, | ||||
|             snoozed=True, | ||||
|             snooze_until=djangotime.now(), | ||||
|             alert_time=seq(datetime.now(), timedelta(days=15)), | ||||
|             _quantity=2, | ||||
|         ) | ||||
|         baker.make( | ||||
|             "alerts.Alert", | ||||
|             agent=agent, | ||||
|             resolved=True, | ||||
|             resolved_on=djangotime.now(), | ||||
|             alert_time=seq(datetime.now(), timedelta(days=15)), | ||||
|             _quantity=9, | ||||
|         ) | ||||
|  | ||||
|         # test top alerts for alerts icon | ||||
|         data = {"top": 3} | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertEquals(resp.data["alerts"], AlertSerializer(alerts, many=True).data) | ||||
|         self.assertEquals(resp.data["alerts_count"], 10) | ||||
|  | ||||
|         # test filter data | ||||
|         # test data and result counts | ||||
|         data = [ | ||||
|             { | ||||
|                 "filter": { | ||||
|                     "timeFilter": 30, | ||||
|                     "snoozedFilter": True, | ||||
|                     "resolvedFilter": False, | ||||
|                 }, | ||||
|                 "count": 12, | ||||
|             }, | ||||
|             { | ||||
|                 "filter": { | ||||
|                     "timeFilter": 45, | ||||
|                     "snoozedFilter": False, | ||||
|                     "resolvedFilter": False, | ||||
|                 }, | ||||
|                 "count": 10, | ||||
|             }, | ||||
|             { | ||||
|                 "filter": { | ||||
|                     "severityFilter": ["error"], | ||||
|                     "snoozedFilter": False, | ||||
|                     "resolvedFilter": True, | ||||
|                     "timeFilter": 20, | ||||
|                 }, | ||||
|                 "count": 7, | ||||
|             }, | ||||
|             { | ||||
|                 "filter": { | ||||
|                     "clientFilter": [], | ||||
|                     "snoozedFilter": True, | ||||
|                     "resolvedFilter": False, | ||||
|                 }, | ||||
|                 "count": 0, | ||||
|             }, | ||||
|             {"filter": {}, "count": 21}, | ||||
|             {"filter": {"snoozedFilter": True, "resolvedFilter": False}, "count": 12}, | ||||
|         ] | ||||
|  | ||||
|         for req in data: | ||||
|             resp = self.client.patch(url, req["filter"], format="json") | ||||
|             self.assertEqual(resp.status_code, 200) | ||||
|             self.assertEqual(len(resp.data), req["count"]) | ||||
|  | ||||
|         self.check_not_authenticated("patch", url) | ||||
|  | ||||
|     def test_add_alert(self): | ||||
|         url = "/alerts/alerts/" | ||||
|  | ||||
|         agent = baker.make_recipe("agents.agent") | ||||
|         data = { | ||||
|             "alert_time": datetime.now(), | ||||
|             "agent": agent.id, | ||||
|             "severity": "warning", | ||||
|             "alert_type": "availability", | ||||
|         } | ||||
|  | ||||
|         resp = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         self.check_not_authenticated("post", url) | ||||
|  | ||||
|     def test_get_alert(self): | ||||
|         # returns 404 for invalid alert pk | ||||
|         resp = self.client.get("/alerts/alerts/500/", format="json") | ||||
|         self.assertEqual(resp.status_code, 404) | ||||
|  | ||||
|         alert = baker.make("alerts.Alert") | ||||
|         url = f"/alerts/alerts/{alert.pk}/" | ||||
|  | ||||
|         resp = self.client.get(url, format="json") | ||||
|         serializer = AlertSerializer(alert) | ||||
|  | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertEqual(resp.data, serializer.data) | ||||
|  | ||||
|         self.check_not_authenticated("get", url) | ||||
|  | ||||
|     def test_update_alert(self): | ||||
|         # returns 404 for invalid alert pk | ||||
|         resp = self.client.put("/alerts/alerts/500/", format="json") | ||||
|         self.assertEqual(resp.status_code, 404) | ||||
|  | ||||
|         alert = baker.make("alerts.Alert", resolved=False, snoozed=False) | ||||
|  | ||||
|         url = f"/alerts/alerts/{alert.pk}/" | ||||
|  | ||||
|         # test resolving alert | ||||
|         data = { | ||||
|             "id": alert.pk, | ||||
|             "type": "resolve", | ||||
|         } | ||||
|         resp = self.client.put(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertTrue(Alert.objects.get(pk=alert.pk).resolved) | ||||
|         self.assertTrue(Alert.objects.get(pk=alert.pk).resolved_on) | ||||
|  | ||||
|         # test snoozing alert | ||||
|         data = {"id": alert.pk, "type": "snooze", "snooze_days": "30"} | ||||
|         resp = self.client.put(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertTrue(Alert.objects.get(pk=alert.pk).snoozed) | ||||
|         self.assertTrue(Alert.objects.get(pk=alert.pk).snooze_until) | ||||
|  | ||||
|         # test snoozing alert without snooze_days | ||||
|         data = {"id": alert.pk, "type": "snooze"} | ||||
|         resp = self.client.put(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|         # test unsnoozing alert | ||||
|         data = {"id": alert.pk, "type": "unsnooze"} | ||||
|         resp = self.client.put(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertFalse(Alert.objects.get(pk=alert.pk).snoozed) | ||||
|         self.assertFalse(Alert.objects.get(pk=alert.pk).snooze_until) | ||||
|  | ||||
|         # test invalid type | ||||
|         data = {"id": alert.pk, "type": "invalid"} | ||||
|         resp = self.client.put(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|         self.check_not_authenticated("put", url) | ||||
|  | ||||
|     def test_delete_alert(self): | ||||
|         # returns 404 for invalid alert pk | ||||
|         resp = self.client.put("/alerts/alerts/500/", format="json") | ||||
|         self.assertEqual(resp.status_code, 404) | ||||
|  | ||||
|         alert = baker.make("alerts.Alert") | ||||
|  | ||||
|         # test delete alert | ||||
|         url = f"/alerts/alerts/{alert.pk}/" | ||||
|         resp = self.client.delete(url, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         self.assertFalse(Alert.objects.filter(pk=alert.pk).exists()) | ||||
|         self.check_not_authenticated("delete", url) | ||||
|  | ||||
|     def test_bulk_alert_actions(self): | ||||
|         url = "/alerts/bulk/" | ||||
|  | ||||
|         # setup data | ||||
|         alerts = baker.make("alerts.Alert", resolved=False, _quantity=3) | ||||
|  | ||||
|         # test invalid data | ||||
|         data = {"bulk_action": "invalid"} | ||||
|         resp = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|         # test snooze without snooze days | ||||
|         data = {"bulk_action": "snooze"} | ||||
|         resp = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|         # test bulk snoozing alerts | ||||
|         data = { | ||||
|             "bulk_action": "snooze", | ||||
|             "alerts": [alert.pk for alert in alerts], | ||||
|             "snooze_days": "30", | ||||
|         } | ||||
|         resp = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertFalse(Alert.objects.filter(snoozed=False).exists()) | ||||
|  | ||||
|         # test bulk resolving alerts | ||||
|         data = {"bulk_action": "resolve", "alerts": [alert.pk for alert in alerts]} | ||||
|  | ||||
|         resp = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertFalse(Alert.objects.filter(resolved=False).exists()) | ||||
|         self.assertTrue(Alert.objects.filter(snoozed=False).exists()) | ||||
|  | ||||
|     def test_get_alert_templates(self): | ||||
|         url = "/alerts/alerttemplates/" | ||||
|  | ||||
|         alert_templates = baker.make("alerts.AlertTemplate", _quantity=3) | ||||
|         resp = self.client.get(url, format="json") | ||||
|         serializer = AlertTemplateSerializer(alert_templates, many=True) | ||||
|  | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertEqual(resp.data, serializer.data) | ||||
|  | ||||
|         self.check_not_authenticated("get", url) | ||||
|  | ||||
|     def test_add_alert_template(self): | ||||
|         url = "/alerts/alerttemplates/" | ||||
|  | ||||
|         data = { | ||||
|             "name": "Test Template", | ||||
|         } | ||||
|  | ||||
|         resp = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         self.check_not_authenticated("post", url) | ||||
|  | ||||
|     def test_get_alert_template(self): | ||||
|         # returns 404 for invalid alert template pk | ||||
|         resp = self.client.get("/alerts/alerttemplates/500/", format="json") | ||||
|         self.assertEqual(resp.status_code, 404) | ||||
|  | ||||
|         alert_template = baker.make("alerts.AlertTemplate") | ||||
|         url = f"/alerts/alerttemplates/{alert_template.pk}/" | ||||
|  | ||||
|         resp = self.client.get(url, format="json") | ||||
|         serializer = AlertTemplateSerializer(alert_template) | ||||
|  | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertEqual(resp.data, serializer.data) | ||||
|  | ||||
|         self.check_not_authenticated("get", url) | ||||
|  | ||||
|     def test_update_alert_template(self): | ||||
|         # returns 404 for invalid alert pk | ||||
|         resp = self.client.put("/alerts/alerttemplates/500/", format="json") | ||||
|         self.assertEqual(resp.status_code, 404) | ||||
|  | ||||
|         alert_template = baker.make("alerts.AlertTemplate") | ||||
|  | ||||
|         url = f"/alerts/alerttemplates/{alert_template.pk}/" | ||||
|  | ||||
|         # test data | ||||
|         data = { | ||||
|             "id": alert_template.pk, | ||||
|             "agent_email_on_resolved": True, | ||||
|             "agent_text_on_resolved": True, | ||||
|             "agent_include_desktops": True, | ||||
|             "agent_always_email": True, | ||||
|             "agent_always_text": True, | ||||
|             "agent_always_alert": True, | ||||
|             "agent_periodic_alert_days": "90", | ||||
|         } | ||||
|         resp = self.client.put(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         self.check_not_authenticated("put", url) | ||||
|  | ||||
|     def test_delete_alert_template(self): | ||||
|         # returns 404 for invalid alert pk | ||||
|         resp = self.client.put("/alerts/alerttemplates/500/", format="json") | ||||
|         self.assertEqual(resp.status_code, 404) | ||||
|  | ||||
|         alert_template = baker.make("alerts.AlertTemplate") | ||||
|  | ||||
|         # test delete alert | ||||
|         url = f"/alerts/alerttemplates/{alert_template.pk}/" | ||||
|         resp = self.client.delete(url, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         self.assertFalse(AlertTemplate.objects.filter(pk=alert_template.pk).exists()) | ||||
|  | ||||
|         self.check_not_authenticated("delete", url) | ||||
|  | ||||
|     def test_alert_template_related(self): | ||||
|         # setup data | ||||
|         alert_template = baker.make("alerts.AlertTemplate") | ||||
|         baker.make("clients.Client", alert_template=alert_template, _quantity=2) | ||||
|         baker.make("clients.Site", alert_template=alert_template, _quantity=3) | ||||
|         baker.make("automation.Policy", alert_template=alert_template) | ||||
|         core = CoreSettings.objects.first() | ||||
|         core.alert_template = alert_template | ||||
|         core.save() | ||||
|  | ||||
|         url = f"/alerts/alerttemplates/{alert_template.pk}/related/" | ||||
|  | ||||
|         resp = self.client.get(url, format="json") | ||||
|         serializer = AlertTemplateRelationSerializer(alert_template) | ||||
|  | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertEqual(resp.data, serializer.data) | ||||
|         self.assertEqual(len(resp.data["policies"]), 1) | ||||
|         self.assertEqual(len(resp.data["clients"]), 2) | ||||
|         self.assertEqual(len(resp.data["sites"]), 3) | ||||
|         self.assertTrue( | ||||
|             AlertTemplate.objects.get(pk=alert_template.pk).is_default_template | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class TestAlertTasks(TacticalTestCase): | ||||
|     def test_unsnooze_alert_task(self): | ||||
|         from alerts.tasks import unsnooze_alerts | ||||
|  | ||||
|         # snooze_until is set in the future for these alerts | ||||
|         not_snoozed = baker.make( | ||||
|             "alerts.Alert", | ||||
|             snoozed=True, | ||||
|             snooze_until=seq(datetime.now(), timedelta(days=15)), | ||||
|             _quantity=5, | ||||
|         ) | ||||
|  | ||||
|         # snooze_until has already passed for these alerts | ||||
|         snoozed = baker.make( | ||||
|             "alerts.Alert", | ||||
|             snoozed=True, | ||||
|             snooze_until=seq(datetime.now(), timedelta(days=-15)), | ||||
|             _quantity=5, | ||||
|         ) | ||||
|  | ||||
|         unsnooze_alerts() | ||||
|  | ||||
|         self.assertFalse( | ||||
|             Alert.objects.filter( | ||||
|                 pk__in=[alert.pk for alert in not_snoozed], snoozed=False | ||||
|             ).exists() | ||||
|         ) | ||||
|         self.assertTrue( | ||||
|             Alert.objects.filter( | ||||
|                 pk__in=[alert.pk for alert in snoozed], snoozed=False | ||||
|             ).exists() | ||||
|         ) | ||||
|   | ||||
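For reference, the unsnooze_alerts task exercised above is not shown in this diff; a minimal sketch of the behaviour the assertions imply (implementation assumed, not taken from the commit):

from django.utils import timezone as djangotime

from alerts.models import Alert
from tacticalrmm.celery import app


@app.task
def unsnooze_alerts() -> str:
    # alerts whose snooze window has expired are flipped back to unsnoozed;
    # alerts with snooze_until still in the future are left alone
    Alert.objects.filter(snoozed=True, snooze_until__lte=djangotime.now()).update(
        snoozed=False, snooze_until=None
    )
    return "ok"
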
| @@ -3,5 +3,9 @@ from . import views | ||||
|  | ||||
| urlpatterns = [ | ||||
|     path("alerts/", views.GetAddAlerts.as_view()), | ||||
|     path("bulk/", views.BulkAlerts.as_view()), | ||||
|     path("alerts/<int:pk>/", views.GetUpdateDeleteAlert.as_view()), | ||||
|     path("alerttemplates/", views.GetAddAlertTemplates.as_view()), | ||||
|     path("alerttemplates/<int:pk>/", views.GetUpdateDeleteAlertTemplate.as_view()), | ||||
|     path("alerttemplates/<int:pk>/related/", views.RelatedAlertTemplate.as_view()), | ||||
| ] | ||||
|   | ||||
| @@ -1,19 +1,103 @@ | ||||
| from django.shortcuts import get_object_or_404 | ||||
| from django.db.models import Q | ||||
| from datetime import datetime as dt | ||||
| from django.utils import timezone as djangotime | ||||
|  | ||||
| from tacticalrmm.utils import notify_error | ||||
| from rest_framework.views import APIView | ||||
| from rest_framework.response import Response | ||||
| from rest_framework import status | ||||
|  | ||||
| from .models import Alert | ||||
| from .models import Alert, AlertTemplate | ||||
|  | ||||
| from .serializers import AlertSerializer | ||||
| from .serializers import ( | ||||
|     AlertSerializer, | ||||
|     AlertTemplateSerializer, | ||||
|     AlertTemplateRelationSerializer, | ||||
| ) | ||||
|  | ||||
|  | ||||
| class GetAddAlerts(APIView): | ||||
|     def get(self, request): | ||||
|         alerts = Alert.objects.all() | ||||
|     def patch(self, request): | ||||
|  | ||||
|         return Response(AlertSerializer(alerts, many=True).data) | ||||
|         # top N (request.data["top"]) alerts for the dashboard icon | ||||
|         if "top" in request.data.keys(): | ||||
|             alerts = Alert.objects.filter( | ||||
|                 resolved=False, snoozed=False, hidden=False | ||||
|             ).order_by("alert_time")[: int(request.data["top"])] | ||||
|             count = Alert.objects.filter( | ||||
|                 resolved=False, snoozed=False, hidden=False | ||||
|             ).count() | ||||
|             return Response( | ||||
|                 { | ||||
|                     "alerts_count": count, | ||||
|                     "alerts": AlertSerializer(alerts, many=True).data, | ||||
|                 } | ||||
|             ) | ||||
|  | ||||
|         elif any( | ||||
|             key | ||||
|             in [ | ||||
|                 "timeFilter", | ||||
|                 "clientFilter", | ||||
|                 "severityFilter", | ||||
|                 "resolvedFilter", | ||||
|                 "snoozedFilter", | ||||
|             ] | ||||
|             for key in request.data.keys() | ||||
|         ): | ||||
|             clientFilter = Q() | ||||
|             severityFilter = Q() | ||||
|             timeFilter = Q() | ||||
|             resolvedFilter = Q() | ||||
|             snoozedFilter = Q() | ||||
|  | ||||
|             if ( | ||||
|                 "snoozedFilter" in request.data.keys() | ||||
|                 and not request.data["snoozedFilter"] | ||||
|             ): | ||||
|                 snoozedFilter = Q(snoozed=request.data["snoozedFilter"]) | ||||
|  | ||||
|             if ( | ||||
|                 "resolvedFilter" in request.data.keys() | ||||
|                 and not request.data["resolvedFilter"] | ||||
|             ): | ||||
|                 resolvedFilter = Q(resolved=request.data["resolvedFilter"]) | ||||
|  | ||||
|             if "clientFilter" in request.data.keys(): | ||||
|                 from agents.models import Agent | ||||
|                 from clients.models import Client | ||||
|  | ||||
|                 clients = Client.objects.filter( | ||||
|                     pk__in=request.data["clientFilter"] | ||||
|                 ).values_list("id") | ||||
|                 agents = Agent.objects.filter(site__client_id__in=clients).values_list( | ||||
|                     "id" | ||||
|                 ) | ||||
|  | ||||
|                 clientFilter = Q(agent__in=agents) | ||||
|  | ||||
|             if "severityFilter" in request.data.keys(): | ||||
|                 severityFilter = Q(severity__in=request.data["severityFilter"]) | ||||
|  | ||||
|             if "timeFilter" in request.data.keys(): | ||||
|                 timeFilter = Q( | ||||
|                     alert_time__lte=djangotime.make_aware(dt.today()), | ||||
|                     alert_time__gt=djangotime.make_aware(dt.today()) | ||||
|                     - djangotime.timedelta(days=int(request.data["timeFilter"])), | ||||
|                 ) | ||||
|  | ||||
|             alerts = ( | ||||
|                 Alert.objects.filter(clientFilter) | ||||
|                 .filter(severityFilter) | ||||
|                 .filter(resolvedFilter) | ||||
|                 .filter(snoozedFilter) | ||||
|                 .filter(timeFilter) | ||||
|             ) | ||||
|             return Response(AlertSerializer(alerts, many=True).data) | ||||
|  | ||||
|         else: | ||||
|             alerts = Alert.objects.all() | ||||
|             return Response(AlertSerializer(alerts, many=True).data) | ||||
|  | ||||
|     def post(self, request): | ||||
|         serializer = AlertSerializer(data=request.data, partial=True) | ||||
| @@ -32,7 +116,40 @@ class GetUpdateDeleteAlert(APIView): | ||||
|     def put(self, request, pk): | ||||
|         alert = get_object_or_404(Alert, pk=pk) | ||||
|  | ||||
|         serializer = AlertSerializer(instance=alert, data=request.data, partial=True) | ||||
|         data = request.data | ||||
|  | ||||
|         if "type" in data.keys(): | ||||
|             if data["type"] == "resolve": | ||||
|                 data = { | ||||
|                     "resolved": True, | ||||
|                     "resolved_on": djangotime.now(), | ||||
|                     "snoozed": False, | ||||
|                 } | ||||
|  | ||||
|                 # unable to set snooze_until to None in the serializer | ||||
|                 alert.snooze_until = None | ||||
|                 alert.save() | ||||
|             elif data["type"] == "snooze": | ||||
|                 if "snooze_days" in data.keys(): | ||||
|                     data = { | ||||
|                         "snoozed": True, | ||||
|                         "snooze_until": djangotime.now() | ||||
|                         + djangotime.timedelta(days=int(data["snooze_days"])), | ||||
|                     } | ||||
|                 else: | ||||
|                     return notify_error( | ||||
|                         "Missing 'snooze_days' when trying to snooze alert" | ||||
|                     ) | ||||
|             elif data["type"] == "unsnooze": | ||||
|                 data = {"snoozed": False} | ||||
|  | ||||
|                 # unable to set snooze_until to None in the serializer | ||||
|                 alert.snooze_until = None | ||||
|                 alert.save() | ||||
|             else: | ||||
|                 return notify_error("There was an error in the request data") | ||||
|  | ||||
|         serializer = AlertSerializer(instance=alert, data=data, partial=True) | ||||
|         serializer.is_valid(raise_exception=True) | ||||
|         serializer.save() | ||||
|  | ||||
| @@ -42,3 +159,68 @@ class GetUpdateDeleteAlert(APIView): | ||||
|         Alert.objects.get(pk=pk).delete() | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|  | ||||
| class BulkAlerts(APIView): | ||||
|     def post(self, request): | ||||
|         if request.data["bulk_action"] == "resolve": | ||||
|             Alert.objects.filter(id__in=request.data["alerts"]).update( | ||||
|                 resolved=True, | ||||
|                 resolved_on=djangotime.now(), | ||||
|                 snoozed=False, | ||||
|                 snooze_until=None, | ||||
|             ) | ||||
|             return Response("ok") | ||||
|         elif request.data["bulk_action"] == "snooze": | ||||
|             if "snooze_days" in request.data.keys(): | ||||
|                 Alert.objects.filter(id__in=request.data["alerts"]).update( | ||||
|                     snoozed=True, | ||||
|                     snooze_until=djangotime.now() | ||||
|                     + djangotime.timedelta(days=int(request.data["snooze_days"])), | ||||
|                 ) | ||||
|                 return Response("ok") | ||||
|  | ||||
|         return notify_error("The request was invalid") | ||||
|  | ||||
|  | ||||
| class GetAddAlertTemplates(APIView): | ||||
|     def get(self, request): | ||||
|         alert_templates = AlertTemplate.objects.all() | ||||
|  | ||||
|         return Response(AlertTemplateSerializer(alert_templates, many=True).data) | ||||
|  | ||||
|     def post(self, request): | ||||
|         serializer = AlertTemplateSerializer(data=request.data, partial=True) | ||||
|         serializer.is_valid(raise_exception=True) | ||||
|         serializer.save() | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|  | ||||
| class GetUpdateDeleteAlertTemplate(APIView): | ||||
|     def get(self, request, pk): | ||||
|         alert_template = get_object_or_404(AlertTemplate, pk=pk) | ||||
|  | ||||
|         return Response(AlertTemplateSerializer(alert_template).data) | ||||
|  | ||||
|     def put(self, request, pk): | ||||
|         alert_template = get_object_or_404(AlertTemplate, pk=pk) | ||||
|  | ||||
|         serializer = AlertTemplateSerializer( | ||||
|             instance=alert_template, data=request.data, partial=True | ||||
|         ) | ||||
|         serializer.is_valid(raise_exception=True) | ||||
|         serializer.save() | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|     def delete(self, request, pk): | ||||
|         get_object_or_404(AlertTemplate, pk=pk).delete() | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|  | ||||
| class RelatedAlertTemplate(APIView): | ||||
|     def get(self, request, pk): | ||||
|         alert_template = get_object_or_404(AlertTemplate, pk=pk) | ||||
|         return Response(AlertTemplateRelationSerializer(alert_template).data) | ||||
|   | ||||
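As a usage sketch (not part of the commit; the URL prefix, severity value, and payload keys are inferred from the views, urls, and tests above), the new alert endpoints can be exercised the same way as the other tests in this app:

from model_bakery import baker

from tacticalrmm.test import TacticalTestCase


class TestAlertEndpointUsage(TacticalTestCase):
    def setUp(self):
        self.authenticate()
        self.setup_coresettings()

    def test_example_calls(self):
        alert = baker.make("alerts.Alert", severity="warning")

        # dashboard icon: top N unresolved, unsnoozed, unhidden alerts plus a total count
        self.client.patch("/alerts/alerts/", {"top": 3}, format="json")

        # filtered list: last 30 days, selected severities and clients
        self.client.patch(
            "/alerts/alerts/",
            {"timeFilter": 30, "severityFilter": ["warning"], "clientFilter": [1]},
            format="json",
        )

        # snooze one alert for 7 days, then resolve it
        self.client.put(
            f"/alerts/alerts/{alert.pk}/",
            {"type": "snooze", "snooze_days": 7},
            format="json",
        )
        self.client.put(
            f"/alerts/alerts/{alert.pk}/", {"type": "resolve"}, format="json"
        )

        # bulk resolve
        self.client.post(
            "/alerts/bulk/",
            {"bulk_action": "resolve", "alerts": [alert.pk]},
            format="json",
        )
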
| @@ -1,5 +0,0 @@ | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| class Apiv2Config(AppConfig): | ||||
|     name = 'apiv2' | ||||
| @@ -1,38 +0,0 @@ | ||||
| from tacticalrmm.test import TacticalTestCase | ||||
| from unittest.mock import patch | ||||
| from model_bakery import baker | ||||
| from itertools import cycle | ||||
|  | ||||
|  | ||||
| class TestAPIv2(TacticalTestCase): | ||||
|     def setUp(self): | ||||
|         self.authenticate() | ||||
|         self.setup_coresettings() | ||||
|  | ||||
|     @patch("agents.models.Agent.salt_api_cmd") | ||||
|     def test_sync_modules(self, mock_ret): | ||||
|         # setup data | ||||
|         agent = baker.make_recipe("agents.agent") | ||||
|         url = "/api/v2/saltminion/" | ||||
|         payload = {"agent_id": agent.agent_id} | ||||
|  | ||||
|         mock_ret.return_value = "error" | ||||
|         r = self.client.patch(url, payload, format="json") | ||||
|         self.assertEqual(r.status_code, 400) | ||||
|  | ||||
|         mock_ret.return_value = [] | ||||
|         r = self.client.patch(url, payload, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertEqual(r.data, "Modules are already in sync") | ||||
|  | ||||
|         mock_ret.return_value = ["modules.win_agent"] | ||||
|         r = self.client.patch(url, payload, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertEqual(r.data, "Successfully synced salt modules") | ||||
|  | ||||
|         mock_ret.return_value = ["askdjaskdjasd", "modules.win_agent"] | ||||
|         r = self.client.patch(url, payload, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertEqual(r.data, "Successfully synced salt modules") | ||||
|  | ||||
|         self.check_not_authenticated("patch", url) | ||||
| @@ -1,14 +0,0 @@ | ||||
| from django.urls import path | ||||
| from . import views | ||||
| from apiv3 import views as v3_views | ||||
|  | ||||
| urlpatterns = [ | ||||
|     path("newagent/", v3_views.NewAgent.as_view()), | ||||
|     path("meshexe/", v3_views.MeshExe.as_view()), | ||||
|     path("saltminion/", v3_views.SaltMinion.as_view()), | ||||
|     path("<str:agentid>/saltminion/", v3_views.SaltMinion.as_view()), | ||||
|     path("sysinfo/", v3_views.SysInfo.as_view()), | ||||
|     path("hello/", v3_views.Hello.as_view()), | ||||
|     path("checkrunner/", views.CheckRunner.as_view()), | ||||
|     path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()), | ||||
| ] | ||||
| @@ -1,41 +0,0 @@ | ||||
| from django.shortcuts import get_object_or_404 | ||||
| from django.utils import timezone as djangotime | ||||
|  | ||||
| from rest_framework.authentication import TokenAuthentication | ||||
| from rest_framework.permissions import IsAuthenticated | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.views import APIView | ||||
|  | ||||
| from agents.models import Agent | ||||
| from checks.models import Check | ||||
|  | ||||
| from checks.serializers import CheckRunnerGetSerializerV2 | ||||
|  | ||||
|  | ||||
| class CheckRunner(APIView): | ||||
|     """ | ||||
|     For the windows python agent | ||||
|     """ | ||||
|  | ||||
|     authentication_classes = [TokenAuthentication] | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def get(self, request, agentid): | ||||
|         agent = get_object_or_404(Agent, agent_id=agentid) | ||||
|         agent.last_seen = djangotime.now() | ||||
|         agent.save(update_fields=["last_seen"]) | ||||
|         checks = Check.objects.filter(agent__pk=agent.pk, overriden_by_policy=False) | ||||
|  | ||||
|         ret = { | ||||
|             "agent": agent.pk, | ||||
|             "check_interval": agent.check_interval, | ||||
|             "checks": CheckRunnerGetSerializerV2(checks, many=True).data, | ||||
|         } | ||||
|         return Response(ret) | ||||
|  | ||||
|     def patch(self, request): | ||||
|         check = get_object_or_404(Check, pk=request.data["id"]) | ||||
|         check.last_run = djangotime.now() | ||||
|         check.save(update_fields=["last_run"]) | ||||
|         status = check.handle_checkv2(request.data) | ||||
|         return Response(status) | ||||
| @@ -26,42 +26,10 @@ class TestAPIv3(TacticalTestCase): | ||||
|  | ||||
|         self.check_not_authenticated("get", url) | ||||
|  | ||||
|     def test_get_salt_minion(self): | ||||
|         url = f"/api/v3/{self.agent.agent_id}/saltminion/" | ||||
|         url2 = f"/api/v2/{self.agent.agent_id}/saltminion/" | ||||
|  | ||||
|         r = self.client.get(url) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertIn("latestVer", r.json().keys()) | ||||
|         self.assertIn("currentVer", r.json().keys()) | ||||
|         self.assertIn("salt_id", r.json().keys()) | ||||
|         self.assertIn("downloadURL", r.json().keys()) | ||||
|  | ||||
|         r2 = self.client.get(url2) | ||||
|         self.assertEqual(r2.status_code, 200) | ||||
|  | ||||
|         self.check_not_authenticated("get", url) | ||||
|         self.check_not_authenticated("get", url2) | ||||
|  | ||||
|     def test_get_mesh_info(self): | ||||
|         url = f"/api/v3/{self.agent.pk}/meshinfo/" | ||||
|  | ||||
|         r = self.client.get(url) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|  | ||||
|         self.check_not_authenticated("get", url) | ||||
|  | ||||
|     def test_get_winupdater(self): | ||||
|         url = f"/api/v3/{self.agent.agent_id}/winupdater/" | ||||
|         r = self.client.get(url) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|  | ||||
|         self.check_not_authenticated("get", url) | ||||
|  | ||||
|     def test_sysinfo(self): | ||||
|         # TODO replace this with golang wmi sample data | ||||
|  | ||||
|         url = f"/api/v3/sysinfo/" | ||||
|         url = "/api/v3/sysinfo/" | ||||
|         with open( | ||||
|             os.path.join( | ||||
|                 settings.BASE_DIR, "tacticalrmm/test_data/wmi_python_agent.json" | ||||
| @@ -76,19 +44,11 @@ class TestAPIv3(TacticalTestCase): | ||||
|  | ||||
|         self.check_not_authenticated("patch", url) | ||||
|  | ||||
|     def test_hello_patch(self): | ||||
|         url = f"/api/v3/hello/" | ||||
|         payload = { | ||||
|             "agent_id": self.agent.agent_id, | ||||
|             "logged_in_username": "None", | ||||
|             "disks": [], | ||||
|         } | ||||
|  | ||||
|         r = self.client.patch(url, payload, format="json") | ||||
|     def test_checkrunner_interval(self): | ||||
|         url = f"/api/v3/{self.agent.agent_id}/checkinterval/" | ||||
|         r = self.client.get(url, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|  | ||||
|         payload["logged_in_username"] = "Bob" | ||||
|         r = self.client.patch(url, payload, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|  | ||||
|         self.check_not_authenticated("patch", url) | ||||
|         self.assertEqual( | ||||
|             r.json(), | ||||
|             {"agent": self.agent.pk, "check_interval": self.agent.check_interval}, | ||||
|         ) | ||||
|   | ||||
| @@ -2,18 +2,18 @@ from django.urls import path | ||||
| from . import views | ||||
|  | ||||
| urlpatterns = [ | ||||
|     path("hello/", views.Hello.as_view()), | ||||
|     path("checkrunner/", views.CheckRunner.as_view()), | ||||
|     path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()), | ||||
|     path("<str:agentid>/checkinterval/", views.CheckRunnerInterval.as_view()), | ||||
|     path("<int:pk>/<str:agentid>/taskrunner/", views.TaskRunner.as_view()), | ||||
|     path("saltminion/", views.SaltMinion.as_view()), | ||||
|     path("<str:agentid>/saltminion/", views.SaltMinion.as_view()), | ||||
|     path("<int:pk>/meshinfo/", views.MeshInfo.as_view()), | ||||
|     path("meshexe/", views.MeshExe.as_view()), | ||||
|     path("sysinfo/", views.SysInfo.as_view()), | ||||
|     path("newagent/", views.NewAgent.as_view()), | ||||
|     path("winupdater/", views.WinUpdater.as_view()), | ||||
|     path("<str:agentid>/winupdater/", views.WinUpdater.as_view()), | ||||
|     path("software/", views.Software.as_view()), | ||||
|     path("installer/", views.Installer.as_view()), | ||||
|     path("checkin/", views.CheckIn.as_view()), | ||||
|     path("syncmesh/", views.SyncMeshNodeID.as_view()), | ||||
|     path("choco/", views.Choco.as_view()), | ||||
|     path("winupdates/", views.WinUpdates.as_view()), | ||||
|     path("superseded/", views.SupersededWinUpdate.as_view()), | ||||
| ] | ||||
|   | ||||
| @@ -1,6 +1,6 @@ | ||||
| import asyncio | ||||
| import os | ||||
| import requests | ||||
| import time | ||||
| from loguru import logger | ||||
| from packaging import version as pyver | ||||
|  | ||||
| @@ -17,54 +17,75 @@ from rest_framework.authtoken.models import Token | ||||
|  | ||||
| from agents.models import Agent | ||||
| from checks.models import Check | ||||
| from checks.utils import bytes2human | ||||
| from autotasks.models import AutomatedTask | ||||
| from accounts.models import User | ||||
| from winupdate.models import WinUpdatePolicy | ||||
| from winupdate.models import WinUpdate, WinUpdatePolicy | ||||
| from software.models import InstalledSoftware | ||||
| from checks.serializers import CheckRunnerGetSerializerV3 | ||||
| from agents.serializers import WinAgentSerializer | ||||
| from checks.serializers import CheckRunnerGetSerializer | ||||
| from autotasks.serializers import TaskGOGetSerializer, TaskRunnerPatchSerializer | ||||
| from winupdate.serializers import ApprovedUpdateSerializer | ||||
| from agents.serializers import WinAgentSerializer | ||||
|  | ||||
| from agents.tasks import ( | ||||
|     agent_recovery_email_task, | ||||
|     agent_recovery_sms_task, | ||||
|     sync_salt_modules_task, | ||||
| ) | ||||
| from winupdate.tasks import check_for_updates_task | ||||
| from software.tasks import install_chocolatey | ||||
| from checks.utils import bytes2human | ||||
| from tacticalrmm.utils import notify_error, reload_nats, filter_software, SoftwareList | ||||
|  | ||||
| logger.configure(**settings.LOG_CONFIG) | ||||
|  | ||||
|  | ||||
| class Hello(APIView): | ||||
|     """ | ||||
|     The agent's checkin endpoint | ||||
|     patch: called every 30 to 120 seconds | ||||
|     post: called on agent windows service startup | ||||
|     """ | ||||
| class CheckIn(APIView): | ||||
|  | ||||
|     authentication_classes = [TokenAuthentication] | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def patch(self, request): | ||||
|         updated = False | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         if pyver.parse(request.data["version"]) > pyver.parse( | ||||
|             agent.version | ||||
|         ) or pyver.parse(request.data["version"]) == pyver.parse( | ||||
|             settings.LATEST_AGENT_VER | ||||
|         ): | ||||
|             updated = True | ||||
|         agent.version = request.data["version"] | ||||
|         agent.last_seen = djangotime.now() | ||||
|         agent.save(update_fields=["version", "last_seen"]) | ||||
|  | ||||
|         # change agent update pending status to completed if agent has just updated | ||||
|         if ( | ||||
|             updated | ||||
|             and agent.pendingactions.filter( | ||||
|                 action_type="agentupdate", status="pending" | ||||
|             ).exists() | ||||
|         ): | ||||
|             agent.pendingactions.filter( | ||||
|                 action_type="agentupdate", status="pending" | ||||
|             ).update(status="completed") | ||||
|  | ||||
|         # handles any alerting actions | ||||
|         agent.handle_alert(checkin=True) | ||||
|  | ||||
|         recovery = agent.recoveryactions.filter(last_run=None).last() | ||||
|         if recovery is not None: | ||||
|             recovery.last_run = djangotime.now() | ||||
|             recovery.save(update_fields=["last_run"]) | ||||
|             handle_agent_recovery_task.delay(pk=recovery.pk) | ||||
|             return Response("ok") | ||||
|  | ||||
|         # get any pending actions | ||||
|         if agent.pendingactions.filter(status="pending").exists(): | ||||
|             agent.handle_pending_actions() | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|     def put(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True) | ||||
|         serializer.is_valid(raise_exception=True) | ||||
|  | ||||
|         disks = request.data["disks"] | ||||
|         new = [] | ||||
|         # python agent | ||||
|         if isinstance(disks, dict): | ||||
|             for k, v in disks.items(): | ||||
|                 new.append(v) | ||||
|         else: | ||||
|             # golang agent | ||||
|         if request.data["func"] == "disks": | ||||
|             disks = request.data["disks"] | ||||
|             new = [] | ||||
|             for disk in disks: | ||||
|                 tmp = {} | ||||
|                 for k, v in disk.items(): | ||||
|                 for _, _ in disk.items(): | ||||
|                     tmp["device"] = disk["device"] | ||||
|                     tmp["fstype"] = disk["fstype"] | ||||
|                     tmp["total"] = bytes2human(disk["total"]) | ||||
| @@ -73,62 +94,173 @@ class Hello(APIView): | ||||
|                     tmp["percent"] = int(disk["percent"]) | ||||
|                 new.append(tmp) | ||||
|  | ||||
|         if request.data["logged_in_username"] == "None": | ||||
|             serializer.save(last_seen=djangotime.now(), disks=new) | ||||
|         else: | ||||
|             serializer.save( | ||||
|                 last_seen=djangotime.now(), | ||||
|                 disks=new, | ||||
|                 last_logged_in_user=request.data["logged_in_username"], | ||||
|             serializer.is_valid(raise_exception=True) | ||||
|             serializer.save(disks=new) | ||||
|             return Response("ok") | ||||
|  | ||||
|         if request.data["func"] == "loggedonuser": | ||||
|             if request.data["logged_in_username"] != "None": | ||||
|                 serializer.is_valid(raise_exception=True) | ||||
|                 serializer.save(last_logged_in_user=request.data["logged_in_username"]) | ||||
|                 return Response("ok") | ||||
|  | ||||
|         if request.data["func"] == "software": | ||||
|             raw: SoftwareList = request.data["software"] | ||||
|             if not isinstance(raw, list): | ||||
|                 return notify_error("err") | ||||
|  | ||||
|             sw = filter_software(raw) | ||||
|             if not InstalledSoftware.objects.filter(agent=agent).exists(): | ||||
|                 InstalledSoftware(agent=agent, software=sw).save() | ||||
|             else: | ||||
|                 s = agent.installedsoftware_set.first() | ||||
|                 s.software = sw | ||||
|                 s.save(update_fields=["software"]) | ||||
|  | ||||
|             return Response("ok") | ||||
|  | ||||
|         serializer.is_valid(raise_exception=True) | ||||
|         serializer.save() | ||||
|         return Response("ok") | ||||
|  | ||||
|     # called once during tacticalagent windows service startup | ||||
|     def post(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         if not agent.choco_installed: | ||||
|             asyncio.run(agent.nats_cmd({"func": "installchoco"}, wait=False)) | ||||
|  | ||||
|         time.sleep(0.5) | ||||
|         asyncio.run(agent.nats_cmd({"func": "getwinupdates"}, wait=False)) | ||||
|         return Response("ok") | ||||
|  | ||||
|  | ||||
| class SyncMeshNodeID(APIView): | ||||
|     authentication_classes = [TokenAuthentication] | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def post(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         if agent.mesh_node_id != request.data["nodeid"]: | ||||
|             agent.mesh_node_id = request.data["nodeid"] | ||||
|             agent.save(update_fields=["mesh_node_id"]) | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|  | ||||
| class Choco(APIView): | ||||
|     authentication_classes = [TokenAuthentication] | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def post(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         agent.choco_installed = request.data["installed"] | ||||
|         agent.save(update_fields=["choco_installed"]) | ||||
|         return Response("ok") | ||||
|  | ||||
|  | ||||
| class WinUpdates(APIView): | ||||
|     authentication_classes = [TokenAuthentication] | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def put(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         reboot_policy: str = agent.get_patch_policy().reboot_after_install | ||||
|         reboot = False | ||||
|  | ||||
|         if reboot_policy == "always": | ||||
|             reboot = True | ||||
|  | ||||
|         if request.data["needs_reboot"]: | ||||
|             if reboot_policy == "required": | ||||
|                 reboot = True | ||||
|             elif reboot_policy == "never": | ||||
|                 agent.needs_reboot = True | ||||
|                 agent.save(update_fields=["needs_reboot"]) | ||||
|  | ||||
|         if reboot: | ||||
|             asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False)) | ||||
|             logger.info(f"{agent.hostname} is rebooting after updates were installed.") | ||||
|  | ||||
|         agent.delete_superseded_updates() | ||||
|         return Response("ok") | ||||
|  | ||||
|     def patch(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         u = agent.winupdates.filter(guid=request.data["guid"]).last() | ||||
|         success: bool = request.data["success"] | ||||
|         if success: | ||||
|             u.result = "success" | ||||
|             u.downloaded = True | ||||
|             u.installed = True | ||||
|             u.date_installed = djangotime.now() | ||||
|             u.save( | ||||
|                 update_fields=[ | ||||
|                     "result", | ||||
|                     "downloaded", | ||||
|                     "installed", | ||||
|                     "date_installed", | ||||
|                 ] | ||||
|             ) | ||||
|         else: | ||||
|             u.result = "failed" | ||||
|             u.save(update_fields=["result"]) | ||||
|  | ||||
|         if agent.agentoutages.exists() and agent.agentoutages.last().is_active: | ||||
|             last_outage = agent.agentoutages.last() | ||||
|             last_outage.recovery_time = djangotime.now() | ||||
|             last_outage.save(update_fields=["recovery_time"]) | ||||
|  | ||||
|             if agent.overdue_email_alert: | ||||
|                 agent_recovery_email_task.delay(pk=last_outage.pk) | ||||
|             if agent.overdue_text_alert: | ||||
|                 agent_recovery_sms_task.delay(pk=last_outage.pk) | ||||
|  | ||||
|         recovery = agent.recoveryactions.filter(last_run=None).last() | ||||
|         if recovery is not None: | ||||
|             recovery.last_run = djangotime.now() | ||||
|             recovery.save(update_fields=["last_run"]) | ||||
|             return Response(recovery.send()) | ||||
|  | ||||
|         # handle agent update | ||||
|         if agent.pendingactions.filter( | ||||
|             action_type="agentupdate", status="pending" | ||||
|         ).exists(): | ||||
|             update = agent.pendingactions.filter( | ||||
|                 action_type="agentupdate", status="pending" | ||||
|             ).last() | ||||
|             update.status = "completed" | ||||
|             update.save(update_fields=["status"]) | ||||
|             return Response(update.details) | ||||
|  | ||||
|         # get any pending actions | ||||
|         if agent.pendingactions.filter(status="pending").exists(): | ||||
|             agent.handle_pending_actions() | ||||
|  | ||||
|         agent.delete_superseded_updates() | ||||
|         return Response("ok") | ||||
|  | ||||
|     def post(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         updates = request.data["wua_updates"] | ||||
|         for update in updates: | ||||
|             if agent.winupdates.filter(guid=update["guid"]).exists(): | ||||
|                 u = agent.winupdates.filter(guid=update["guid"]).last() | ||||
|                 u.downloaded = update["downloaded"] | ||||
|                 u.installed = update["installed"] | ||||
|                 u.save(update_fields=["downloaded", "installed"]) | ||||
|             else: | ||||
|                 try: | ||||
|                     kb = "KB" + update["kb_article_ids"][0] | ||||
|                 except Exception: | ||||
|                     continue | ||||
|  | ||||
|         serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True) | ||||
|         serializer.is_valid(raise_exception=True) | ||||
|         serializer.save(last_seen=djangotime.now()) | ||||
|                 WinUpdate( | ||||
|                     agent=agent, | ||||
|                     guid=update["guid"], | ||||
|                     kb=kb, | ||||
|                     title=update["title"], | ||||
|                     installed=update["installed"], | ||||
|                     downloaded=update["downloaded"], | ||||
|                     description=update["description"], | ||||
|                     severity=update["severity"], | ||||
|                     categories=update["categories"], | ||||
|                     category_ids=update["category_ids"], | ||||
|                     kb_article_ids=update["kb_article_ids"], | ||||
|                     more_info_urls=update["more_info_urls"], | ||||
|                     support_url=update["support_url"], | ||||
|                     revision_number=update["revision_number"], | ||||
|                 ).save() | ||||
|  | ||||
|         sync_salt_modules_task.delay(agent.pk) | ||||
|         check_for_updates_task.apply_async( | ||||
|             queue="wupdate", kwargs={"pk": agent.pk, "wait": True} | ||||
|         ) | ||||
|         agent.delete_superseded_updates() | ||||
|  | ||||
|         if not agent.choco_installed: | ||||
|             install_chocolatey.delay(agent.pk, wait=True) | ||||
|         # more superseded updates cleanup | ||||
|         if pyver.parse(agent.version) <= pyver.parse("1.4.2"): | ||||
|             for u in agent.winupdates.filter( | ||||
|                 date_installed__isnull=True, result="failed" | ||||
|             ).exclude(installed=True): | ||||
|                 u.delete() | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|  | ||||
| class SupersededWinUpdate(APIView): | ||||
|     authentication_classes = [TokenAuthentication] | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def post(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         updates = agent.winupdates.filter(guid=request.data["guid"]) | ||||
|         for u in updates: | ||||
|             u.delete() | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
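The reboot handling in WinUpdates.put above reduces to a small decision table; a standalone sketch of that logic (hypothetical helper, not part of the commit) for clarity:

def should_reboot(reboot_policy: str, needs_reboot: bool) -> bool:
    # "always": reboot whether or not the installed updates require it
    # "required": reboot only when the agent reports needs_reboot
    # "never" (or anything else): do not reboot; the view instead flags agent.needs_reboot
    if reboot_policy == "always":
        return True
    if needs_reboot and reboot_policy == "required":
        return True
    return False


assert should_reboot("always", False)
assert should_reboot("required", True)
assert not should_reboot("never", True)
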
| @@ -148,31 +280,28 @@ class CheckRunner(APIView): | ||||
|         ret = { | ||||
|             "agent": agent.pk, | ||||
|             "check_interval": agent.check_interval, | ||||
|             "checks": CheckRunnerGetSerializerV3(checks, many=True).data, | ||||
|             "checks": CheckRunnerGetSerializer(checks, many=True).data, | ||||
|         } | ||||
|         return Response(ret) | ||||
|  | ||||
|     def patch(self, request): | ||||
|         from logs.models import AuditLog | ||||
|  | ||||
|         check = get_object_or_404(Check, pk=request.data["id"]) | ||||
|         check.last_run = djangotime.now() | ||||
|         check.save(update_fields=["last_run"]) | ||||
|         status = check.handle_checkv2(request.data) | ||||
|  | ||||
|         # create audit entry | ||||
|         AuditLog.objects.create( | ||||
|             username=check.agent.hostname, | ||||
|             agent=check.agent.hostname, | ||||
|             object_type="agent", | ||||
|             action="check_run", | ||||
|             message=f"{check.readable_desc} was run on {check.agent.hostname}. Status: {status}", | ||||
|             after_value=Check.serialize(check), | ||||
|         ) | ||||
|  | ||||
|         return Response(status) | ||||
|  | ||||
|  | ||||
| class CheckRunnerInterval(APIView): | ||||
|     authentication_classes = [TokenAuthentication] | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def get(self, request, agentid): | ||||
|         agent = get_object_or_404(Agent, agent_id=agentid) | ||||
|         return Response({"agent": agent.pk, "check_interval": agent.check_interval}) | ||||
|  | ||||
|  | ||||
| class TaskRunner(APIView): | ||||
|     """ | ||||
|     For the windows golang agent | ||||
| @@ -199,6 +328,8 @@ class TaskRunner(APIView): | ||||
|         serializer.save(last_run=djangotime.now()) | ||||
|  | ||||
|         new_task = AutomatedTask.objects.get(pk=task.pk) | ||||
|         new_task.handle_alert() | ||||
|  | ||||
|         AuditLog.objects.create( | ||||
|             username=agent.hostname, | ||||
|             agent=agent.hostname, | ||||
| @@ -211,154 +342,6 @@ class TaskRunner(APIView): | ||||
|         return Response("ok") | ||||
|  | ||||
|  | ||||
| class SaltMinion(APIView): | ||||
|     authentication_classes = [TokenAuthentication] | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def get(self, request, agentid): | ||||
|         agent = get_object_or_404(Agent, agent_id=agentid) | ||||
|         ret = { | ||||
|             "latestVer": settings.LATEST_SALT_VER, | ||||
|             "currentVer": agent.salt_ver, | ||||
|             "salt_id": agent.salt_id, | ||||
|             "downloadURL": agent.winsalt_dl, | ||||
|         } | ||||
|         return Response(ret) | ||||
|  | ||||
|     def post(self, request): | ||||
|         # accept the salt key | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         if agent.salt_id != request.data["saltid"]: | ||||
|             return notify_error("Salt keys do not match") | ||||
|  | ||||
|         try: | ||||
|             resp = requests.post( | ||||
|                 f"http://{settings.SALT_HOST}:8123/run", | ||||
|                 json=[ | ||||
|                     { | ||||
|                         "client": "wheel", | ||||
|                         "fun": "key.accept", | ||||
|                         "match": request.data["saltid"], | ||||
|                         "username": settings.SALT_USERNAME, | ||||
|                         "password": settings.SALT_PASSWORD, | ||||
|                         "eauth": "pam", | ||||
|                     } | ||||
|                 ], | ||||
|                 timeout=30, | ||||
|             ) | ||||
|         except Exception: | ||||
|             return notify_error("No communication between agent and salt-api") | ||||
|  | ||||
|         try: | ||||
|             data = resp.json()["return"][0]["data"] | ||||
|             minion = data["return"]["minions"][0] | ||||
|         except Exception: | ||||
|             return notify_error("Key error") | ||||
|  | ||||
|         if data["success"] and minion == request.data["saltid"]: | ||||
|             return Response("Salt key was accepted") | ||||
|         else: | ||||
|             return notify_error("Not accepted") | ||||
|  | ||||
|     def patch(self, request): | ||||
|         # sync modules | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         r = agent.salt_api_cmd(timeout=45, func="saltutil.sync_modules") | ||||
|  | ||||
|         if r == "timeout" or r == "error": | ||||
|             return notify_error("Failed to sync salt modules") | ||||
|  | ||||
|         if isinstance(r, list) and any("modules" in i for i in r): | ||||
|             return Response("Successfully synced salt modules") | ||||
|         elif isinstance(r, list) and not r: | ||||
|             return Response("Modules are already in sync") | ||||
|         else: | ||||
|             return notify_error(f"Failed to sync salt modules: {str(r)}") | ||||
|  | ||||
|     def put(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         agent.salt_ver = request.data["ver"] | ||||
|         agent.save(update_fields=["salt_ver"]) | ||||
|         return Response("ok") | ||||
|  | ||||
|  | ||||
| class WinUpdater(APIView): | ||||
|  | ||||
|     authentication_classes = [TokenAuthentication] | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def get(self, request, agentid): | ||||
|         agent = get_object_or_404(Agent, agent_id=agentid) | ||||
|         agent.delete_superseded_updates() | ||||
|         patches = agent.winupdates.filter(action="approve").exclude(installed=True) | ||||
|         return Response(ApprovedUpdateSerializer(patches, many=True).data) | ||||
|  | ||||
|     # agent sends patch results as it's installing them | ||||
|     def patch(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         kb = request.data["kb"] | ||||
|         results = request.data["results"] | ||||
|         update = agent.winupdates.get(kb=kb) | ||||
|  | ||||
|         if results == "error" or results == "failed": | ||||
|             update.result = results | ||||
|             update.save(update_fields=["result"]) | ||||
|         elif results == "success": | ||||
|             update.result = "success" | ||||
|             update.downloaded = True | ||||
|             update.installed = True | ||||
|             update.date_installed = djangotime.now() | ||||
|             update.save( | ||||
|                 update_fields=[ | ||||
|                     "result", | ||||
|                     "downloaded", | ||||
|                     "installed", | ||||
|                     "date_installed", | ||||
|                 ] | ||||
|             ) | ||||
|         elif results == "alreadyinstalled": | ||||
|             update.result = "success" | ||||
|             update.downloaded = True | ||||
|             update.installed = True | ||||
|             update.save(update_fields=["result", "downloaded", "installed"]) | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|     # agent calls this after it's finished installing all patches | ||||
|     def post(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         reboot_policy = agent.get_patch_policy().reboot_after_install | ||||
|         reboot = False | ||||
|  | ||||
|         if reboot_policy == "always": | ||||
|             reboot = True | ||||
|  | ||||
|         if request.data["reboot"]: | ||||
|             if reboot_policy == "required": | ||||
|                 reboot = True | ||||
|             elif reboot_policy == "never": | ||||
|                 agent.needs_reboot = True | ||||
|                 agent.save(update_fields=["needs_reboot"]) | ||||
|  | ||||
|         if reboot: | ||||
|             if agent.has_nats: | ||||
|                 asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False)) | ||||
|             else: | ||||
|                 agent.salt_api_async( | ||||
|                     func="system.reboot", | ||||
|                     arg=7, | ||||
|                     kwargs={"in_seconds": True}, | ||||
|                 ) | ||||
|  | ||||
|             logger.info(f"{agent.hostname} is rebooting after updates were installed.") | ||||
|         else: | ||||
|             check_for_updates_task.apply_async( | ||||
|                 queue="wupdate", kwargs={"pk": agent.pk, "wait": False} | ||||
|             ) | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|  | ||||
| class SysInfo(APIView): | ||||
|     authentication_classes = [TokenAuthentication] | ||||
|     permission_classes = [IsAuthenticated] | ||||
| @@ -374,29 +357,6 @@ class SysInfo(APIView): | ||||
|         return Response("ok") | ||||
|  | ||||
|  | ||||
| class MeshInfo(APIView): | ||||
|     authentication_classes = [TokenAuthentication] | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def get(self, request, pk): | ||||
|         agent = get_object_or_404(Agent, pk=pk) | ||||
|         return Response(agent.mesh_node_id) | ||||
|  | ||||
|     def patch(self, request, pk): | ||||
|         agent = get_object_or_404(Agent, pk=pk) | ||||
|  | ||||
|         if "nodeidhex" in request.data: | ||||
|             # agent <= 1.1.0 | ||||
|             nodeid = request.data["nodeidhex"] | ||||
|         else: | ||||
|             # agent >= 1.1.1 | ||||
|             nodeid = request.data["nodeid"] | ||||
|  | ||||
|         agent.mesh_node_id = nodeid | ||||
|         agent.save(update_fields=["mesh_node_id"]) | ||||
|         return Response("ok") | ||||
|  | ||||
|  | ||||
| class MeshExe(APIView): | ||||
|     """ Sends the mesh exe to the installer """ | ||||
|  | ||||
| @@ -461,10 +421,6 @@ class NewAgent(APIView): | ||||
|  | ||||
|         reload_nats() | ||||
|  | ||||
|         # Generate policies for new agent | ||||
|         agent.generate_checks_from_policies() | ||||
|         agent.generate_tasks_from_policies() | ||||
|  | ||||
|         # create agent install audit record | ||||
|         AuditLog.objects.create( | ||||
|             username=request.user, | ||||
|   | ||||
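To summarize the new check-in flow above, these are illustrative payload shapes for the CheckIn endpoint (the keys come from the branches in CheckIn.patch/put/post; the concrete values are assumptions):

checkin_patch = {"agent_id": "some-agent-id", "version": "1.4.3"}  # periodic check-in (PATCH)

# PUT requests are dispatched on "func"
checkin_disks = {"agent_id": "some-agent-id", "func": "disks", "disks": []}
checkin_user = {
    "agent_id": "some-agent-id",
    "func": "loggedonuser",
    "logged_in_username": "Bob",
}
checkin_software = {"agent_id": "some-agent-id", "func": "software", "software": []}

checkin_startup = {"agent_id": "some-agent-id"}  # POST on tacticalagent service startup
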
| @@ -6,11 +6,11 @@ from django.db import migrations | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('automation', '0005_auto_20200922_1344'), | ||||
|         ("automation", "0005_auto_20200922_1344"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.DeleteModel( | ||||
|             name='PolicyExclusions', | ||||
|             name="PolicyExclusions", | ||||
|         ), | ||||
|     ] | ||||
|   | ||||
| @@ -0,0 +1,20 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-12 14:08 | ||||
|  | ||||
| from django.db import migrations, models | ||||
| import django.db.models.deletion | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('alerts', '0004_auto_20210212_1408'), | ||||
|         ('automation', '0006_delete_policyexclusions'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='policy', | ||||
|             name='alert_template', | ||||
|             field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='policies', to='alerts.alerttemplate'), | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,6 +1,5 @@ | ||||
| from django.db import models | ||||
| from agents.models import Agent | ||||
| from clients.models import Site, Client | ||||
| from core.models import CoreSettings | ||||
| from logs.models import BaseAuditModel | ||||
|  | ||||
| @@ -10,6 +9,36 @@ class Policy(BaseAuditModel): | ||||
|     desc = models.CharField(max_length=255, null=True, blank=True) | ||||
|     active = models.BooleanField(default=False) | ||||
|     enforced = models.BooleanField(default=False) | ||||
|     alert_template = models.ForeignKey( | ||||
|         "alerts.AlertTemplate", | ||||
|         related_name="policies", | ||||
|         on_delete=models.SET_NULL, | ||||
|         null=True, | ||||
|         blank=True, | ||||
|     ) | ||||
|  | ||||
|     def save(self, *args, **kwargs): | ||||
|         from automation.tasks import generate_agent_checks_from_policies_task | ||||
|  | ||||
|         # get the old policy if it exists | ||||
|         old_policy = type(self).objects.get(pk=self.pk) if self.pk else None | ||||
|         super(BaseAuditModel, self).save(*args, **kwargs) | ||||
|  | ||||
|         # regenerate agent checks only if active or enforced changed | ||||
|         if old_policy: | ||||
|             if old_policy.active != self.active or old_policy.enforced != self.enforced: | ||||
|                 generate_agent_checks_from_policies_task.delay( | ||||
|                     policypk=self.pk, | ||||
|                     create_tasks=True, | ||||
|                 ) | ||||
|  | ||||
|     def delete(self, *args, **kwargs): | ||||
|         from automation.tasks import generate_agent_checks_task | ||||
|  | ||||
|         agents = list(self.related_agents().only("pk").values_list("pk", flat=True)) | ||||
|         super(BaseAuditModel, self).delete(*args, **kwargs) | ||||
|  | ||||
|         generate_agent_checks_task.delay(agents, create_tasks=True) | ||||
|  | ||||
|     @property | ||||
|     def is_default_server_policy(self): | ||||
| @@ -58,6 +87,11 @@ class Policy(BaseAuditModel): | ||||
|  | ||||
|     @staticmethod | ||||
|     def cascade_policy_tasks(agent): | ||||
|         from autotasks.tasks import delete_win_task_schedule | ||||
|  | ||||
|         from autotasks.models import AutomatedTask | ||||
|         from logs.models import PendingAction | ||||
|  | ||||
|         # List of all tasks to be applied | ||||
|         tasks = list() | ||||
|         added_task_pks = list() | ||||
| @@ -80,7 +114,7 @@ class Policy(BaseAuditModel): | ||||
|             default_policy = CoreSettings.objects.first().server_policy | ||||
|             client_policy = client.server_policy | ||||
|             site_policy = site.server_policy | ||||
|         else: | ||||
|         elif agent.monitoring_type == "workstation": | ||||
|             default_policy = CoreSettings.objects.first().workstation_policy | ||||
|             client_policy = client.workstation_policy | ||||
|             site_policy = site.workstation_policy | ||||
| @@ -107,6 +141,35 @@ class Policy(BaseAuditModel): | ||||
|                     tasks.append(task) | ||||
|                     added_task_pks.append(task.pk) | ||||
|  | ||||
|         # remove policy tasks from the agent that are no longer included in the policy | ||||
|         for task in agent.autotasks.filter( | ||||
|             parent_task__in=[ | ||||
|                 taskpk | ||||
|                 for taskpk in agent_tasks_parent_pks | ||||
|                 if taskpk not in added_task_pks | ||||
|             ] | ||||
|         ): | ||||
|             delete_win_task_schedule.delay(task.pk) | ||||
|  | ||||
|         # handle matching tasks that haven't synced to the agent yet or are pending deletion because the agent was offline | ||||
|         for action in agent.pendingactions.filter(action_type="taskaction").exclude( | ||||
|             status="completed" | ||||
|         ): | ||||
|             task = AutomatedTask.objects.get(pk=action.details["task_id"]) | ||||
|             if ( | ||||
|                 task.parent_task in agent_tasks_parent_pks | ||||
|                 and task.parent_task in added_task_pks | ||||
|             ): | ||||
|                 agent.remove_matching_pending_task_actions(task.id) | ||||
|  | ||||
|                 PendingAction( | ||||
|                     agent=agent, | ||||
|                     action_type="taskaction", | ||||
|                     details={"action": "taskcreate", "task_id": task.id}, | ||||
|                 ).save() | ||||
|                 task.sync_status = "notsynced" | ||||
|                 task.save(update_fields=["sync_status"]) | ||||
|  | ||||
|         return [task for task in tasks if task.pk not in agent_tasks_parent_pks] | ||||
|  | ||||
|     @staticmethod | ||||
| @@ -132,7 +195,7 @@ class Policy(BaseAuditModel): | ||||
|             default_policy = CoreSettings.objects.first().server_policy | ||||
|             client_policy = client.server_policy | ||||
|             site_policy = site.server_policy | ||||
|         else: | ||||
|         elif agent.monitoring_type == "workstation": | ||||
|             default_policy = CoreSettings.objects.first().workstation_policy | ||||
|             client_policy = client.workstation_policy | ||||
|             site_policy = site.workstation_policy | ||||
| @@ -280,6 +343,15 @@ class Policy(BaseAuditModel): | ||||
|             + eventlog_checks | ||||
|         ) | ||||
|  | ||||
|         # remove policy checks from agent that fell out of policy scope | ||||
|         agent.agentchecks.filter( | ||||
|             parent_check__in=[ | ||||
|                 checkpk | ||||
|                 for checkpk in agent_checks_parent_pks | ||||
|                 if checkpk not in [check.pk for check in final_list] | ||||
|             ] | ||||
|         ).delete() | ||||
|  | ||||
|         return [ | ||||
|             check for check in final_list if check.pk not in agent_checks_parent_pks | ||||
|         ] | ||||
|   | ||||
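A usage note on the new Policy.save/delete overrides above (illustrative only; the policy name is hypothetical): regeneration is queued only when active or enforced changes, so routine edits stay cheap.

from automation.models import Policy

policy = Policy.objects.create(name="example policy", active=False, enforced=False)

policy.desc = "updated description"
policy.save()  # active/enforced unchanged -> no regeneration task queued

policy.enforced = True
policy.save()  # changed -> generate_agent_checks_from_policies_task.delay(policypk=policy.pk, create_tasks=True)

policy.delete()  # queues generate_agent_checks_task for the agents that had this policy
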
| @@ -1,18 +1,14 @@ | ||||
| from django.db.models.base import Model | ||||
| from rest_framework.serializers import ( | ||||
|     ModelSerializer, | ||||
|     SerializerMethodField, | ||||
|     StringRelatedField, | ||||
|     ReadOnlyField, | ||||
| ) | ||||
|  | ||||
| from clients.serializers import ClientSerializer, SiteSerializer | ||||
| from agents.serializers import AgentHostnameSerializer | ||||
|  | ||||
| from .models import Policy | ||||
| from agents.models import Agent | ||||
| from autotasks.models import AutomatedTask | ||||
| from checks.models import Check | ||||
| from clients.models import Client, Site | ||||
| from clients.models import Client | ||||
| from winupdate.serializers import WinUpdatePolicySerializer | ||||
|  | ||||
|  | ||||
| @@ -24,15 +20,11 @@ class PolicySerializer(ModelSerializer): | ||||
|  | ||||
| class PolicyTableSerializer(ModelSerializer): | ||||
|  | ||||
|     server_clients = ClientSerializer(many=True, read_only=True) | ||||
|     server_sites = SiteSerializer(many=True, read_only=True) | ||||
|     workstation_clients = ClientSerializer(many=True, read_only=True) | ||||
|     workstation_sites = SiteSerializer(many=True, read_only=True) | ||||
|     agents = AgentHostnameSerializer(many=True, read_only=True) | ||||
|     default_server_policy = ReadOnlyField(source="is_default_server_policy") | ||||
|     default_workstation_policy = ReadOnlyField(source="is_default_workstation_policy") | ||||
|     agents_count = SerializerMethodField(read_only=True) | ||||
|     winupdatepolicy = WinUpdatePolicySerializer(many=True, read_only=True) | ||||
|     alert_template = ReadOnlyField(source="alert_template.id") | ||||
|  | ||||
|     class Meta: | ||||
|         model = Policy | ||||
| @@ -78,49 +70,16 @@ class PolicyCheckSerializer(ModelSerializer): | ||||
|             "assignedtask", | ||||
|             "text_alert", | ||||
|             "email_alert", | ||||
|             "dashboard_alert", | ||||
|         ) | ||||
|         depth = 1 | ||||
|  | ||||
|  | ||||
| class AutoTasksFieldSerializer(ModelSerializer): | ||||
|     assigned_check = PolicyCheckSerializer(read_only=True) | ||||
|     script = ReadOnlyField(source="script.id") | ||||
|  | ||||
|     class Meta: | ||||
|         model = AutomatedTask | ||||
|         fields = ("id", "enabled", "name", "schedule", "assigned_check") | ||||
|         depth = 1 | ||||
|  | ||||
|  | ||||
| class AutoTaskPolicySerializer(ModelSerializer): | ||||
|  | ||||
|     autotasks = AutoTasksFieldSerializer(many=True, read_only=True) | ||||
|  | ||||
|     class Meta: | ||||
|         model = Policy | ||||
|         fields = ( | ||||
|             "id", | ||||
|             "name", | ||||
|             "autotasks", | ||||
|         ) | ||||
|         depth = 2 | ||||
|  | ||||
|  | ||||
| class RelatedClientPolicySerializer(ModelSerializer): | ||||
|     class Meta: | ||||
|         model = Client | ||||
|         fields = ("workstation_policy", "server_policy") | ||||
|         depth = 1 | ||||
|  | ||||
|  | ||||
| class RelatedSitePolicySerializer(ModelSerializer): | ||||
|     class Meta: | ||||
|         model = Site | ||||
|         fields = ("workstation_policy", "server_policy") | ||||
|         depth = 1 | ||||
|  | ||||
|  | ||||
| class RelatedAgentPolicySerializer(ModelSerializer): | ||||
|     class Meta: | ||||
|         model = Agent | ||||
|         fields = ("policy",) | ||||
|         fields = "__all__" | ||||
|         depth = 1 | ||||
|   | ||||
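Note on the serializer hunk above: the nested AutoTaskPolicySerializer and the per-model Related*PolicySerializer classes are dropped in favour of the flat AutoTasksFieldSerializer and a catch-all RelatedAgentPolicySerializer. A minimal sketch of how the flattened serializer is consumed (illustrative only; the function name and wiring are assumptions, though the same pattern appears in the views.py hunk later in this diff):

# Illustrative sketch, not part of the diff
from autotasks.models import AutomatedTask
from automation.serializers import AutoTasksFieldSerializer

def policy_task_payload(policy_pk):
    # serialize one policy's tasks flat, mirroring PolicyAutoTask.get below
    tasks = AutomatedTask.objects.filter(policy=policy_pk)
    return AutoTasksFieldSerializer(tasks, many=True).data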
| @@ -1,4 +1,5 @@ | ||||
| from automation.models import Policy | ||||
| from autotasks.models import AutomatedTask | ||||
| from checks.models import Check | ||||
| from agents.models import Agent | ||||
|  | ||||
| @@ -6,65 +7,86 @@ from tacticalrmm.celery import app | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def generate_agent_checks_from_policies_task( | ||||
|     ### | ||||
|     # copies the policy checks to all affected agents | ||||
|     # | ||||
|     # clear: clears all policy checks first | ||||
|     # create_tasks: also create tasks after checks are generated | ||||
|     ### | ||||
|     policypk, | ||||
|     clear=False, | ||||
|     create_tasks=False, | ||||
| ): | ||||
| # generates policy checks on agents affected by a policy and optionally generates automated tasks | ||||
| def generate_agent_checks_from_policies_task(policypk, create_tasks=False): | ||||
|  | ||||
|     policy = Policy.objects.get(pk=policypk) | ||||
|     for agent in policy.related_agents(): | ||||
|         agent.generate_checks_from_policies(clear=clear) | ||||
|  | ||||
|     if policy.is_default_server_policy and policy.is_default_workstation_policy: | ||||
|         agents = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type") | ||||
|     elif policy.is_default_server_policy: | ||||
|         agents = Agent.objects.filter(monitoring_type="server").only( | ||||
|             "pk", "monitoring_type" | ||||
|         ) | ||||
|     elif policy.is_default_workstation_policy: | ||||
|         agents = Agent.objects.filter(monitoring_type="workstation").only( | ||||
|             "pk", "monitoring_type" | ||||
|         ) | ||||
|     else: | ||||
|         agents = policy.related_agents().only("pk") | ||||
|  | ||||
|     for agent in agents: | ||||
|         agent.generate_checks_from_policies() | ||||
|         if create_tasks: | ||||
|             agent.generate_tasks_from_policies( | ||||
|                 clear=clear, | ||||
|             ) | ||||
|             agent.generate_tasks_from_policies() | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def generate_agent_checks_by_location_task( | ||||
|     location, mon_type, clear=False, create_tasks=False | ||||
| ): | ||||
| # generates policy checks on a list of agents and optionally generates automated tasks | ||||
| def generate_agent_checks_task(agentpks, create_tasks=False): | ||||
|     for agent in Agent.objects.filter(pk__in=agentpks): | ||||
|         agent.generate_checks_from_policies() | ||||
|  | ||||
|         if create_tasks: | ||||
|             agent.generate_tasks_from_policies() | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| # generates policy checks on server or workstation agents within a given client or site and optionally generates automated tasks | ||||
| def generate_agent_checks_by_location_task(location, mon_type, create_tasks=False): | ||||
|  | ||||
|     for agent in Agent.objects.filter(**location).filter(monitoring_type=mon_type): | ||||
|         agent.generate_checks_from_policies(clear=clear) | ||||
|         agent.generate_checks_from_policies() | ||||
|  | ||||
|         if create_tasks: | ||||
|             agent.generate_tasks_from_policies(clear=clear) | ||||
|             agent.generate_tasks_from_policies() | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def generate_all_agent_checks_task(mon_type, clear=False, create_tasks=False): | ||||
| # generates policy checks on all server or workstation agents and optionally generates automated tasks | ||||
| def generate_all_agent_checks_task(mon_type, create_tasks=False): | ||||
|     for agent in Agent.objects.filter(monitoring_type=mon_type): | ||||
|         agent.generate_checks_from_policies(clear=clear) | ||||
|         agent.generate_checks_from_policies() | ||||
|  | ||||
|         if create_tasks: | ||||
|             agent.generate_tasks_from_policies(clear=clear) | ||||
|             agent.generate_tasks_from_policies() | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| # deletes a policy managed check from all agents | ||||
| def delete_policy_check_task(checkpk): | ||||
|  | ||||
|     Check.objects.filter(parent_check=checkpk).delete() | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| # updates policy managed check fields on agents | ||||
| def update_policy_check_fields_task(checkpk): | ||||
|  | ||||
|     check = Check.objects.get(pk=checkpk) | ||||
|  | ||||
|     Check.objects.filter(parent_check=checkpk).update( | ||||
|         threshold=check.threshold, | ||||
|         warning_threshold=check.warning_threshold, | ||||
|         error_threshold=check.error_threshold, | ||||
|         alert_severity=check.alert_severity, | ||||
|         name=check.name, | ||||
|         disk=check.disk, | ||||
|         fails_b4_alert=check.fails_b4_alert, | ||||
|         ip=check.ip, | ||||
|         script=check.script, | ||||
|         script_args=check.script_args, | ||||
|         info_return_codes=check.info_return_codes, | ||||
|         warning_return_codes=check.warning_return_codes, | ||||
|         timeout=check.timeout, | ||||
|         pass_if_start_pending=check.pass_if_start_pending, | ||||
|         pass_if_svc_not_exist=check.pass_if_svc_not_exist, | ||||
| @@ -79,22 +101,31 @@ def update_policy_check_fields_task(checkpk): | ||||
|         search_last_days=check.search_last_days, | ||||
|         email_alert=check.email_alert, | ||||
|         text_alert=check.text_alert, | ||||
|         dashboard_alert=check.dashboard_alert, | ||||
|     ) | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def generate_agent_tasks_from_policies_task(policypk, clear=False): | ||||
| # generates policy tasks on agents affected by a policy | ||||
| def generate_agent_tasks_from_policies_task(policypk): | ||||
|  | ||||
|     policy = Policy.objects.get(pk=policypk) | ||||
|     for agent in policy.related_agents(): | ||||
|         agent.generate_tasks_from_policies(clear=clear) | ||||
|  | ||||
|     if policy.is_default_server_policy and policy.is_default_workstation_policy: | ||||
|         agents = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type") | ||||
|     elif policy.is_default_server_policy: | ||||
|         agents = Agent.objects.filter(monitoring_type="server").only( | ||||
|             "pk", "monitoring_type" | ||||
|         ) | ||||
|     elif policy.is_default_workstation_policy: | ||||
|         agents = Agent.objects.filter(monitoring_type="workstation").only( | ||||
|             "pk", "monitoring_type" | ||||
|         ) | ||||
|     else: | ||||
|         agents = policy.related_agents().only("pk") | ||||
|  | ||||
| @app.task | ||||
| def generate_agent_tasks_by_location_task(location, mon_type, clear=False): | ||||
|  | ||||
|     for agent in Agent.objects.filter(**location).filter(monitoring_type=mon_type): | ||||
|         agent.generate_tasks_from_policies(clear=clear) | ||||
|     for agent in agents: | ||||
|         agent.generate_tasks_from_policies() | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| @@ -115,13 +146,23 @@ def run_win_policy_autotask_task(task_pks): | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def update_policy_task_fields_task(taskpk, enabled): | ||||
|     from autotasks.models import AutomatedTask | ||||
| def update_policy_task_fields_task(taskpk, update_agent=False): | ||||
|     from autotasks.tasks import enable_or_disable_win_task | ||||
|  | ||||
|     tasks = AutomatedTask.objects.filter(parent_task=taskpk) | ||||
|     task = AutomatedTask.objects.get(pk=taskpk) | ||||
|  | ||||
|     tasks.update(enabled=enabled) | ||||
|     AutomatedTask.objects.filter(parent_task=taskpk).update( | ||||
|         alert_severity=task.alert_severity, | ||||
|         email_alert=task.email_alert, | ||||
|         text_alert=task.text_alert, | ||||
|         dashboard_alert=task.dashboard_alert, | ||||
|         script=task.script, | ||||
|         script_args=task.script_args, | ||||
|         name=task.name, | ||||
|         timeout=task.timeout, | ||||
|         enabled=task.enabled, | ||||
|     ) | ||||
|  | ||||
|     for autotask in tasks: | ||||
|         enable_or_disable_win_task(autotask.pk, enabled) | ||||
|     if update_agent: | ||||
|         for task in AutomatedTask.objects.filter(parent_task=taskpk): | ||||
|             enable_or_disable_win_task.delay(task.pk, task.enabled) | ||||
|   | ||||
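The refactored tasks above drop the clear= flag entirely; callers simply queue them with the new keyword arguments. A hedged sketch of the expected call sites (the wrapper function names are invented for illustration; the .delay signatures match the assertions in the tests below):

# Illustrative only, not part of the diff
from automation.tasks import (
    generate_agent_checks_task,
    generate_agent_checks_by_location_task,
    generate_all_agent_checks_task,
)

def regenerate_for_agents(agent_pks):
    # a specific set of agents, e.g. after a policy is deleted
    generate_agent_checks_task.delay(agent_pks, create_tasks=True)

def regenerate_for_client_workstations(client_id):
    # every workstation agent under one client
    generate_agent_checks_by_location_task.delay(
        location={"site__client_id": client_id},
        mon_type="workstation",
        create_tasks=True,
    )

def regenerate_for_all_servers():
    # every server agent, e.g. when the default server policy changes
    generate_all_agent_checks_task.delay(mon_type="server", create_tasks=True)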
| @@ -9,13 +9,10 @@ from .serializers import ( | ||||
|     PolicyTableSerializer, | ||||
|     PolicySerializer, | ||||
|     PolicyTaskStatusSerializer, | ||||
|     AutoTaskPolicySerializer, | ||||
|     PolicyOverviewSerializer, | ||||
|     PolicyCheckStatusSerializer, | ||||
|     PolicyCheckSerializer, | ||||
|     RelatedAgentPolicySerializer, | ||||
|     RelatedSitePolicySerializer, | ||||
|     RelatedClientPolicySerializer, | ||||
|     AutoTasksFieldSerializer, | ||||
| ) | ||||
|  | ||||
|  | ||||
| @@ -91,7 +88,7 @@ class TestPolicyViews(TacticalTestCase): | ||||
|         self.check_not_authenticated("post", url) | ||||
|  | ||||
|     @patch("automation.tasks.generate_agent_checks_from_policies_task.delay") | ||||
|     def test_update_policy(self, mock_checks_task): | ||||
|     def test_update_policy(self, generate_agent_checks_from_policies_task): | ||||
|         # returns 404 for invalid policy pk | ||||
|         resp = self.client.put("/automation/policies/500/", format="json") | ||||
|         self.assertEqual(resp.status_code, 404) | ||||
| @@ -110,7 +107,7 @@ class TestPolicyViews(TacticalTestCase): | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # only called if active or enforced are updated | ||||
|         mock_checks_task.assert_not_called() | ||||
|         generate_agent_checks_from_policies_task.assert_not_called() | ||||
|  | ||||
|         data = { | ||||
|             "name": "Test Policy Update", | ||||
| @@ -121,42 +118,43 @@ class TestPolicyViews(TacticalTestCase): | ||||
|  | ||||
|         resp = self.client.put(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         mock_checks_task.assert_called_with( | ||||
|             policypk=policy.pk, clear=True, create_tasks=True | ||||
|         generate_agent_checks_from_policies_task.assert_called_with( | ||||
|             policypk=policy.pk, create_tasks=True | ||||
|         ) | ||||
|  | ||||
|         self.check_not_authenticated("put", url) | ||||
|  | ||||
|     @patch("automation.tasks.generate_agent_checks_from_policies_task.delay") | ||||
|     @patch("automation.tasks.generate_agent_tasks_from_policies_task.delay") | ||||
|     def test_delete_policy(self, mock_tasks_task, mock_checks_task): | ||||
|     @patch("automation.tasks.generate_agent_checks_task.delay") | ||||
|     def test_delete_policy(self, generate_agent_checks_task): | ||||
|         # returns 404 for invalid policy pk | ||||
|         resp = self.client.delete("/automation/policies/500/", format="json") | ||||
|         self.assertEqual(resp.status_code, 404) | ||||
|  | ||||
|         # setup data | ||||
|         policy = baker.make("automation.Policy") | ||||
|         site = baker.make("clients.Site") | ||||
|         agents = baker.make_recipe( | ||||
|             "agents.agent", site=site, policy=policy, _quantity=3 | ||||
|         ) | ||||
|         url = f"/automation/policies/{policy.pk}/" | ||||
|  | ||||
|         resp = self.client.delete(url, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         mock_checks_task.assert_called_with(policypk=policy.pk, clear=True) | ||||
|         mock_tasks_task.assert_called_with(policypk=policy.pk, clear=True) | ||||
|         generate_agent_checks_task.assert_called_with( | ||||
|             [agent.pk for agent in agents], create_tasks=True | ||||
|         ) | ||||
|  | ||||
|         self.check_not_authenticated("delete", url) | ||||
|  | ||||
|     def test_get_all_policy_tasks(self): | ||||
|         # returns 404 for invalid policy pk | ||||
|         resp = self.client.get("/automation/500/policyautomatedtasks/", format="json") | ||||
|         self.assertEqual(resp.status_code, 404) | ||||
|  | ||||
|         # create policy with tasks | ||||
|         policy = baker.make("automation.Policy") | ||||
|         baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3) | ||||
|         tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3) | ||||
|         url = f"/automation/{policy.pk}/policyautomatedtasks/" | ||||
|  | ||||
|         resp = self.client.get(url, format="json") | ||||
|         serializer = AutoTaskPolicySerializer(policy) | ||||
|         serializer = AutoTasksFieldSerializer(tasks, many=True) | ||||
|  | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertEqual(resp.data, serializer.data) | ||||
| @@ -182,8 +180,9 @@ class TestPolicyViews(TacticalTestCase): | ||||
|         self.check_not_authenticated("get", url) | ||||
|  | ||||
|     def test_get_policy_check_status(self): | ||||
|         # set data | ||||
|         agent = baker.make_recipe("agents.agent") | ||||
|         # setup data | ||||
|         site = baker.make("clients.Site") | ||||
|         agent = baker.make_recipe("agents.agent", site=site) | ||||
|         policy = baker.make("automation.Policy") | ||||
|         policy_diskcheck = baker.make_recipe("checks.diskspace_check", policy=policy) | ||||
|         managed_check = baker.make_recipe( | ||||
| @@ -248,274 +247,6 @@ class TestPolicyViews(TacticalTestCase): | ||||
|  | ||||
|         self.check_not_authenticated("get", url) | ||||
|  | ||||
|     @patch("agents.models.Agent.generate_checks_from_policies") | ||||
|     @patch("automation.tasks.generate_agent_checks_by_location_task.delay") | ||||
|     def test_update_policy_add( | ||||
|         self, | ||||
|         mock_checks_location_task, | ||||
|         mock_checks_task, | ||||
|     ): | ||||
|         url = f"/automation/related/" | ||||
|  | ||||
|         # data setup | ||||
|         policy = baker.make("automation.Policy") | ||||
|         client = baker.make("clients.Client") | ||||
|         site = baker.make("clients.Site", client=client) | ||||
|         agent = baker.make_recipe("agents.agent", site=site) | ||||
|  | ||||
|         # test add client to policy data | ||||
|         client_server_payload = { | ||||
|             "type": "client", | ||||
|             "pk": agent.client.pk, | ||||
|             "server_policy": policy.pk, | ||||
|         } | ||||
|         client_workstation_payload = { | ||||
|             "type": "client", | ||||
|             "pk": agent.client.pk, | ||||
|             "workstation_policy": policy.pk, | ||||
|         } | ||||
|  | ||||
|         # test add site to policy data | ||||
|         site_server_payload = { | ||||
|             "type": "site", | ||||
|             "pk": agent.site.pk, | ||||
|             "server_policy": policy.pk, | ||||
|         } | ||||
|         site_workstation_payload = { | ||||
|             "type": "site", | ||||
|             "pk": agent.site.pk, | ||||
|             "workstation_policy": policy.pk, | ||||
|         } | ||||
|  | ||||
|         # test add agent to policy data | ||||
|         agent_payload = {"type": "agent", "pk": agent.pk, "policy": policy.pk} | ||||
|  | ||||
|         # test client server policy add | ||||
|         resp = self.client.post(url, client_server_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # called because the relation changed | ||||
|         mock_checks_location_task.assert_called_with( | ||||
|             location={"site__client_id": client.id}, | ||||
|             mon_type="server", | ||||
|             clear=True, | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         mock_checks_location_task.reset_mock() | ||||
|  | ||||
|         # test client workstation policy add | ||||
|         resp = self.client.post(url, client_workstation_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # called because the relation changed | ||||
|         mock_checks_location_task.assert_called_with( | ||||
|             location={"site__client_id": client.id}, | ||||
|             mon_type="workstation", | ||||
|             clear=True, | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         mock_checks_location_task.reset_mock() | ||||
|  | ||||
|         # test site add server policy | ||||
|         resp = self.client.post(url, site_server_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # called because the relation changed | ||||
|         mock_checks_location_task.assert_called_with( | ||||
|             location={"site_id": site.id}, | ||||
|             mon_type="server", | ||||
|             clear=True, | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         mock_checks_location_task.reset_mock() | ||||
|  | ||||
|         # test site add workstation policy | ||||
|         resp = self.client.post(url, site_workstation_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # called because the relation changed | ||||
|         mock_checks_location_task.assert_called_with( | ||||
|             location={"site_id": site.id}, | ||||
|             mon_type="workstation", | ||||
|             clear=True, | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         mock_checks_location_task.reset_mock() | ||||
|  | ||||
|         # test agent add | ||||
|         resp = self.client.post(url, agent_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # called because the relation changed | ||||
|         mock_checks_task.assert_called_with(clear=True) | ||||
|         mock_checks_task.reset_mock() | ||||
|  | ||||
|         # Adding the same relations shouldn't trigger mocks | ||||
|         resp = self.client.post(url, client_server_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         resp = self.client.post(url, client_workstation_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         mock_checks_location_task.assert_not_called() | ||||
|  | ||||
|         resp = self.client.post(url, site_server_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         resp = self.client.post(url, site_workstation_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         mock_checks_location_task.assert_not_called() | ||||
|  | ||||
|         resp = self.client.post(url, agent_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # called because the relation changed | ||||
|         mock_checks_task.assert_not_called() | ||||
|  | ||||
|         # test remove client from policy data | ||||
|         client_server_payload = {"type": "client", "pk": client.pk, "server_policy": 0} | ||||
|         client_workstation_payload = { | ||||
|             "type": "client", | ||||
|             "pk": client.pk, | ||||
|             "workstation_policy": 0, | ||||
|         } | ||||
|  | ||||
|         # test remove site from policy data | ||||
|         site_server_payload = {"type": "site", "pk": site.pk, "server_policy": 0} | ||||
|         site_workstation_payload = { | ||||
|             "type": "site", | ||||
|             "pk": site.pk, | ||||
|             "workstation_policy": 0, | ||||
|         } | ||||
|  | ||||
|         # test remove agent from policy | ||||
|         agent_payload = {"type": "agent", "pk": agent.pk, "policy": 0} | ||||
|  | ||||
|         # test client server policy remove | ||||
|         resp = self.client.post(url, client_server_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # called because the relation changed | ||||
|         mock_checks_location_task.assert_called_with( | ||||
|             location={"site__client_id": client.id}, | ||||
|             mon_type="server", | ||||
|             clear=True, | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         mock_checks_location_task.reset_mock() | ||||
|  | ||||
|         # test client workstation policy remove | ||||
|         resp = self.client.post(url, client_workstation_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # called because the relation changed | ||||
|         mock_checks_location_task.assert_called_with( | ||||
|             location={"site__client_id": client.id}, | ||||
|             mon_type="workstation", | ||||
|             clear=True, | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         mock_checks_location_task.reset_mock() | ||||
|  | ||||
|         # test site remove server policy | ||||
|         resp = self.client.post(url, site_server_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # called because the relation changed | ||||
|         mock_checks_location_task.assert_called_with( | ||||
|             location={"site_id": site.id}, | ||||
|             mon_type="server", | ||||
|             clear=True, | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         mock_checks_location_task.reset_mock() | ||||
|  | ||||
|         # test site remove workstation policy | ||||
|         resp = self.client.post(url, site_workstation_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # called because the relation changed | ||||
|         mock_checks_location_task.assert_called_with( | ||||
|             location={"site_id": site.id}, | ||||
|             mon_type="workstation", | ||||
|             clear=True, | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         mock_checks_location_task.reset_mock() | ||||
|  | ||||
|         # test agent remove | ||||
|         resp = self.client.post(url, agent_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         # called because the relation changed | ||||
|         mock_checks_task.assert_called_with(clear=True) | ||||
|         mock_checks_task.reset_mock() | ||||
|  | ||||
|         # adding the same relations shouldn't trigger mocks | ||||
|         resp = self.client.post(url, client_server_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         resp = self.client.post(url, client_workstation_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # shouldn't be called since nothing changed | ||||
|         mock_checks_location_task.assert_not_called() | ||||
|  | ||||
|         resp = self.client.post(url, site_server_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         resp = self.client.post(url, site_workstation_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # shouldn't be called since nothing changed | ||||
|         mock_checks_location_task.assert_not_called() | ||||
|  | ||||
|         resp = self.client.post(url, agent_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # shouldn't be called since nothing changed | ||||
|         mock_checks_task.assert_not_called() | ||||
|  | ||||
|         self.check_not_authenticated("post", url) | ||||
|  | ||||
|     def test_get_relation_by_type(self): | ||||
|         url = f"/automation/related/" | ||||
|  | ||||
|         # data setup | ||||
|         policy = baker.make("automation.Policy") | ||||
|         client = baker.make("clients.Client", workstation_policy=policy) | ||||
|         site = baker.make("clients.Site", server_policy=policy) | ||||
|         agent = baker.make_recipe("agents.agent", site=site, policy=policy) | ||||
|  | ||||
|         client_payload = {"type": "client", "pk": client.pk} | ||||
|  | ||||
|         # test add site to policy | ||||
|         site_payload = {"type": "site", "pk": site.pk} | ||||
|  | ||||
|         # test add agent to policy | ||||
|         agent_payload = {"type": "agent", "pk": agent.pk} | ||||
|  | ||||
|         # test client relation get | ||||
|         serializer = RelatedClientPolicySerializer(client) | ||||
|         resp = self.client.patch(url, client_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertEqual(resp.data, serializer.data) | ||||
|  | ||||
|         # test site relation get | ||||
|         serializer = RelatedSitePolicySerializer(site) | ||||
|         resp = self.client.patch(url, site_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertEqual(resp.data, serializer.data) | ||||
|  | ||||
|         # test agent relation get | ||||
|         serializer = RelatedAgentPolicySerializer(agent) | ||||
|         resp = self.client.patch(url, agent_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertEqual(resp.data, serializer.data) | ||||
|  | ||||
|         invalid_payload = {"type": "bad_type", "pk": 5} | ||||
|  | ||||
|         resp = self.client.patch(url, invalid_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|         self.check_not_authenticated("patch", url) | ||||
|  | ||||
|     def test_get_policy_task_status(self): | ||||
|  | ||||
|         # policy with a task | ||||
| @@ -749,11 +480,10 @@ class TestPolicyTasks(TacticalTestCase): | ||||
|         # setup data | ||||
|         policy = baker.make("automation.Policy", active=True) | ||||
|         checks = self.create_checks(policy=policy) | ||||
|         site = baker.make("clients.Site") | ||||
|         agent = baker.make_recipe("agents.agent", site=site, policy=policy) | ||||
|         agent = baker.make_recipe("agents.agent", policy=policy) | ||||
|  | ||||
|         # test policy assigned to agent | ||||
|         generate_agent_checks_from_policies_task(policy.id, clear=True) | ||||
|         generate_agent_checks_from_policies_task(policy.id) | ||||
|  | ||||
|         # make sure all checks were created. should be 7 | ||||
|         agent_checks = Agent.objects.get(pk=agent.id).agentchecks.all() | ||||
| @@ -766,16 +496,19 @@ class TestPolicyTasks(TacticalTestCase): | ||||
|             if check.check_type == "diskspace": | ||||
|                 self.assertEqual(check.parent_check, checks[0].id) | ||||
|                 self.assertEqual(check.disk, checks[0].disk) | ||||
|                 self.assertEqual(check.threshold, checks[0].threshold) | ||||
|                 self.assertEqual(check.error_threshold, checks[0].error_threshold) | ||||
|                 self.assertEqual(check.warning_threshold, checks[0].warning_threshold) | ||||
|             elif check.check_type == "ping": | ||||
|                 self.assertEqual(check.parent_check, checks[1].id) | ||||
|                 self.assertEqual(check.ip, checks[1].ip) | ||||
|             elif check.check_type == "cpuload": | ||||
|                 self.assertEqual(check.parent_check, checks[2].id) | ||||
|                 self.assertEqual(check.threshold, checks[2].threshold) | ||||
|                 self.assertEqual(check.error_threshold, checks[2].error_threshold) | ||||
|                 self.assertEqual(check.warning_threshold, checks[2].warning_threshold) | ||||
|             elif check.check_type == "memory": | ||||
|                 self.assertEqual(check.parent_check, checks[3].id) | ||||
|                 self.assertEqual(check.threshold, checks[3].threshold) | ||||
|                 self.assertEqual(check.error_threshold, checks[3].error_threshold) | ||||
|                 self.assertEqual(check.warning_threshold, checks[3].warning_threshold) | ||||
|             elif check.check_type == "winsvc": | ||||
|                 self.assertEqual(check.parent_check, checks[4].id) | ||||
|                 self.assertEqual(check.svc_name, checks[4].svc_name) | ||||
| @@ -811,71 +544,245 @@ class TestPolicyTasks(TacticalTestCase): | ||||
|             7, | ||||
|         ) | ||||
|  | ||||
|     def test_generating_agent_policy_checks_by_location(self): | ||||
|         from .tasks import generate_agent_checks_by_location_task | ||||
|     @patch("automation.tasks.generate_agent_checks_by_location_task.delay") | ||||
|     def test_generating_agent_policy_checks_by_location( | ||||
|         self, generate_agent_checks_by_location_task | ||||
|     ): | ||||
|         from automation.tasks import ( | ||||
|             generate_agent_checks_by_location_task as generate_agent_checks, | ||||
|         ) | ||||
|  | ||||
|         # setup data | ||||
|         policy = baker.make("automation.Policy", active=True) | ||||
|         self.create_checks(policy=policy) | ||||
|         clients = baker.make( | ||||
|             "clients.Client", | ||||
|             _quantity=2, | ||||
|             server_policy=policy, | ||||
|             workstation_policy=policy, | ||||
|         ) | ||||
|         sites = baker.make("clients.Site", client=cycle(clients), _quantity=4) | ||||
|         server_agent = baker.make_recipe("agents.server_agent", site=sites[0]) | ||||
|         workstation_agent = baker.make_recipe("agents.workstation_agent", site=sites[2]) | ||||
|         agent1 = baker.make_recipe("agents.server_agent", site=sites[1]) | ||||
|         agent2 = baker.make_recipe("agents.workstation_agent", site=sites[3]) | ||||
|  | ||||
|         generate_agent_checks_by_location_task( | ||||
|             {"site_id": sites[0].id}, | ||||
|             "server", | ||||
|             clear=True, | ||||
|         baker.make( | ||||
|             "autotasks.AutomatedTask", policy=policy, name=seq("Task"), _quantity=3 | ||||
|         ) | ||||
|  | ||||
|         server_agent = baker.make_recipe("agents.server_agent") | ||||
|         workstation_agent = baker.make_recipe("agents.workstation_agent") | ||||
|  | ||||
|         # no checks should be present on agents | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0) | ||||
|         self.assertEqual( | ||||
|             Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0 | ||||
|         ) | ||||
|  | ||||
|         # set workstation policy on client and policy checks should be there | ||||
|         workstation_agent.client.workstation_policy = policy | ||||
|         workstation_agent.client.save() | ||||
|  | ||||
|         # should trigger task in save method on core | ||||
|         generate_agent_checks_by_location_task.assert_called_with( | ||||
|             location={"site__client_id": workstation_agent.client.pk}, | ||||
|             mon_type="workstation", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         generate_agent_checks_by_location_task.reset_mock() | ||||
|  | ||||
|         generate_agent_checks( | ||||
|             location={"site__client_id": workstation_agent.client.pk}, | ||||
|             mon_type="workstation", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|  | ||||
|         # server_agent should have policy checks and the other agents should not | ||||
|         # make sure the checks were added | ||||
|         self.assertEqual( | ||||
|             Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 7 | ||||
|         ) | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0) | ||||
|  | ||||
|         # remove workstation policy from client | ||||
|         workstation_agent.client.workstation_policy = None | ||||
|         workstation_agent.client.save() | ||||
|  | ||||
|         # should trigger task in save method on core | ||||
|         generate_agent_checks_by_location_task.assert_called_with( | ||||
|             location={"site__client_id": workstation_agent.client.pk}, | ||||
|             mon_type="workstation", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         generate_agent_checks_by_location_task.reset_mock() | ||||
|  | ||||
|         generate_agent_checks( | ||||
|             location={"site__client_id": workstation_agent.client.pk}, | ||||
|             mon_type="workstation", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|  | ||||
|         # make sure the checks were removed | ||||
|         self.assertEqual( | ||||
|             Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0 | ||||
|         ) | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0) | ||||
|  | ||||
|         # set server policy on client and policy checks should be there | ||||
|         server_agent.client.server_policy = policy | ||||
|         server_agent.client.save() | ||||
|  | ||||
|         # should trigger task in save method on core | ||||
|         generate_agent_checks_by_location_task.assert_called_with( | ||||
|             location={"site__client_id": server_agent.client.pk}, | ||||
|             mon_type="server", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         generate_agent_checks_by_location_task.reset_mock() | ||||
|  | ||||
|         generate_agent_checks( | ||||
|             location={"site__client_id": server_agent.client.pk}, | ||||
|             mon_type="server", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|  | ||||
|         # make sure checks were added | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 7) | ||||
|         self.assertEqual( | ||||
|             Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0 | ||||
|         ) | ||||
|         self.assertEqual(Agent.objects.get(pk=agent1.id).agentchecks.count(), 0) | ||||
|  | ||||
|         generate_agent_checks_by_location_task( | ||||
|             {"site__client_id": clients[0].id}, | ||||
|             "workstation", | ||||
|             clear=True, | ||||
|         # remove server policy from client | ||||
|         server_agent.client.server_policy = None | ||||
|         server_agent.client.save() | ||||
|  | ||||
|         # should trigger task in save method on core | ||||
|         generate_agent_checks_by_location_task.assert_called_with( | ||||
|             location={"site__client_id": server_agent.client.pk}, | ||||
|             mon_type="server", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         # workstation_agent should now have policy checks and the other agents should not | ||||
|         generate_agent_checks_by_location_task.reset_mock() | ||||
|  | ||||
|         generate_agent_checks( | ||||
|             location={"site__client_id": server_agent.client.pk}, | ||||
|             mon_type="server", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|  | ||||
|         # make sure checks were removed | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0) | ||||
|         self.assertEqual( | ||||
|             Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0 | ||||
|         ) | ||||
|  | ||||
|         # set workstation policy on site and policy checks should be there | ||||
|         workstation_agent.site.workstation_policy = policy | ||||
|         workstation_agent.site.save() | ||||
|  | ||||
|         # should trigger task in save method on core | ||||
|         generate_agent_checks_by_location_task.assert_called_with( | ||||
|             location={"site_id": workstation_agent.site.pk}, | ||||
|             mon_type="workstation", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         generate_agent_checks_by_location_task.reset_mock() | ||||
|  | ||||
|         generate_agent_checks( | ||||
|             location={"site_id": workstation_agent.site.pk}, | ||||
|             mon_type="workstation", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|  | ||||
|         # make sure checks were added on workstation | ||||
|         self.assertEqual( | ||||
|             Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 7 | ||||
|         ) | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 7) | ||||
|         self.assertEqual(Agent.objects.get(pk=agent1.id).agentchecks.count(), 0) | ||||
|         self.assertEqual(Agent.objects.get(pk=agent2.id).agentchecks.count(), 0) | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0) | ||||
|  | ||||
|     def test_generating_policy_checks_for_all_agents(self): | ||||
|         from .tasks import generate_all_agent_checks_task | ||||
|         # remove workstation policy from site | ||||
|         workstation_agent.site.workstation_policy = None | ||||
|         workstation_agent.site.save() | ||||
|  | ||||
|         # should trigger task in save method on core | ||||
|         generate_agent_checks_by_location_task.assert_called_with( | ||||
|             location={"site_id": workstation_agent.site.pk}, | ||||
|             mon_type="workstation", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         generate_agent_checks_by_location_task.reset_mock() | ||||
|  | ||||
|         generate_agent_checks( | ||||
|             location={"site_id": workstation_agent.site.pk}, | ||||
|             mon_type="workstation", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|  | ||||
|         # make sure checks were removed | ||||
|         self.assertEqual( | ||||
|             Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0 | ||||
|         ) | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0) | ||||
|  | ||||
|         # set server policy on site and policy checks should be there | ||||
|         server_agent.site.server_policy = policy | ||||
|         server_agent.site.save() | ||||
|  | ||||
|         # should trigger task in save method on core | ||||
|         generate_agent_checks_by_location_task.assert_called_with( | ||||
|             location={"site_id": server_agent.site.pk}, | ||||
|             mon_type="server", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         generate_agent_checks_by_location_task.reset_mock() | ||||
|  | ||||
|         generate_agent_checks( | ||||
|             location={"site_id": server_agent.site.pk}, | ||||
|             mon_type="server", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|  | ||||
|         # make sure checks were added | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 7) | ||||
|         self.assertEqual( | ||||
|             Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0 | ||||
|         ) | ||||
|  | ||||
|         # remove server policy from site | ||||
|         server_agent.site.server_policy = None | ||||
|         server_agent.site.save() | ||||
|  | ||||
|         # should trigger task in save method on core | ||||
|         generate_agent_checks_by_location_task.assert_called_with( | ||||
|             location={"site_id": server_agent.site.pk}, | ||||
|             mon_type="server", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         generate_agent_checks_by_location_task.reset_mock() | ||||
|  | ||||
|         generate_agent_checks( | ||||
|             location={"site_id": server_agent.site.pk}, | ||||
|             mon_type="server", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|  | ||||
|         # make sure checks were removed | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0) | ||||
|         self.assertEqual( | ||||
|             Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0 | ||||
|         ) | ||||
|  | ||||
|     @patch("automation.tasks.generate_all_agent_checks_task.delay") | ||||
|     def test_generating_policy_checks_for_all_agents( | ||||
|         self, generate_all_agent_checks_task | ||||
|     ): | ||||
|         from .tasks import generate_all_agent_checks_task as generate_all_checks | ||||
|         from core.models import CoreSettings | ||||
|  | ||||
|         # setup data | ||||
|         policy = baker.make("automation.Policy", active=True) | ||||
|         self.create_checks(policy=policy) | ||||
|  | ||||
|         site = baker.make("clients.Site") | ||||
|         server_agents = baker.make_recipe("agents.server_agent", site=site, _quantity=3) | ||||
|         workstation_agents = baker.make_recipe( | ||||
|             "agents.workstation_agent", site=site, _quantity=4 | ||||
|         ) | ||||
|         server_agents = baker.make_recipe("agents.server_agent", _quantity=3) | ||||
|         workstation_agents = baker.make_recipe("agents.workstation_agent", _quantity=4) | ||||
|         core = CoreSettings.objects.first() | ||||
|         core.server_policy = policy | ||||
|         core.workstation_policy = policy | ||||
|         core.save() | ||||
|  | ||||
|         generate_all_agent_checks_task("server", clear=True, create_tasks=True) | ||||
|         generate_all_agent_checks_task.assert_called_with( | ||||
|             mon_type="server", create_tasks=True | ||||
|         ) | ||||
|         generate_all_agent_checks_task.reset_mock() | ||||
|         generate_all_checks(mon_type="server", create_tasks=True) | ||||
|  | ||||
|         # all servers should have 7 checks | ||||
|         for agent in server_agents: | ||||
| @@ -884,24 +791,50 @@ class TestPolicyTasks(TacticalTestCase): | ||||
|         for agent in workstation_agents: | ||||
|             self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0) | ||||
|  | ||||
|         generate_all_agent_checks_task("workstation", clear=True, create_tasks=True) | ||||
|         core.server_policy = None | ||||
|         core.workstation_policy = policy | ||||
|         core.save() | ||||
|  | ||||
|         # all agents should have 7 checks now | ||||
|         generate_all_agent_checks_task.assert_any_call( | ||||
|             mon_type="workstation", create_tasks=True | ||||
|         ) | ||||
|         generate_all_agent_checks_task.assert_any_call( | ||||
|             mon_type="server", create_tasks=True | ||||
|         ) | ||||
|         generate_all_agent_checks_task.reset_mock() | ||||
|         generate_all_checks(mon_type="server", create_tasks=True) | ||||
|         generate_all_checks(mon_type="workstation", create_tasks=True) | ||||
|  | ||||
|         # all workstations should have 7 checks | ||||
|         for agent in server_agents: | ||||
|             self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7) | ||||
|             self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0) | ||||
|  | ||||
|         for agent in workstation_agents: | ||||
|             self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7) | ||||
|  | ||||
|         core.workstation_policy = None | ||||
|         core.save() | ||||
|  | ||||
|         generate_all_agent_checks_task.assert_called_with( | ||||
|             mon_type="workstation", create_tasks=True | ||||
|         ) | ||||
|         generate_all_agent_checks_task.reset_mock() | ||||
|         generate_all_checks(mon_type="workstation", create_tasks=True) | ||||
|  | ||||
|         # nothing should have the checks | ||||
|         for agent in server_agents: | ||||
|             self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0) | ||||
|  | ||||
|         for agent in workstation_agents: | ||||
|             self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0) | ||||
|  | ||||
|     def test_delete_policy_check(self): | ||||
|         from .tasks import delete_policy_check_task | ||||
|         from .models import Policy | ||||
|  | ||||
|         policy = baker.make("automation.Policy", active=True) | ||||
|         self.create_checks(policy=policy) | ||||
|         site = baker.make("clients.Site") | ||||
|         agent = baker.make_recipe("agents.server_agent", site=site, policy=policy) | ||||
|         agent.generate_checks_from_policies() | ||||
|         agent = baker.make_recipe("agents.server_agent", policy=policy) | ||||
|  | ||||
|         # make sure agent has 7 checks | ||||
|         self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7) | ||||
| @@ -926,7 +859,6 @@ class TestPolicyTasks(TacticalTestCase): | ||||
|         policy = baker.make("automation.Policy", active=True) | ||||
|         self.create_checks(policy=policy) | ||||
|         agent = baker.make_recipe("agents.server_agent", policy=policy) | ||||
|         agent.generate_checks_from_policies() | ||||
|  | ||||
|         # make sure agent has 7 checks | ||||
|         self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7) | ||||
| @@ -958,10 +890,9 @@ class TestPolicyTasks(TacticalTestCase): | ||||
|         tasks = baker.make( | ||||
|             "autotasks.AutomatedTask", policy=policy, name=seq("Task"), _quantity=3 | ||||
|         ) | ||||
|         site = baker.make("clients.Site") | ||||
|         agent = baker.make_recipe("agents.server_agent", site=site, policy=policy) | ||||
|         agent = baker.make_recipe("agents.server_agent", policy=policy) | ||||
|  | ||||
|         generate_agent_tasks_from_policies_task(policy.id, clear=True) | ||||
|         generate_agent_tasks_from_policies_task(policy.id) | ||||
|  | ||||
|         agent_tasks = Agent.objects.get(pk=agent.id).autotasks.all() | ||||
|  | ||||
| @@ -980,63 +911,19 @@ class TestPolicyTasks(TacticalTestCase): | ||||
|                 self.assertEqual(task.parent_task, tasks[2].id) | ||||
|                 self.assertEqual(task.name, tasks[2].name) | ||||
|  | ||||
|     def test_generate_agent_tasks_by_location(self): | ||||
|         from .tasks import generate_agent_tasks_by_location_task | ||||
|  | ||||
|         # setup data | ||||
|         policy = baker.make("automation.Policy", active=True) | ||||
|         baker.make( | ||||
|             "autotasks.AutomatedTask", policy=policy, name=seq("Task"), _quantity=3 | ||||
|         ) | ||||
|         clients = baker.make( | ||||
|             "clients.Client", | ||||
|             _quantity=2, | ||||
|             server_policy=policy, | ||||
|             workstation_policy=policy, | ||||
|         ) | ||||
|         sites = baker.make("clients.Site", client=cycle(clients), _quantity=4) | ||||
|         server_agent = baker.make_recipe("agents.server_agent", site=sites[0]) | ||||
|         workstation_agent = baker.make_recipe("agents.workstation_agent", site=sites[2]) | ||||
|         agent1 = baker.make_recipe("agents.agent", site=sites[1]) | ||||
|         agent2 = baker.make_recipe("agents.agent", site=sites[3]) | ||||
|  | ||||
|         generate_agent_tasks_by_location_task( | ||||
|             {"site_id": sites[0].id}, "server", clear=True | ||||
|         ) | ||||
|  | ||||
|         # all servers in site1 and site2 should have 3 tasks | ||||
|         self.assertEqual( | ||||
|             Agent.objects.get(pk=workstation_agent.id).autotasks.count(), 0 | ||||
|         ) | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).autotasks.count(), 3) | ||||
|         self.assertEqual(Agent.objects.get(pk=agent1.id).autotasks.count(), 0) | ||||
|         self.assertEqual(Agent.objects.get(pk=agent2.id).autotasks.count(), 0) | ||||
|  | ||||
|         generate_agent_tasks_by_location_task( | ||||
|             {"site__client_id": clients[0].id}, "workstation", clear=True | ||||
|         ) | ||||
|  | ||||
|         # all workstations in Default1 should have 3 tasks | ||||
|         self.assertEqual( | ||||
|             Agent.objects.get(pk=workstation_agent.id).autotasks.count(), 3 | ||||
|         ) | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).autotasks.count(), 3) | ||||
|         self.assertEqual(Agent.objects.get(pk=agent1.id).autotasks.count(), 0) | ||||
|         self.assertEqual(Agent.objects.get(pk=agent2.id).autotasks.count(), 0) | ||||
|  | ||||
|     @patch("autotasks.tasks.delete_win_task_schedule.delay") | ||||
|     def test_delete_policy_tasks(self, delete_win_task_schedule): | ||||
|         from .tasks import delete_policy_autotask_task | ||||
|  | ||||
|         policy = baker.make("automation.Policy", active=True) | ||||
|         tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3) | ||||
|         site = baker.make("clients.Site") | ||||
|         agent = baker.make_recipe("agents.server_agent", site=site, policy=policy) | ||||
|         agent.generate_tasks_from_policies() | ||||
|         agent = baker.make_recipe("agents.server_agent", policy=policy) | ||||
|  | ||||
|         delete_policy_autotask_task(tasks[0].id) | ||||
|  | ||||
|         delete_win_task_schedule.assert_called_with(agent.autotasks.first().id) | ||||
|         delete_win_task_schedule.assert_called_with( | ||||
|             agent.autotasks.get(parent_task=tasks[0].id).id | ||||
|         ) | ||||
|  | ||||
|     @patch("autotasks.tasks.run_win_task.delay") | ||||
|     def test_run_policy_task(self, run_win_task): | ||||
| @@ -1051,25 +938,46 @@ class TestPolicyTasks(TacticalTestCase): | ||||
|         for task in tasks: | ||||
|             run_win_task.assert_any_call(task.id) | ||||
|  | ||||
|     @patch("agents.models.Agent.nats_cmd") | ||||
|     def test_update_policy_tasks(self, nats_cmd): | ||||
|     @patch("autotasks.tasks.enable_or_disable_win_task.delay") | ||||
|     def test_update_policy_tasks(self, enable_or_disable_win_task): | ||||
|         from .tasks import update_policy_task_fields_task | ||||
|         from autotasks.models import AutomatedTask | ||||
|  | ||||
|         nats_cmd.return_value = "ok" | ||||
|  | ||||
|         # setup data | ||||
|         policy = baker.make("automation.Policy", active=True) | ||||
|         tasks = baker.make( | ||||
|             "autotasks.AutomatedTask", enabled=True, policy=policy, _quantity=3 | ||||
|         ) | ||||
|         site = baker.make("clients.Site") | ||||
|         agent = baker.make_recipe("agents.server_agent", site=site, policy=policy) | ||||
|         agent.generate_tasks_from_policies() | ||||
|         agent = baker.make_recipe("agents.server_agent", policy=policy) | ||||
|  | ||||
|         tasks[0].enabled = False | ||||
|         tasks[0].save() | ||||
|  | ||||
|         update_policy_task_fields_task(tasks[0].id, enabled=False) | ||||
|         update_policy_task_fields_task(tasks[0].id) | ||||
|         enable_or_disable_win_task.assert_not_called() | ||||
|  | ||||
|         self.assertFalse(AutomatedTask.objects.get(parent_task=tasks[0].id).enabled) | ||||
|         self.assertFalse(agent.autotasks.get(parent_task=tasks[0].id).enabled) | ||||
|  | ||||
|         update_policy_task_fields_task(tasks[0].id, update_agent=True) | ||||
|         enable_or_disable_win_task.assert_called_with( | ||||
|             agent.autotasks.get(parent_task=tasks[0].id).id, False | ||||
|         ) | ||||
|  | ||||
|     @patch("agents.models.Agent.generate_tasks_from_policies") | ||||
|     @patch("agents.models.Agent.generate_checks_from_policies") | ||||
|     def test_generate_agent_checks_with_agentpks(self, generate_checks, generate_tasks): | ||||
|         from automation.tasks import generate_agent_checks_task | ||||
|  | ||||
|         agents = baker.make_recipe("agents.agent", _quantity=5) | ||||
|  | ||||
|         # reset because creating agents triggers it | ||||
|         generate_checks.reset_mock() | ||||
|         generate_tasks.reset_mock() | ||||
|  | ||||
|         generate_agent_checks_task([agent.pk for agent in agents]) | ||||
|         self.assertEquals(generate_checks.call_count, 5) | ||||
|         generate_tasks.assert_not_called() | ||||
|         generate_checks.reset_mock() | ||||
|  | ||||
|         generate_agent_checks_task([agent.pk for agent in agents], create_tasks=True) | ||||
|         self.assertEquals(generate_checks.call_count, 5) | ||||
|         self.assertEquals(generate_tasks.call_count, 5) | ||||
|   | ||||
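The tests above share one mocking idiom: patch the Celery task's .delay attribute so no broker is needed, reset the mock after fixture creation (saving baker-made agents, clients and sites can queue the generation tasks), then assert on the exact call arguments. A self-contained sketch of that pattern (the test class and fixtures are illustrative, not taken from the diff):

# Illustrative pattern only, not part of the diff
from unittest.mock import patch

from django.test import TestCase
from model_bakery import baker


class MockedDelayPatternExample(TestCase):
    @patch("automation.tasks.generate_agent_checks_task.delay")
    def test_fixture_setup_then_assert(self, generate_agent_checks_task):
        policy = baker.make("automation.Policy")
        baker.make_recipe("agents.agent", policy=policy, _quantity=3)

        # creating the agents may already have queued the task, so start clean
        generate_agent_checks_task.reset_mock()
        generate_agent_checks_task.assert_not_called()

        # in a real test you would now hit a view or save a model, then use
        # generate_agent_checks_task.assert_called_with(<expected args>)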
| @@ -4,7 +4,6 @@ from . import views | ||||
| urlpatterns = [ | ||||
|     path("policies/", views.GetAddPolicies.as_view()), | ||||
|     path("policies/<int:pk>/related/", views.GetRelated.as_view()), | ||||
|     path("related/", views.GetRelated.as_view()), | ||||
|     path("policies/overview/", views.OverviewPolicy.as_view()), | ||||
|     path("policies/<int:pk>/", views.GetUpdateDeletePolicy.as_view()), | ||||
|     path("<int:pk>/policychecks/", views.PolicyCheck.as_view()), | ||||
|   | ||||
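With the standalone related/ route removed above, policy relations are only reachable through the per-policy URL. A small sketch of a request against the remaining route (client setup and authentication are assumed; the "agents" key comes from GetRelated.get in the views hunk below):

# Illustrative only, not part of the diff
from rest_framework.test import APIClient

def fetch_policy_relations(policy_pk):
    # GET /automation/policies/<pk>/related/ returns the policy's related
    # clients, sites and agents (only the "agents" key is visible in this diff)
    client = APIClient()  # assumes an authenticated session in practice
    resp = client.get(f"/automation/policies/{policy_pk}/related/", format="json")
    return resp.data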
| @@ -2,11 +2,10 @@ from django.shortcuts import get_object_or_404 | ||||
|  | ||||
| from rest_framework.views import APIView | ||||
| from rest_framework.response import Response | ||||
| from rest_framework import status | ||||
|  | ||||
| from .models import Policy | ||||
| from agents.models import Agent | ||||
| from clients.models import Client, Site | ||||
| from clients.models import Client | ||||
| from checks.models import Check | ||||
| from autotasks.models import AutomatedTask | ||||
| from winupdate.models import WinUpdatePolicy | ||||
| @@ -22,16 +21,10 @@ from .serializers import ( | ||||
|     PolicyCheckStatusSerializer, | ||||
|     PolicyCheckSerializer, | ||||
|     PolicyTaskStatusSerializer, | ||||
|     AutoTaskPolicySerializer, | ||||
|     RelatedClientPolicySerializer, | ||||
|     RelatedSitePolicySerializer, | ||||
|     RelatedAgentPolicySerializer, | ||||
|     AutoTasksFieldSerializer, | ||||
| ) | ||||
|  | ||||
| from .tasks import ( | ||||
|     generate_agent_checks_from_policies_task, | ||||
|     generate_agent_checks_by_location_task, | ||||
|     generate_agent_tasks_from_policies_task, | ||||
|     run_win_policy_autotask_task, | ||||
| ) | ||||
|  | ||||
| @@ -72,30 +65,14 @@ class GetUpdateDeletePolicy(APIView): | ||||
|     def put(self, request, pk): | ||||
|         policy = get_object_or_404(Policy, pk=pk) | ||||
|  | ||||
|         old_active = policy.active | ||||
|         old_enforced = policy.enforced | ||||
|  | ||||
|         serializer = PolicySerializer(instance=policy, data=request.data, partial=True) | ||||
|         serializer.is_valid(raise_exception=True) | ||||
|         saved_policy = serializer.save() | ||||
|  | ||||
|         # Generate agent checks only if active and enforced were changed | ||||
|         if saved_policy.active != old_active or saved_policy.enforced != old_enforced: | ||||
|             generate_agent_checks_from_policies_task.delay( | ||||
|                 policypk=policy.pk, | ||||
|                 clear=(not saved_policy.active or not saved_policy.enforced), | ||||
|                 create_tasks=(saved_policy.active != old_active), | ||||
|             ) | ||||
|         serializer.save() | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|     def delete(self, request, pk): | ||||
|         policy = get_object_or_404(Policy, pk=pk) | ||||
|  | ||||
|         # delete all managed policy checks off of agents | ||||
|         generate_agent_checks_from_policies_task.delay(policypk=policy.pk, clear=True) | ||||
|         generate_agent_tasks_from_policies_task.delay(policypk=policy.pk, clear=True) | ||||
|         policy.delete() | ||||
|         get_object_or_404(Policy, pk=pk).delete() | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
| @@ -104,8 +81,8 @@ class PolicyAutoTask(APIView): | ||||
|  | ||||
|     # tasks associated with policy | ||||
|     def get(self, request, pk): | ||||
|         policy = get_object_or_404(Policy, pk=pk) | ||||
|         return Response(AutoTaskPolicySerializer(policy).data) | ||||
|         tasks = AutomatedTask.objects.filter(policy=pk) | ||||
|         return Response(AutoTasksFieldSerializer(tasks, many=True).data) | ||||
|  | ||||
|     # get status of all tasks | ||||
|     def patch(self, request, task): | ||||
| @@ -184,213 +161,12 @@ class GetRelated(APIView): | ||||
|         ).data | ||||
|  | ||||
|         response["agents"] = AgentHostnameSerializer( | ||||
|             policy.related_agents(), | ||||
|             policy.related_agents().only("pk", "hostname"), | ||||
|             many=True, | ||||
|         ).data | ||||
|  | ||||
|         return Response(response) | ||||
|  | ||||
|     # update agents, clients, sites to policy | ||||
|     def post(self, request): | ||||
|  | ||||
|         related_type = request.data["type"] | ||||
|         pk = request.data["pk"] | ||||
|  | ||||
|         # workstation policy is set | ||||
|         if ( | ||||
|             "workstation_policy" in request.data | ||||
|             and request.data["workstation_policy"] != 0 | ||||
|         ): | ||||
|             policy = get_object_or_404(Policy, pk=request.data["workstation_policy"]) | ||||
|  | ||||
|             if related_type == "client": | ||||
|                 client = get_object_or_404(Client, pk=pk) | ||||
|  | ||||
|                 # Check and see if workstation policy changed and regenerate policies | ||||
|                 if ( | ||||
|                     not client.workstation_policy | ||||
|                     or client.workstation_policy | ||||
|                     and client.workstation_policy.pk != policy.pk | ||||
|                 ): | ||||
|                     client.workstation_policy = policy | ||||
|                     client.save() | ||||
|  | ||||
|                     generate_agent_checks_by_location_task.delay( | ||||
|                         location={"site__client_id": client.id}, | ||||
|                         mon_type="workstation", | ||||
|                         clear=True, | ||||
|                         create_tasks=True, | ||||
|                     ) | ||||
|  | ||||
|             if related_type == "site": | ||||
|                 site = get_object_or_404(Site, pk=pk) | ||||
|  | ||||
|                 # Check and see if workstation policy changed and regenerate policies | ||||
|                 if ( | ||||
|                     not site.workstation_policy | ||||
|                     or site.workstation_policy | ||||
|                     and site.workstation_policy.pk != policy.pk | ||||
|                 ): | ||||
|                     site.workstation_policy = policy | ||||
|                     site.save() | ||||
|                     generate_agent_checks_by_location_task.delay( | ||||
|                         location={"site_id": site.id}, | ||||
|                         mon_type="workstation", | ||||
|                         clear=True, | ||||
|                         create_tasks=True, | ||||
|                     ) | ||||
|  | ||||
|         # server policy is set | ||||
|         if "server_policy" in request.data and request.data["server_policy"] != 0: | ||||
|             policy = get_object_or_404(Policy, pk=request.data["server_policy"]) | ||||
|  | ||||
|             if related_type == "client": | ||||
|                 client = get_object_or_404(Client, pk=pk) | ||||
|  | ||||
|                 # Check and see if server policy changed and regenerate policies | ||||
|                 if ( | ||||
|                     not client.server_policy | ||||
|                     or client.server_policy | ||||
|                     and client.server_policy.pk != policy.pk | ||||
|                 ): | ||||
|                     client.server_policy = policy | ||||
|                     client.save() | ||||
|                     generate_agent_checks_by_location_task.delay( | ||||
|                         location={"site__client_id": client.id}, | ||||
|                         mon_type="server", | ||||
|                         clear=True, | ||||
|                         create_tasks=True, | ||||
|                     ) | ||||
|  | ||||
|             if related_type == "site": | ||||
|                 site = get_object_or_404(Site, pk=pk) | ||||
|  | ||||
|                 # Check and see if server policy changed and regenerate policies | ||||
|                 if ( | ||||
|                     not site.server_policy | ||||
|                     or site.server_policy | ||||
|                     and site.server_policy.pk != policy.pk | ||||
|                 ): | ||||
|                     site.server_policy = policy | ||||
|                     site.save() | ||||
|                     generate_agent_checks_by_location_task.delay( | ||||
|                         location={"site_id": site.id}, | ||||
|                         mon_type="server", | ||||
|                         clear=True, | ||||
|                         create_tasks=True, | ||||
|                     ) | ||||
|  | ||||
|         # If workstation policy was cleared | ||||
|         if ( | ||||
|             "workstation_policy" in request.data | ||||
|             and request.data["workstation_policy"] == 0 | ||||
|         ): | ||||
|             if related_type == "client": | ||||
|                 client = get_object_or_404(Client, pk=pk) | ||||
|  | ||||
|                 # Check if workstation policy is set and update it to None | ||||
|                 if client.workstation_policy: | ||||
|  | ||||
|                     client.workstation_policy = None | ||||
|                     client.save() | ||||
|                     generate_agent_checks_by_location_task.delay( | ||||
|                         location={"site__client_id": client.id}, | ||||
|                         mon_type="workstation", | ||||
|                         clear=True, | ||||
|                         create_tasks=True, | ||||
|                     ) | ||||
|  | ||||
|             if related_type == "site": | ||||
|                 site = get_object_or_404(Site, pk=pk) | ||||
|  | ||||
|                 # Check if workstation policy is set and update it to None | ||||
|                 if site.workstation_policy: | ||||
|  | ||||
|                     site.workstation_policy = None | ||||
|                     site.save() | ||||
|                     generate_agent_checks_by_location_task.delay( | ||||
|                         location={"site_id": site.id}, | ||||
|                         mon_type="workstation", | ||||
|                         clear=True, | ||||
|                         create_tasks=True, | ||||
|                     ) | ||||
|  | ||||
|         # server policy cleared | ||||
|         if "server_policy" in request.data and request.data["server_policy"] == 0: | ||||
|  | ||||
|             if related_type == "client": | ||||
|                 client = get_object_or_404(Client, pk=pk) | ||||
|  | ||||
|                 # Check if server policy is set and update it to None | ||||
|                 if client.server_policy: | ||||
|  | ||||
|                     client.server_policy = None | ||||
|                     client.save() | ||||
|                     generate_agent_checks_by_location_task.delay( | ||||
|                         location={"site__client_id": client.id}, | ||||
|                         mon_type="server", | ||||
|                         clear=True, | ||||
|                         create_tasks=True, | ||||
|                     ) | ||||
|  | ||||
|             if related_type == "site": | ||||
|                 site = get_object_or_404(Site, pk=pk) | ||||
|                 # Check if server policy is set and update it to None | ||||
|                 if site.server_policy: | ||||
|  | ||||
|                     site.server_policy = None | ||||
|                     site.save() | ||||
|                     generate_agent_checks_by_location_task.delay( | ||||
|                         location={"site_id": site.pk}, | ||||
|                         mon_type="server", | ||||
|                         clear=True, | ||||
|                         create_tasks=True, | ||||
|                     ) | ||||
|  | ||||
|         # agent policies | ||||
|         if related_type == "agent": | ||||
|             agent = get_object_or_404(Agent, pk=pk) | ||||
|  | ||||
|             if "policy" in request.data and request.data["policy"] != 0: | ||||
|                 policy = Policy.objects.get(pk=request.data["policy"]) | ||||
|  | ||||
|                 # Check and see if policy changed and regenerate policies | ||||
|                 if not agent.policy or agent.policy and agent.policy.pk != policy.pk: | ||||
|                     agent.policy = policy | ||||
|                     agent.save() | ||||
|                     agent.generate_checks_from_policies(clear=True) | ||||
|                     agent.generate_tasks_from_policies(clear=True) | ||||
|             else: | ||||
|                 if agent.policy: | ||||
|                     agent.policy = None | ||||
|                     agent.save() | ||||
|                     agent.generate_checks_from_policies(clear=True) | ||||
|                     agent.generate_tasks_from_policies(clear=True) | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|     # view to get policies set on client, site, and workstation | ||||
|     def patch(self, request): | ||||
|         related_type = request.data["type"] | ||||
|  | ||||
|         # client, site, or agent pk | ||||
|         pk = request.data["pk"] | ||||
|  | ||||
|         if related_type == "agent": | ||||
|             agent = Agent.objects.get(pk=pk) | ||||
|             return Response(RelatedAgentPolicySerializer(agent).data) | ||||
|  | ||||
|         if related_type == "site": | ||||
|             site = Site.objects.get(pk=pk) | ||||
|             return Response(RelatedSitePolicySerializer(site).data) | ||||
|  | ||||
|         if related_type == "client": | ||||
|             client = Client.objects.get(pk=pk) | ||||
|             return Response(RelatedClientPolicySerializer(client).data) | ||||
|  | ||||
|         content = {"error": "Data was submitted incorrectly"} | ||||
|         return Response(content, status=status.HTTP_400_BAD_REQUEST) | ||||
|  | ||||
|  | ||||
| class UpdatePatchPolicy(APIView): | ||||
|  | ||||
| @@ -422,11 +198,15 @@ class UpdatePatchPolicy(APIView): | ||||
|  | ||||
|         agents = None | ||||
|         if "client" in request.data: | ||||
|             agents = Agent.objects.filter(site__client_id=request.data["client"]) | ||||
|             agents = Agent.objects.prefetch_related("winupdatepolicy").filter( | ||||
|                 site__client_id=request.data["client"] | ||||
|             ) | ||||
|         elif "site" in request.data: | ||||
|             agents = Agent.objects.filter(site_id=request.data["site"]) | ||||
|             agents = Agent.objects.prefetch_related("winupdatepolicy").filter( | ||||
|                 site_id=request.data["site"] | ||||
|             ) | ||||
|         else: | ||||
|             agents = Agent.objects.all() | ||||
|             agents = Agent.objects.prefetch_related("winupdatepolicy").only("pk") | ||||
|  | ||||
|         for agent in agents: | ||||
|             winupdatepolicy = agent.winupdatepolicy.get() | ||||
|   | ||||
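The UpdatePatchPolicy change above adds `prefetch_related("winupdatepolicy")` to each agent queryset, so the related WinUpdatePolicy rows for every matched agent are loaded in one batched extra query instead of once per agent inside the loop. A minimal sketch of the pattern (illustrative only, not this view's exact code; the `site_id=1` filter is an arbitrary example):

    from agents.models import Agent

    # N+1 pattern: one query for the agents, then one query per agent for its policies
    for agent in Agent.objects.filter(site_id=1):
        policies = agent.winupdatepolicy.all()

    # Two queries total: the agents, plus a single batched query for all related policies;
    # .all() on the related manager is then served from the prefetch cache
    for agent in Agent.objects.prefetch_related("winupdatepolicy").filter(site_id=1):
        policies = agent.winupdatepolicy.all()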
| @@ -7,7 +7,7 @@ class Command(BaseCommand): | ||||
|     help = "Checks for orphaned tasks on all agents and removes them" | ||||
|  | ||||
|     def handle(self, *args, **kwargs): | ||||
|         agents = Agent.objects.all() | ||||
|         agents = Agent.objects.only("pk", "last_seen", "overdue_time", "offline_time") | ||||
|         online = [i for i in agents if i.status == "online"] | ||||
|         for agent in online: | ||||
|             remove_orphaned_win_tasks.delay(agent.pk) | ||||
|   | ||||
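In the management command above, `.only("pk", "last_seen", "overdue_time", "offline_time")` restricts the initial SELECT to the columns the `status` property reads, keeping the per-agent loop cheap. The trade-off is that every other field becomes deferred and costs an extra query if touched; a rough sketch (assuming `status` is computed only from the listed columns, as the chosen field list suggests):

    agents = Agent.objects.only("pk", "last_seen", "overdue_time", "offline_time")
    for agent in agents:
        agent.status      # uses only the loaded columns, no extra query
        # agent.hostname  # deferred field: reading it here would issue one query per agent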
| @@ -6,13 +6,13 @@ from django.db import migrations, models | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('autotasks', '0008_auto_20201030_1515'), | ||||
|         ("autotasks", "0008_auto_20201030_1515"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='automatedtask', | ||||
|             name='run_time_bit_weekdays', | ||||
|             model_name="automatedtask", | ||||
|             name="run_time_bit_weekdays", | ||||
|             field=models.IntegerField(blank=True, null=True), | ||||
|         ), | ||||
|     ] | ||||
|   | ||||
| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 3.1.4 on 2021-01-27 22:21 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('autotasks', '0010_migrate_days_to_bitdays'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='automatedtask', | ||||
|             name='alert_severity', | ||||
|             field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='None', max_length=30, null=True), | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,33 @@ | ||||
| # Generated by Django 3.1.4 on 2021-01-28 04:17 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('autotasks', '0011_automatedtask_alert_severity'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='automatedtask', | ||||
|             name='email_alert', | ||||
|             field=models.BooleanField(default=False), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='automatedtask', | ||||
|             name='email_sent', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='automatedtask', | ||||
|             name='text_alert', | ||||
|             field=models.BooleanField(default=False), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='automatedtask', | ||||
|             name='text_sent', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 3.1.4 on 2021-01-29 03:07 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('autotasks', '0012_auto_20210128_0417'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name='automatedtask', | ||||
|             name='alert_severity', | ||||
|             field=models.CharField(choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='info', max_length=30), | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 3.1.4 on 2021-01-29 21:11 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('autotasks', '0013_auto_20210129_0307'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='automatedtask', | ||||
|             name='dashboard_alert', | ||||
|             field=models.BooleanField(default=False), | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,23 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-05 17:28 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('autotasks', '0014_automatedtask_dashboard_alert'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='automatedtask', | ||||
|             name='resolved_email_sent', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='automatedtask', | ||||
|             name='resolved_text_sent', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-05 21:17 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('autotasks', '0015_auto_20210205_1728'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='automatedtask', | ||||
|             name='status', | ||||
|             field=models.CharField(choices=[('passing', 'Passing'), ('failing', 'Failing'), ('pending', 'Pending')], default='pending', max_length=30), | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,29 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-10 15:12 | ||||
|  | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('autotasks', '0016_automatedtask_status'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RemoveField( | ||||
|             model_name='automatedtask', | ||||
|             name='email_sent', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='automatedtask', | ||||
|             name='resolved_email_sent', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='automatedtask', | ||||
|             name='resolved_text_sent', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='automatedtask', | ||||
|             name='text_sent', | ||||
|         ), | ||||
|     ] | ||||
| @@ -3,13 +3,20 @@ import random | ||||
| import string | ||||
| import datetime as dt | ||||
|  | ||||
| from django.utils import timezone as djangotime | ||||
| from django.conf import settings | ||||
| from django.db import models | ||||
| from django.contrib.postgres.fields import ArrayField | ||||
| from django.db.models.fields import DateTimeField | ||||
| from automation.models import Policy | ||||
| from logs.models import BaseAuditModel | ||||
| from tacticalrmm.utils import bitdays_to_string | ||||
|  | ||||
| from loguru import logger | ||||
|  | ||||
| from alerts.models import SEVERITY_CHOICES | ||||
|  | ||||
| logger.configure(**settings.LOG_CONFIG) | ||||
|  | ||||
| RUN_TIME_DAY_CHOICES = [ | ||||
|     (0, "Monday"), | ||||
|     (1, "Tuesday"), | ||||
| @@ -33,6 +40,12 @@ SYNC_STATUS_CHOICES = [ | ||||
|     ("pendingdeletion", "Pending Deletion on Agent"), | ||||
| ] | ||||
|  | ||||
| TASK_STATUS_CHOICES = [ | ||||
|     ("passing", "Passing"), | ||||
|     ("failing", "Failing"), | ||||
|     ("pending", "Pending"), | ||||
| ] | ||||
|  | ||||
|  | ||||
| class AutomatedTask(BaseAuditModel): | ||||
|     agent = models.ForeignKey( | ||||
| @@ -43,7 +56,7 @@ class AutomatedTask(BaseAuditModel): | ||||
|         blank=True, | ||||
|     ) | ||||
|     policy = models.ForeignKey( | ||||
|         Policy, | ||||
|         "automation.Policy", | ||||
|         related_name="autotasks", | ||||
|         null=True, | ||||
|         blank=True, | ||||
| @@ -94,9 +107,18 @@ class AutomatedTask(BaseAuditModel): | ||||
|     execution_time = models.CharField(max_length=100, default="0.0000") | ||||
|     last_run = models.DateTimeField(null=True, blank=True) | ||||
|     enabled = models.BooleanField(default=True) | ||||
|     status = models.CharField( | ||||
|         max_length=30, choices=TASK_STATUS_CHOICES, default="pending" | ||||
|     ) | ||||
|     sync_status = models.CharField( | ||||
|         max_length=100, choices=SYNC_STATUS_CHOICES, default="notsynced" | ||||
|     ) | ||||
|     alert_severity = models.CharField( | ||||
|         max_length=30, choices=SEVERITY_CHOICES, default="info" | ||||
|     ) | ||||
|     email_alert = models.BooleanField(default=False) | ||||
|     text_alert = models.BooleanField(default=False) | ||||
|     dashboard_alert = models.BooleanField(default=False) | ||||
|  | ||||
|     def __str__(self): | ||||
|         return self.name | ||||
| @@ -141,22 +163,50 @@ class AutomatedTask(BaseAuditModel): | ||||
|     def create_policy_task(self, agent=None, policy=None): | ||||
|         from .tasks import create_win_task_schedule | ||||
|  | ||||
|         # if policy is present, then this task is being copied to another policy | ||||
|         # if agent is present, then this task is being created on an agent from a policy | ||||
|  | ||||
|         # exit if neither are set or if both are set | ||||
|         if not agent and not policy or agent and policy: | ||||
|             return | ||||
|  | ||||
|         assigned_check = None | ||||
|  | ||||
|         # get correct assigned check to task if set | ||||
|         if agent and self.assigned_check: | ||||
|             assigned_check = agent.agentchecks.get(parent_check=self.assigned_check.pk) | ||||
|             # check if there is a matching check on the agent | ||||
|             if agent.agentchecks.filter(parent_check=self.assigned_check.pk).exists(): | ||||
|                 assigned_check = agent.agentchecks.filter( | ||||
|                     parent_check=self.assigned_check.pk | ||||
|                 ).first() | ||||
|             # check was overridden by the agent, so use that agent's check | ||||
|             else: | ||||
|                 if agent.agentchecks.filter( | ||||
|                     check_type=self.assigned_check.check_type, overriden_by_policy=True | ||||
|                 ).exists(): | ||||
|                     assigned_check = agent.agentchecks.filter( | ||||
|                         check_type=self.assigned_check.check_type, | ||||
|                         overriden_by_policy=True, | ||||
|                     ).first() | ||||
|         elif policy and self.assigned_check: | ||||
|             assigned_check = policy.policychecks.get(name=self.assigned_check.name) | ||||
|             if policy.policychecks.filter(name=self.assigned_check.name).exists(): | ||||
|                 assigned_check = policy.policychecks.filter( | ||||
|                     name=self.assigned_check.name | ||||
|                 ).first() | ||||
|             else: | ||||
|                 assigned_check = policy.policychecks.filter( | ||||
|                     check_type=self.assigned_check.check_type | ||||
|                 ).first() | ||||
|  | ||||
|         task = AutomatedTask.objects.create( | ||||
|             agent=agent, | ||||
|             policy=policy, | ||||
|             managed_by_policy=bool(agent), | ||||
|             parent_task=(self.pk if agent else None), | ||||
|             alert_severity=self.alert_severity, | ||||
|             email_alert=self.email_alert, | ||||
|             text_alert=self.text_alert, | ||||
|             dashboard_alert=self.dashboard_alert, | ||||
|             script=self.script, | ||||
|             script_args=self.script_args, | ||||
|             assigned_check=assigned_check, | ||||
| @@ -173,3 +223,215 @@ class AutomatedTask(BaseAuditModel): | ||||
|         ) | ||||
|  | ||||
|         create_win_task_schedule.delay(task.pk) | ||||
|  | ||||
|     def handle_alert(self) -> None: | ||||
|         from alerts.models import Alert, AlertTemplate | ||||
|         from autotasks.tasks import ( | ||||
|             handle_task_email_alert, | ||||
|             handle_task_sms_alert, | ||||
|             handle_resolved_task_sms_alert, | ||||
|             handle_resolved_task_email_alert, | ||||
|         ) | ||||
|  | ||||
|         self.status = "failing" if self.retcode != 0 else "passing" | ||||
|         self.save() | ||||
|  | ||||
|         # return if agent is in maintenance mode | ||||
|         if self.agent.maintenance_mode: | ||||
|             return | ||||
|  | ||||
|         # see if agent has an alert template and use that | ||||
|         alert_template = self.agent.get_alert_template() | ||||
|  | ||||
|         # resolve alert if it exists | ||||
|         if self.status == "passing": | ||||
|             if Alert.objects.filter(assigned_task=self, resolved=False).exists(): | ||||
|                 alert = Alert.objects.get(assigned_task=self, resolved=False) | ||||
|                 alert.resolve() | ||||
|  | ||||
|                 # check if resolved email should be sent | ||||
|                 if ( | ||||
|                     not alert.resolved_email_sent | ||||
|                     and self.email_alert | ||||
|                     or alert_template | ||||
|                     and alert_template.task_email_on_resolved | ||||
|                 ): | ||||
|                     handle_resolved_task_email_alert.delay(pk=alert.pk) | ||||
|  | ||||
|                 # check if resolved text should be sent | ||||
|                 if ( | ||||
|                     not alert.resolved_sms_sent | ||||
|                     and self.text_alert | ||||
|                     or alert_template | ||||
|                     and alert_template.task_text_on_resolved | ||||
|                 ): | ||||
|                     handle_resolved_task_sms_alert.delay(pk=alert.pk) | ||||
|  | ||||
|                 # check if resolved script should be run | ||||
|                 if ( | ||||
|                     alert_template | ||||
|                     and alert_template.resolved_action | ||||
|                     and not alert.resolved_action_run | ||||
|                 ): | ||||
|  | ||||
|                     r = self.agent.run_script( | ||||
|                         scriptpk=alert_template.resolved_action.pk, | ||||
|                         args=alert_template.resolved_action_args, | ||||
|                         timeout=alert_template.resolved_action_timeout, | ||||
|                         wait=True, | ||||
|                         full=True, | ||||
|                         run_on_any=True, | ||||
|                     ) | ||||
|  | ||||
|                     # command was successful | ||||
|                     if type(r) == dict: | ||||
|                         alert.resolved_action_retcode = r["retcode"] | ||||
|                         alert.resolved_action_stdout = r["stdout"] | ||||
|                         alert.resolved_action_stderr = r["stderr"] | ||||
|                         alert.resolved_action_execution_time = "{:.4f}".format( | ||||
|                             r["execution_time"] | ||||
|                         ) | ||||
|                         alert.resolved_action_run = djangotime.now() | ||||
|                         alert.save() | ||||
|                     else: | ||||
|                         logger.error( | ||||
|                             f"Resolved action: {alert_template.action.name} failed to run on any agent for {self.agent.hostname} resolved alert for task: {self.name}" | ||||
|                         ) | ||||
|  | ||||
|         # create alert if task is failing | ||||
|         else: | ||||
|             if not Alert.objects.filter(assigned_task=self, resolved=False).exists(): | ||||
|                 alert = Alert.create_task_alert(self) | ||||
|             else: | ||||
|                 alert = Alert.objects.get(assigned_task=self, resolved=False) | ||||
|  | ||||
|                 # check if alert severity changed on task and update the alert | ||||
|                 if self.alert_severity != alert.severity: | ||||
|                     alert.severity = self.alert_severity | ||||
|                     alert.save(update_fields=["severity"]) | ||||
|  | ||||
|             # create alert in dashboard if enabled | ||||
|             if ( | ||||
|                 self.dashboard_alert | ||||
|                 or alert_template | ||||
|                 and alert_template.task_always_alert | ||||
|             ): | ||||
|                 alert.hidden = False | ||||
|                 alert.save() | ||||
|  | ||||
|             # send email if enabled | ||||
|             if ( | ||||
|                 not alert.email_sent | ||||
|                 and self.email_alert | ||||
|                 or alert_template | ||||
|                 and self.alert_severity in alert_template.task_email_alert_severity | ||||
|                 and alert_template.check_always_email | ||||
|             ): | ||||
|                 handle_task_email_alert.delay( | ||||
|                     pk=alert.pk, | ||||
|                     alert_template=alert_template.check_periodic_alert_days | ||||
|                     if alert_template | ||||
|                     else None, | ||||
|                 ) | ||||
|  | ||||
|             # send text if enabled | ||||
|             if ( | ||||
|                 not alert.sms_sent | ||||
|                 and self.text_alert | ||||
|                 or alert_template | ||||
|                 and self.alert_severity in alert_template.task_text_alert_severity | ||||
|                 and alert_template.check_always_text | ||||
|             ): | ||||
|                 handle_task_sms_alert.delay( | ||||
|                     pk=alert.pk, | ||||
|                     alert_template=alert_template.check_periodic_alert_days | ||||
|                     if alert_template | ||||
|                     else None, | ||||
|                 ) | ||||
|  | ||||
|             # check if any scripts should be run | ||||
|             if alert_template and alert_template.action and not alert.action_run: | ||||
|                 r = self.agent.run_script( | ||||
|                     scriptpk=alert_template.action.pk, | ||||
|                     args=alert_template.action_args, | ||||
|                     timeout=alert_template.action_timeout, | ||||
|                     wait=True, | ||||
|                     full=True, | ||||
|                     run_on_any=True, | ||||
|                 ) | ||||
|  | ||||
|                 # command was successful | ||||
|                 if type(r) == dict: | ||||
|                     alert.action_retcode = r["retcode"] | ||||
|                     alert.action_stdout = r["stdout"] | ||||
|                     alert.action_stderr = r["stderr"] | ||||
|                     alert.action_execution_time = "{:.4f}".format(r["execution_time"]) | ||||
|                     alert.action_run = djangotime.now() | ||||
|                     alert.save() | ||||
|                 else: | ||||
|                     logger.error( | ||||
|                         f"Failure action: {alert_template.action.name} failed to run on any agent for {self.agent.hostname} failure alert for task: {self.name}" | ||||
|                     ) | ||||
|  | ||||
|     def send_email(self): | ||||
|         from core.models import CoreSettings | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         alert_template = self.agent.get_alert_template() | ||||
|  | ||||
|         if self.agent: | ||||
|             subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed" | ||||
|         else: | ||||
|             subject = f"{self} Failed" | ||||
|  | ||||
|         body = ( | ||||
|             subject | ||||
|             + f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}" | ||||
|         ) | ||||
|  | ||||
|         CORE.send_mail(subject, body, alert_template) | ||||
|  | ||||
|     def send_sms(self): | ||||
|  | ||||
|         from core.models import CoreSettings | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         alert_template = self.agent.get_alert_template() | ||||
|  | ||||
|         if self.agent: | ||||
|             subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed" | ||||
|         else: | ||||
|             subject = f"{self} Failed" | ||||
|  | ||||
|         body = ( | ||||
|             subject | ||||
|             + f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}" | ||||
|         ) | ||||
|  | ||||
|         CORE.send_sms(body, alert_template=alert_template) | ||||
|  | ||||
|     def send_resolved_email(self): | ||||
|         from core.models import CoreSettings | ||||
|  | ||||
|         alert_template = self.agent.get_alert_template() | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved" | ||||
|         body = ( | ||||
|             subject | ||||
|             + f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}" | ||||
|         ) | ||||
|  | ||||
|         CORE.send_mail(subject, body, alert_template=alert_template) | ||||
|  | ||||
|     def send_resolved_sms(self): | ||||
|         from core.models import CoreSettings | ||||
|  | ||||
|         alert_template = self.agent.get_alert_template() | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved" | ||||
|         body = ( | ||||
|             subject | ||||
|             + f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}" | ||||
|         ) | ||||
|         CORE.send_sms(body, alert_template=alert_template) | ||||
|   | ||||
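Several conditions in the new `handle_alert` method mix `and`/`or` without parentheses. Since `and` binds more tightly than `or` in Python, the resolved-email check, for example, evaluates as the parenthesized form below (an equivalent reading for reference, not a proposed change; `alert` and `alert_template` are the method's local variables):

    if (
        (not alert.resolved_email_sent and self.email_alert)
        or (alert_template and alert_template.task_email_on_resolved)
    ):
        handle_resolved_task_email_alert.delay(pk=alert.pk)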
| @@ -6,6 +6,9 @@ from django.conf import settings | ||||
| import pytz | ||||
| from django.utils import timezone as djangotime | ||||
| from packaging import version as pyver | ||||
| from typing import Union | ||||
| import random | ||||
| from time import sleep | ||||
|  | ||||
| from .models import AutomatedTask | ||||
| from logs.models import PendingAction | ||||
| @@ -76,9 +79,14 @@ def create_win_task_schedule(pk, pending_action=False): | ||||
|         return "error" | ||||
|  | ||||
|     r = asyncio.run(task.agent.nats_cmd(nats_data, timeout=10)) | ||||
|  | ||||
|     if r != "ok": | ||||
|         # don't create pending action if this task was initiated by a pending action | ||||
|         if not pending_action: | ||||
|  | ||||
|             # complete any other pending actions on agent with same task_id | ||||
|             task.agent.remove_matching_pending_task_actions(task.id) | ||||
|  | ||||
|             PendingAction( | ||||
|                 agent=task.agent, | ||||
|                 action_type="taskaction", | ||||
| @@ -144,6 +152,7 @@ def enable_or_disable_win_task(pk, action, pending_action=False): | ||||
|  | ||||
|     task.sync_status = "synced" | ||||
|     task.save(update_fields=["sync_status"]) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @@ -157,9 +166,13 @@ def delete_win_task_schedule(pk, pending_action=False): | ||||
|     } | ||||
|     r = asyncio.run(task.agent.nats_cmd(nats_data, timeout=10)) | ||||
|  | ||||
|     if r != "ok": | ||||
|     if r != "ok" and "The system cannot find the file specified" not in r: | ||||
|         # don't create pending action if this task was initiated by a pending action | ||||
|         if not pending_action: | ||||
|  | ||||
|             # complete any other pending actions on agent with same task_id | ||||
|             task.agent.remove_matching_pending_task_actions(task.id) | ||||
|  | ||||
|             PendingAction( | ||||
|                 agent=task.agent, | ||||
|                 action_type="taskaction", | ||||
| @@ -168,7 +181,7 @@ def delete_win_task_schedule(pk, pending_action=False): | ||||
|             task.sync_status = "pendingdeletion" | ||||
|             task.save(update_fields=["sync_status"]) | ||||
|  | ||||
|         return | ||||
|         return "timeout" | ||||
|  | ||||
|     # complete pending action since it was successful | ||||
|     if pending_action: | ||||
| @@ -176,6 +189,9 @@ def delete_win_task_schedule(pk, pending_action=False): | ||||
|         pendingaction.status = "completed" | ||||
|         pendingaction.save(update_fields=["status"]) | ||||
|  | ||||
|     # complete any other pending actions on agent with same task_id | ||||
|     task.agent.remove_matching_pending_task_actions(task.id) | ||||
|  | ||||
|     task.delete() | ||||
|     return "ok" | ||||
|  | ||||
| @@ -230,3 +246,85 @@ def remove_orphaned_win_tasks(agentpk): | ||||
|                 logger.info(f"Removed orphaned task {task} from {agent.hostname}") | ||||
|  | ||||
|     logger.info(f"Orphaned task cleanup finished on {agent.hostname}") | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def handle_task_email_alert(pk: int, alert_interval: Union[float, None] = None) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|  | ||||
|     # first time sending email | ||||
|     if not alert.email_sent: | ||||
|         sleep(random.randint(1, 10)) | ||||
|         alert.assigned_task.send_email() | ||||
|         alert.email_sent = djangotime.now() | ||||
|         alert.save(update_fields=["email_sent"]) | ||||
|     else: | ||||
|         if alert_interval: | ||||
|             # send an email only if the last email sent is older than alert interval | ||||
|             delta = djangotime.now() - dt.timedelta(days=alert_interval) | ||||
|             if alert.email_sent < delta: | ||||
|                 sleep(random.randint(1, 10)) | ||||
|                 alert.assigned_task.send_email() | ||||
|                 alert.email_sent = djangotime.now() | ||||
|                 alert.save(update_fields=["email_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def handle_task_sms_alert(pk: int, alert_interval: Union[float, None] = None) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|  | ||||
|     # first time sending text | ||||
|     if not alert.sms_sent: | ||||
|         sleep(random.randint(1, 3)) | ||||
|         alert.assigned_task.send_sms() | ||||
|         alert.sms_sent = djangotime.now() | ||||
|         alert.save(update_fields=["sms_sent"]) | ||||
|     else: | ||||
|         if alert_interval: | ||||
|             # send a text only if the last text sent is older than alert interval | ||||
|             delta = djangotime.now() - dt.timedelta(days=alert_interval) | ||||
|             if alert.sms_sent < delta: | ||||
|                 sleep(random.randint(1, 3)) | ||||
|                 alert.assigned_task.send_sms() | ||||
|                 alert.sms_sent = djangotime.now() | ||||
|                 alert.save(update_fields=["sms_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def handle_resolved_task_sms_alert(pk: int) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|  | ||||
|     # first time sending text | ||||
|     if not alert.resolved_sms_sent: | ||||
|         sleep(random.randint(1, 3)) | ||||
|         alert.assigned_task.send_resolved_sms() | ||||
|         alert.resolved_sms_sent = djangotime.now() | ||||
|         alert.save(update_fields=["resolved_sms_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def handle_resolved_task_email_alert(pk: int) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|  | ||||
|     # first time sending email | ||||
|     if not alert.resolved_email_sent: | ||||
|         sleep(random.randint(1, 10)) | ||||
|         alert.assigned_task.send_resolved_email() | ||||
|         alert.resolved_email_sent = djangotime.now() | ||||
|         alert.save(update_fields=["resolved_email_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|   | ||||
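The re-alert throttling in `handle_task_email_alert` and `handle_task_sms_alert` only sends again when the previous notification is older than `alert_interval` days. Stripped of the task plumbing, the date check amounts to this (a standalone sketch; `alert` stands for the `alerts.models.Alert` row loaded in the tasks, and the interval value here is arbitrary):

    import datetime as dt
    from django.utils import timezone as djangotime

    alert_interval = 1.0  # days, supplied by the caller
    cutoff = djangotime.now() - dt.timedelta(days=alert_interval)

    if alert.email_sent and alert.email_sent < cutoff:
        # the last email predates the interval, so send again and stamp the new time
        alert.assigned_task.send_email()
        alert.email_sent = djangotime.now()
        alert.save(update_fields=["email_sent"])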
| @@ -150,7 +150,9 @@ class TestAutotaskViews(TacticalTestCase): | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         update_policy_task_fields_task.assert_called_with(policy_task.id, True) | ||||
|         update_policy_task_fields_task.assert_called_with( | ||||
|             policy_task.id, update_agent=True | ||||
|         ) | ||||
|  | ||||
|         self.check_not_authenticated("patch", url) | ||||
|  | ||||
|   | ||||
| @@ -81,6 +81,20 @@ class AutoTask(APIView): | ||||
|         } | ||||
|         return Response(AutoTaskSerializer(agent, context=ctx).data) | ||||
|  | ||||
|     def put(self, request, pk): | ||||
|         from automation.tasks import update_policy_task_fields_task | ||||
|  | ||||
|         task = get_object_or_404(AutomatedTask, pk=pk) | ||||
|  | ||||
|         serializer = TaskSerializer(instance=task, data=request.data, partial=True) | ||||
|         serializer.is_valid(raise_exception=True) | ||||
|         serializer.save() | ||||
|  | ||||
|         if task.policy: | ||||
|             update_policy_task_fields_task.delay(task.pk) | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|     def patch(self, request, pk): | ||||
|         from automation.tasks import update_policy_task_fields_task | ||||
|  | ||||
| @@ -93,7 +107,7 @@ class AutoTask(APIView): | ||||
|                 enable_or_disable_win_task.delay(pk=task.pk, action=action) | ||||
|  | ||||
|             else: | ||||
|                 update_policy_task_fields_task.delay(task.pk, action) | ||||
|                 update_policy_task_fields_task.delay(task.pk, update_agent=True) | ||||
|  | ||||
|             task.enabled = action | ||||
|             task.save(update_fields=["enabled"]) | ||||
|   | ||||
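The new `put` handler applies a partial `TaskSerializer` update and, when the task belongs to a policy, queues `update_policy_task_fields_task` so the copies on agents are refreshed. A hedged sketch of exercising it from a test, in the style of the existing autotask tests (the URL, fixture name, and payload are assumptions, not the project's actual test code):

    from unittest.mock import patch

    @patch("automation.tasks.update_policy_task_fields_task.delay")
    def test_update_policy_task(self, mock_update):
        url = f"/tasks/{self.policy_task.pk}/"   # assumed route for AutoTask.put
        resp = self.client.put(url, {"name": "Renamed task"}, format="json")
        self.assertEqual(resp.status_code, 200)
        mock_update.assert_called_with(self.policy_task.pk)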
| @@ -1,5 +1,6 @@ | ||||
| from django.contrib import admin | ||||
|  | ||||
| from .models import Check | ||||
| from .models import Check, CheckHistory | ||||
|  | ||||
| admin.site.register(Check) | ||||
| admin.site.register(CheckHistory) | ||||
|   | ||||
| @@ -1,15 +1,20 @@ | ||||
| from .models import Check | ||||
| from model_bakery.recipe import Recipe, seq | ||||
| from model_bakery.recipe import Recipe | ||||
|  | ||||
| check = Recipe(Check) | ||||
| check = Recipe("checks.Check") | ||||
|  | ||||
| diskspace_check = check.extend(check_type="diskspace", disk="C:", threshold=75) | ||||
| diskspace_check = check.extend( | ||||
|     check_type="diskspace", disk="C:", warning_threshold=30, error_threshold=75 | ||||
| ) | ||||
|  | ||||
| cpuload_check = check.extend(check_type="cpuload", threshold=75) | ||||
| cpuload_check = check.extend( | ||||
|     check_type="cpuload", warning_threshold=30, error_threshold=75 | ||||
| ) | ||||
|  | ||||
| ping_check = check.extend(check_type="ping", ip="10.10.10.10") | ||||
|  | ||||
| memory_check = check.extend(check_type="memory", threshold=75) | ||||
| memory_check = check.extend( | ||||
|     check_type="memory", warning_threshold=30, error_threshold=75 | ||||
| ) | ||||
|  | ||||
| winsvc_check = check.extend( | ||||
|     check_type="winsvc", | ||||
|   | ||||
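The recipes above now reference the model by its app label string and use the split warning/error thresholds. In tests they would typically be consumed through model_bakery's recipe API, for example (a small usage sketch; the override value is arbitrary):

    from model_bakery import baker

    # recipe defaults: check_type="diskspace", disk="C:", warning_threshold=30, error_threshold=75
    disk_check = baker.make_recipe("checks.diskspace_check")

    # defaults can be overridden per call
    cpu_check = baker.make_recipe("checks.cpuload_check", error_threshold=90)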
							
								
								
									
api/tacticalrmm/checks/migrations/0011_check_run_history.py (new file, 30 lines)
| @@ -0,0 +1,30 @@ | ||||
| # Generated by Django 3.1.4 on 2021-01-09 02:56 | ||||
|  | ||||
| import django.contrib.postgres.fields | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("checks", "0010_auto_20200922_1344"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="check", | ||||
|             name="run_history", | ||||
|             field=django.contrib.postgres.fields.ArrayField( | ||||
|                 base_field=django.contrib.postgres.fields.ArrayField( | ||||
|                     base_field=models.PositiveIntegerField(), | ||||
|                     blank=True, | ||||
|                     null=True, | ||||
|                     size=None, | ||||
|                 ), | ||||
|                 blank=True, | ||||
|                 default=list, | ||||
|                 null=True, | ||||
|                 size=None, | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
							
								
								
									
api/tacticalrmm/checks/migrations/0011_checkhistory.py (new file, 39 lines)
| @@ -0,0 +1,39 @@ | ||||
| # Generated by Django 3.1.4 on 2021-01-09 21:36 | ||||
|  | ||||
| from django.db import migrations, models | ||||
| import django.db.models.deletion | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("checks", "0010_auto_20200922_1344"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.CreateModel( | ||||
|             name="CheckHistory", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "id", | ||||
|                     models.AutoField( | ||||
|                         auto_created=True, | ||||
|                         primary_key=True, | ||||
|                         serialize=False, | ||||
|                         verbose_name="ID", | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("x", models.DateTimeField()), | ||||
|                 ("y", models.PositiveIntegerField()), | ||||
|                 ("results", models.JSONField(blank=True, null=True)), | ||||
|                 ( | ||||
|                     "check_history", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, | ||||
|                         related_name="check_history", | ||||
|                         to="checks.check", | ||||
|                     ), | ||||
|                 ), | ||||
|             ], | ||||
|         ), | ||||
|     ] | ||||
							
								
								
									
api/tacticalrmm/checks/migrations/0012_auto_20210110_0503.py (new file, 18 lines)
| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 3.1.4 on 2021-01-10 05:03 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("checks", "0011_checkhistory"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name="checkhistory", | ||||
|             name="y", | ||||
|             field=models.PositiveIntegerField(blank=True, null=True), | ||||
|         ), | ||||
|     ] | ||||
							
								
								
									
api/tacticalrmm/checks/migrations/0013_auto_20210110_0505.py (new file, 18 lines)
| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 3.1.4 on 2021-01-10 05:05 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("checks", "0012_auto_20210110_0503"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name="checkhistory", | ||||
|             name="y", | ||||
|             field=models.PositiveIntegerField(null=True), | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,13 @@ | ||||
| # Generated by Django 3.1.4 on 2021-01-10 18:08 | ||||
|  | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("checks", "0013_auto_20210110_0505"), | ||||
|         ("checks", "0011_check_run_history"), | ||||
|     ] | ||||
|  | ||||
|     operations = [] | ||||
							
								
								
									
api/tacticalrmm/checks/migrations/0015_auto_20210110_1808.py (new file, 27 lines)
| @@ -0,0 +1,27 @@ | ||||
| # Generated by Django 3.1.4 on 2021-01-10 18:08 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("checks", "0014_merge_20210110_1808"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RemoveField( | ||||
|             model_name="check", | ||||
|             name="run_history", | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="checkhistory", | ||||
|             name="x", | ||||
|             field=models.DateTimeField(auto_now_add=True), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="checkhistory", | ||||
|             name="y", | ||||
|             field=models.PositiveIntegerField(blank=True, default=None, null=True), | ||||
|         ), | ||||
|     ] | ||||
							
								
								
									
api/tacticalrmm/checks/migrations/0016_auto_20210123_0149.py (new file, 43 lines)
| @@ -0,0 +1,43 @@ | ||||
| # Generated by Django 3.1.4 on 2021-01-23 01:49 | ||||
|  | ||||
| import django.contrib.postgres.fields | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('checks', '0015_auto_20210110_1808'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RemoveField( | ||||
|             model_name='check', | ||||
|             name='threshold', | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='check', | ||||
|             name='alert_severity', | ||||
|             field=models.CharField(choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='warning', max_length=15), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='check', | ||||
|             name='error_threshold', | ||||
|             field=models.PositiveIntegerField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='check', | ||||
|             name='info_return_codes', | ||||
|             field=django.contrib.postgres.fields.ArrayField(base_field=models.PositiveIntegerField(), blank=True, default=list, null=True, size=None), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='check', | ||||
|             name='warning_return_codes', | ||||
|             field=django.contrib.postgres.fields.ArrayField(base_field=models.PositiveIntegerField(), blank=True, default=list, null=True, size=None), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='check', | ||||
|             name='warning_threshold', | ||||
|             field=models.PositiveIntegerField(blank=True, default=0, null=True), | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 3.1.4 on 2021-01-29 21:11 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('checks', '0016_auto_20210123_0149'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='check', | ||||
|             name='dashboard_alert', | ||||
|             field=models.BooleanField(default=False), | ||||
|         ), | ||||
|     ] | ||||
							
								
								
									
api/tacticalrmm/checks/migrations/0018_auto_20210205_1647.py (new file, 18 lines)
| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-05 16:47 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('checks', '0017_check_dashboard_alert'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name='check', | ||||
|             name='alert_severity', | ||||
|             field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='warning', max_length=15, null=True), | ||||
|         ), | ||||
|     ] | ||||
							
								
								
									
api/tacticalrmm/checks/migrations/0019_auto_20210205_1728.py (new file, 23 lines)
| @@ -0,0 +1,23 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-05 17:28 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('checks', '0018_auto_20210205_1647'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='check', | ||||
|             name='resolved_email_sent', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='check', | ||||
|             name='resolved_text_sent', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|     ] | ||||
							
								
								
									
api/tacticalrmm/checks/migrations/0020_auto_20210210_1512.py (new file, 29 lines)
| @@ -0,0 +1,29 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-10 15:12 | ||||
|  | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('checks', '0019_auto_20210205_1728'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RemoveField( | ||||
|             model_name='check', | ||||
|             name='email_sent', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='check', | ||||
|             name='resolved_email_sent', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='check', | ||||
|             name='resolved_text_sent', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='check', | ||||
|             name='text_sent', | ||||
|         ), | ||||
|     ] | ||||
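Migration 0019 adds resolved_email_sent / resolved_text_sent to Check and migration 0020 immediately removes them again along with email_sent and text_sent, so notification bookkeeping moves off the Check model entirely. The reworked tasks further down read and write these timestamps on the Alert object instead (alert.email_sent, alert.sms_sent, alert.resolved_email_sent, alert.resolved_sms_sent). Below is a hypothetical sketch of the Alert fields those tasks assume; the real definitions live in alerts/models.py and are not shown in this diff:

# Hypothetical sketch of the timestamp fields the check tasks expect on Alert;
# the actual model is defined in alerts/models.py (not part of this diff).
from django.db import models

class Alert(models.Model):
    email_sent = models.DateTimeField(null=True, blank=True)
    sms_sent = models.DateTimeField(null=True, blank=True)
    resolved_email_sent = models.DateTimeField(null=True, blank=True)
    resolved_sms_sent = models.DateTimeField(null=True, blank=True)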
							
								
								
									
api/tacticalrmm/checks/migrations/0021_auto_20210212_1429.py (new file, 24 lines)
							| @@ -0,0 +1,24 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-12 14:29 | ||||
|  | ||||
| import django.core.validators | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('checks', '0020_auto_20210210_1512'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name='check', | ||||
|             name='error_threshold', | ||||
|             field=models.PositiveIntegerField(blank=True, default=0, null=True, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(99)]), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name='check', | ||||
|             name='warning_threshold', | ||||
|             field=models.PositiveIntegerField(blank=True, default=0, null=True, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(99)]), | ||||
|         ), | ||||
|     ] | ||||
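Worth noting: the 0-99 bounds on error_threshold and warning_threshold are Django validators, so they are enforced when a form, DRF serializer, or full_clean() runs, not by the database on a raw save(). A minimal standalone sketch of that behaviour using the same validator pair:

# Minimal sketch: the 0-99 bounds from this migration are checked by Django
# validators at validation time, not by a database constraint.
from django.core.exceptions import ValidationError
from django.core.validators import MinValueValidator, MaxValueValidator

bounds = [MinValueValidator(0), MaxValueValidator(99)]

def validate_threshold(value: int) -> None:
    # Mirrors the validators attached to error_threshold / warning_threshold.
    for validator in bounds:
        validator(value)

validate_threshold(85)        # accepted
try:
    validate_threshold(150)   # rejected by MaxValueValidator(99)
except ValidationError:
    print("threshold must be between 0 and 99")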
| @@ -5,15 +5,29 @@ import json | ||||
| import pytz | ||||
| from statistics import mean | ||||
|  | ||||
| from django.utils import timezone as djangotime | ||||
| from django.db import models | ||||
| from django.conf import settings | ||||
| from django.contrib.postgres.fields import ArrayField | ||||
| from django.core.validators import MinValueValidator, MaxValueValidator | ||||
| from django.contrib.postgres.fields import ArrayField | ||||
| from rest_framework.fields import JSONField | ||||
| from typing import List, Any | ||||
| from typing import Union | ||||
|  | ||||
| from loguru import logger | ||||
|  | ||||
| from core.models import CoreSettings | ||||
| from logs.models import BaseAuditModel | ||||
| from .tasks import handle_check_email_alert_task, handle_check_sms_alert_task | ||||
| from .tasks import ( | ||||
|     handle_check_email_alert_task, | ||||
|     handle_check_sms_alert_task, | ||||
|     handle_resolved_check_email_alert_task, | ||||
|     handle_resolved_check_sms_alert_task, | ||||
| ) | ||||
| from .utils import bytes2human | ||||
| from alerts.models import SEVERITY_CHOICES | ||||
|  | ||||
| logger.configure(**settings.LOG_CONFIG) | ||||
|  | ||||
| CHECK_TYPE_CHOICES = [ | ||||
|     ("diskspace", "Disk Space Check"), | ||||
| @@ -83,18 +97,34 @@ class Check(BaseAuditModel): | ||||
|     last_run = models.DateTimeField(null=True, blank=True) | ||||
|     email_alert = models.BooleanField(default=False) | ||||
|     text_alert = models.BooleanField(default=False) | ||||
|     dashboard_alert = models.BooleanField(default=False) | ||||
|     fails_b4_alert = models.PositiveIntegerField(default=1) | ||||
|     fail_count = models.PositiveIntegerField(default=0) | ||||
|     email_sent = models.DateTimeField(null=True, blank=True) | ||||
|     text_sent = models.DateTimeField(null=True, blank=True) | ||||
|     outage_history = models.JSONField(null=True, blank=True)  # store | ||||
|     extra_details = models.JSONField(null=True, blank=True) | ||||
|  | ||||
|     # check specific fields | ||||
|  | ||||
|     # for eventlog, script, ip, and service alert severity | ||||
|     alert_severity = models.CharField( | ||||
|         max_length=15, | ||||
|         choices=SEVERITY_CHOICES, | ||||
|         default="warning", | ||||
|         null=True, | ||||
|         blank=True, | ||||
|     ) | ||||
|  | ||||
|     # threshold percent for diskspace, cpuload or memory check | ||||
|     threshold = models.PositiveIntegerField( | ||||
|         null=True, blank=True, validators=[MinValueValidator(1), MaxValueValidator(99)] | ||||
|     error_threshold = models.PositiveIntegerField( | ||||
|         validators=[MinValueValidator(0), MaxValueValidator(99)], | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         default=0, | ||||
|     ) | ||||
|     warning_threshold = models.PositiveIntegerField( | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         validators=[MinValueValidator(0), MaxValueValidator(99)], | ||||
|         default=0, | ||||
|     ) | ||||
|     # diskcheck i.e C:, D: etc | ||||
|     disk = models.CharField(max_length=2, null=True, blank=True) | ||||
| @@ -114,6 +144,18 @@ class Check(BaseAuditModel): | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     info_return_codes = ArrayField( | ||||
|         models.PositiveIntegerField(), | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     warning_return_codes = ArrayField( | ||||
|         models.PositiveIntegerField(), | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     timeout = models.PositiveIntegerField(null=True, blank=True) | ||||
|     stdout = models.TextField(null=True, blank=True) | ||||
|     stderr = models.TextField(null=True, blank=True) | ||||
| @@ -158,11 +200,25 @@ class Check(BaseAuditModel): | ||||
|     @property | ||||
|     def readable_desc(self): | ||||
|         if self.check_type == "diskspace": | ||||
|             return f"{self.get_check_type_display()}: Drive {self.disk} < {self.threshold}%" | ||||
|  | ||||
|             text = "" | ||||
|             if self.warning_threshold: | ||||
|                 text += f" Warning Threshold: {self.warning_threshold}%" | ||||
|             if self.error_threshold: | ||||
|                 text += f" Error Threshold: {self.error_threshold}%" | ||||
|  | ||||
|             return f"{self.get_check_type_display()}: Drive {self.disk} < {text}" | ||||
|         elif self.check_type == "ping": | ||||
|             return f"{self.get_check_type_display()}: {self.name}" | ||||
|         elif self.check_type == "cpuload" or self.check_type == "memory": | ||||
|             return f"{self.get_check_type_display()} > {self.threshold}%" | ||||
|  | ||||
|             text = "" | ||||
|             if self.warning_threshold: | ||||
|                 text += f" Warning Threshold: {self.warning_threshold}%" | ||||
|             if self.error_threshold: | ||||
|                 text += f" Error Threshold: {self.error_threshold}%" | ||||
|  | ||||
|             return f"{self.get_check_type_display()} > {text}" | ||||
|         elif self.check_type == "winsvc": | ||||
|             return f"{self.get_check_type_display()}: {self.svc_display_name}" | ||||
|         elif self.check_type == "eventlog": | ||||
| @@ -187,15 +243,13 @@ class Check(BaseAuditModel): | ||||
|         return self.last_run | ||||
|  | ||||
|     @property | ||||
|     def non_editable_fields(self): | ||||
|     def non_editable_fields(self) -> List[str]: | ||||
|         return [ | ||||
|             "check_type", | ||||
|             "status", | ||||
|             "more_info", | ||||
|             "last_run", | ||||
|             "fail_count", | ||||
|             "email_sent", | ||||
|             "text_sent", | ||||
|             "outage_history", | ||||
|             "extra_details", | ||||
|             "stdout", | ||||
| @@ -214,7 +268,148 @@ class Check(BaseAuditModel): | ||||
|             "modified_time", | ||||
|         ] | ||||
|  | ||||
|     def handle_alert(self) -> None: | ||||
|         from alerts.models import Alert, AlertTemplate | ||||
|  | ||||
|         # return if agent is in maintenance mode | ||||
|         if self.agent.maintenance_mode: | ||||
|             return | ||||
|  | ||||
|         # see if agent has an alert template and use that | ||||
|         alert_template: Union[AlertTemplate, None] = self.agent.get_alert_template() | ||||
|  | ||||
|         # resolve alert if it exists | ||||
|         if self.status == "passing": | ||||
|             if Alert.objects.filter(assigned_check=self, resolved=False).exists(): | ||||
|                 alert = Alert.objects.get(assigned_check=self, resolved=False) | ||||
|                 alert.resolve() | ||||
|  | ||||
|                 # check if a resolved email notification should be sent | ||||
|                 if ( | ||||
|                     alert_template | ||||
|                     and alert_template.check_email_on_resolved | ||||
|                     and not alert.resolved_email_sent | ||||
|                 ): | ||||
|                     handle_resolved_check_email_alert_task.delay(pk=alert.pk) | ||||
|  | ||||
|                 # check if resolved text should be sent | ||||
|                 if ( | ||||
|                     alert_template | ||||
|                     and alert_template.check_text_on_resolved | ||||
|                     and not alert.resolved_sms_sent | ||||
|                 ): | ||||
|                     handle_resolved_check_sms_alert_task.delay(pk=alert.pk) | ||||
|  | ||||
|                 # check if resolved script should be run | ||||
|                 if ( | ||||
|                     alert_template | ||||
|                     and alert_template.resolved_action | ||||
|                     and not alert.resolved_action_run | ||||
|                 ): | ||||
|                     r = self.agent.run_script( | ||||
|                         scriptpk=alert_template.resolved_action.pk, | ||||
|                         args=alert_template.resolved_action_args, | ||||
|                         timeout=alert_template.resolved_action_timeout, | ||||
|                         wait=True, | ||||
|                         full=True, | ||||
|                         run_on_any=True, | ||||
|                     ) | ||||
|  | ||||
|                     # command was successful | ||||
|                     if type(r) == dict: | ||||
|                         alert.resolved_action_retcode = r["retcode"] | ||||
|                         alert.resolved_action_stdout = r["stdout"] | ||||
|                         alert.resolved_action_stderr = r["stderr"] | ||||
|                         alert.resolved_action_execution_time = "{:.4f}".format( | ||||
|                             r["execution_time"] | ||||
|                         ) | ||||
|                         alert.resolved_action_run = djangotime.now() | ||||
|                         alert.save() | ||||
|                     else: | ||||
|                         logger.error( | ||||
|                             f"Resolved action: {alert_template.action.name} failed to run on any agent for {self.agent.hostname} resolved alert for {self.check_type} check" | ||||
|                         ) | ||||
|  | ||||
|         elif self.fail_count >= self.fails_b4_alert: | ||||
|             if not Alert.objects.filter(assigned_check=self, resolved=False).exists(): | ||||
|                 alert = Alert.create_check_alert(self) | ||||
|             else: | ||||
|                 alert = Alert.objects.get(assigned_check=self, resolved=False) | ||||
|  | ||||
|                 # check if alert severity changed on check and update the alert | ||||
|                 if self.alert_severity != alert.severity: | ||||
|                     alert.severity = self.alert_severity | ||||
|                     alert.save(update_fields=["severity"]) | ||||
|  | ||||
|             # create alert in dashboard if enabled | ||||
|             if ( | ||||
|                 self.dashboard_alert | ||||
|                 or alert_template | ||||
|                 and self.alert_severity in alert_template.check_dashboard_alert_severity | ||||
|                 and alert_template.check_always_alert | ||||
|             ): | ||||
|                 alert.hidden = False | ||||
|                 alert.save() | ||||
|  | ||||
|             # send email if enabled | ||||
|             if ( | ||||
|                 not alert.email_sent | ||||
|                 and self.email_alert | ||||
|                 or alert_template | ||||
|                 and self.alert_severity in alert_template.check_email_alert_severity | ||||
|                 and alert_template.check_always_email | ||||
|             ): | ||||
|                 handle_check_email_alert_task.delay( | ||||
|                     pk=alert.pk, | ||||
|                     alert_interval=alert_template.check_periodic_alert_days | ||||
|                     if alert_template | ||||
|                     else None, | ||||
|                 ) | ||||
|  | ||||
|             # send text if enabled | ||||
|             if ( | ||||
|                 not alert.sms_sent | ||||
|                 and self.text_alert | ||||
|                 or alert_template | ||||
|                 and self.alert_severity in alert_template.check_text_alert_severity | ||||
|                 and alert_template.check_always_text | ||||
|             ): | ||||
|                 handle_check_sms_alert_task.delay( | ||||
|                     pk=alert.pk, | ||||
|                     alert_interval=alert_template.check_periodic_alert_days | ||||
|                     if alert_template | ||||
|                     else None, | ||||
|                 ) | ||||
|  | ||||
|             # check if any scripts should be run | ||||
|             if alert_template and alert_template.action and not alert.action_run: | ||||
|                 r = self.agent.run_script( | ||||
|                     scriptpk=alert_template.action.pk, | ||||
|                     args=alert_template.action_args, | ||||
|                     timeout=alert_template.action_timeout, | ||||
|                     wait=True, | ||||
|                     full=True, | ||||
|                     run_on_any=True, | ||||
|                 ) | ||||
|  | ||||
|                 # command was successful | ||||
|                 if type(r) == dict: | ||||
|                     alert.action_retcode = r["retcode"] | ||||
|                     alert.action_stdout = r["stdout"] | ||||
|                     alert.action_stderr = r["stderr"] | ||||
|                     alert.action_execution_time = "{:.4f}".format(r["execution_time"]) | ||||
|                     alert.action_run = djangotime.now() | ||||
|                     alert.save() | ||||
|                 else: | ||||
|                     logger.error( | ||||
|                         f"Failure action: {alert_template.action.name} failed to run on any agent for {self.agent.hostname} failure alert for {self.check_type} check{r}" | ||||
|                     ) | ||||
|  | ||||
|     def add_check_history(self, value: int, more_info: Any = None) -> None: | ||||
|         CheckHistory.objects.create(check_history=self, y=value, results=more_info) | ||||
|  | ||||
|     def handle_checkv2(self, data): | ||||
|  | ||||
|         # cpuload or mem checks | ||||
|         if self.check_type == "cpuload" or self.check_type == "memory": | ||||
|  | ||||
| @@ -227,11 +422,18 @@ class Check(BaseAuditModel): | ||||
|  | ||||
|             avg = int(mean(self.history)) | ||||
|  | ||||
|             if avg > self.threshold: | ||||
|             if self.error_threshold and avg > self.error_threshold: | ||||
|                 self.status = "failing" | ||||
|                 self.alert_severity = "error" | ||||
|             elif self.warning_threshold and avg > self.warning_threshold: | ||||
|                 self.status = "failing" | ||||
|                 self.alert_severity = "warning" | ||||
|             else: | ||||
|                 self.status = "passing" | ||||
|  | ||||
|             # add check history | ||||
|             self.add_check_history(data["percent"]) | ||||
|  | ||||
|         # diskspace checks | ||||
|         elif self.check_type == "diskspace": | ||||
|             if data["exists"]: | ||||
| @@ -239,14 +441,26 @@ class Check(BaseAuditModel): | ||||
|                 total = bytes2human(data["total"]) | ||||
|                 free = bytes2human(data["free"]) | ||||
|  | ||||
|                 if (100 - percent_used) < self.threshold: | ||||
|                 if self.error_threshold and (100 - percent_used) < self.error_threshold: | ||||
|                     self.status = "failing" | ||||
|                     self.alert_severity = "error" | ||||
|                 elif ( | ||||
|                     self.warning_threshold | ||||
|                     and (100 - percent_used) < self.warning_threshold | ||||
|                 ): | ||||
|                     self.status = "failing" | ||||
|                     self.alert_severity = "warning" | ||||
|  | ||||
|                 else: | ||||
|                     self.status = "passing" | ||||
|  | ||||
|                 self.more_info = f"Total: {total}B, Free: {free}B" | ||||
|  | ||||
|                 # add check history | ||||
|                 self.add_check_history(100 - percent_used) | ||||
|             else: | ||||
|                 self.status = "failing" | ||||
|                 self.alert_severity = "error" | ||||
|                 self.more_info = f"Disk {self.disk} does not exist" | ||||
|  | ||||
|             self.save(update_fields=["more_info"]) | ||||
| @@ -263,8 +477,15 @@ class Check(BaseAuditModel): | ||||
|                 # golang agent | ||||
|                 self.execution_time = "{:.4f}".format(data["runtime"]) | ||||
|  | ||||
|             if data["retcode"] != 0: | ||||
|             if data["retcode"] in self.info_return_codes: | ||||
|                 self.alert_severity = "info" | ||||
|                 self.status = "failing" | ||||
|             elif data["retcode"] in self.warning_return_codes: | ||||
|                 self.alert_severity = "warning" | ||||
|                 self.status = "failing" | ||||
|             elif data["retcode"] != 0: | ||||
|                 self.status = "failing" | ||||
|                 self.alert_severity = "error" | ||||
|             else: | ||||
|                 self.status = "passing" | ||||
|  | ||||
| @@ -277,6 +498,17 @@ class Check(BaseAuditModel): | ||||
|                 ] | ||||
|             ) | ||||
|  | ||||
|             # add check history | ||||
|             self.add_check_history( | ||||
|                 1 if self.status == "failing" else 0, | ||||
|                 { | ||||
|                     "retcode": data["retcode"], | ||||
|                     "stdout": data["stdout"][:60], | ||||
|                     "stderr": data["stderr"][:60], | ||||
|                     "execution_time": self.execution_time, | ||||
|                 }, | ||||
|             ) | ||||
|  | ||||
|         # ping checks | ||||
|         elif self.check_type == "ping": | ||||
|             success = ["Reply", "bytes", "time", "TTL"] | ||||
| @@ -293,6 +525,10 @@ class Check(BaseAuditModel): | ||||
|             self.more_info = output | ||||
|             self.save(update_fields=["more_info"]) | ||||
|  | ||||
|             self.add_check_history( | ||||
|                 1 if self.status == "failing" else 0, self.more_info[:60] | ||||
|             ) | ||||
|  | ||||
|         # windows service checks | ||||
|         elif self.check_type == "winsvc": | ||||
|             svc_stat = data["status"] | ||||
| @@ -332,6 +568,10 @@ class Check(BaseAuditModel): | ||||
|  | ||||
|             self.save(update_fields=["more_info"]) | ||||
|  | ||||
|             self.add_check_history( | ||||
|                 1 if self.status == "failing" else 0, self.more_info[:60] | ||||
|             ) | ||||
|  | ||||
|         elif self.check_type == "eventlog": | ||||
|             log = [] | ||||
|             is_wildcard = self.event_id_is_wildcard | ||||
| @@ -391,62 +631,24 @@ class Check(BaseAuditModel): | ||||
|             self.extra_details = {"log": log} | ||||
|             self.save(update_fields=["extra_details"]) | ||||
|  | ||||
|             self.add_check_history( | ||||
|                 1 if self.status == "failing" else 0, | ||||
|                 "Events Found:" + str(len(self.extra_details["log"])), | ||||
|             ) | ||||
|  | ||||
|         # handle status | ||||
|         if self.status == "failing": | ||||
|             self.fail_count += 1 | ||||
|             self.save(update_fields=["status", "fail_count"]) | ||||
|             self.save(update_fields=["status", "fail_count", "alert_severity"]) | ||||
|  | ||||
|         elif self.status == "passing": | ||||
|             if self.fail_count != 0: | ||||
|                 self.fail_count = 0 | ||||
|                 self.save(update_fields=["status", "fail_count"]) | ||||
|             else: | ||||
|                 self.save(update_fields=["status"]) | ||||
|             self.fail_count = 0 | ||||
|             self.save(update_fields=["status", "fail_count", "alert_severity"]) | ||||
|  | ||||
|         if self.fail_count >= self.fails_b4_alert: | ||||
|             if self.email_alert: | ||||
|                 handle_check_email_alert_task.delay(self.pk) | ||||
|             if self.text_alert: | ||||
|                 handle_check_sms_alert_task.delay(self.pk) | ||||
|         self.handle_alert() | ||||
|  | ||||
|         return self.status | ||||
|  | ||||
|     def handle_check(self, data): | ||||
|         if self.check_type != "cpuload" and self.check_type != "memory": | ||||
|  | ||||
|             if data["status"] == "passing" and self.fail_count != 0: | ||||
|                 self.fail_count = 0 | ||||
|                 self.save(update_fields=["fail_count"]) | ||||
|  | ||||
|             elif data["status"] == "failing": | ||||
|                 self.fail_count += 1 | ||||
|                 self.save(update_fields=["fail_count"]) | ||||
|  | ||||
|         else: | ||||
|             self.history.append(data["percent"]) | ||||
|  | ||||
|             if len(self.history) > 15: | ||||
|                 self.history = self.history[-15:] | ||||
|  | ||||
|             self.save(update_fields=["history"]) | ||||
|  | ||||
|             avg = int(mean(self.history)) | ||||
|  | ||||
|             if avg > self.threshold: | ||||
|                 self.status = "failing" | ||||
|                 self.fail_count += 1 | ||||
|                 self.save(update_fields=["status", "fail_count"]) | ||||
|             else: | ||||
|                 self.status = "passing" | ||||
|                 if self.fail_count != 0: | ||||
|                     self.fail_count = 0 | ||||
|                     self.save(update_fields=["status", "fail_count"]) | ||||
|                 else: | ||||
|                     self.save(update_fields=["status"]) | ||||
|  | ||||
|         if self.email_alert and self.fail_count >= self.fails_b4_alert: | ||||
|             handle_check_email_alert_task.delay(self.pk) | ||||
|  | ||||
|     @staticmethod | ||||
|     def serialize(check): | ||||
|         # serializes the check and returns json | ||||
| @@ -480,17 +682,22 @@ class Check(BaseAuditModel): | ||||
|             managed_by_policy=bool(agent), | ||||
|             parent_check=(self.pk if agent else None), | ||||
|             name=self.name, | ||||
|             alert_severity=self.alert_severity, | ||||
|             check_type=self.check_type, | ||||
|             email_alert=self.email_alert, | ||||
|             dashboard_alert=self.dashboard_alert, | ||||
|             text_alert=self.text_alert, | ||||
|             fails_b4_alert=self.fails_b4_alert, | ||||
|             extra_details=self.extra_details, | ||||
|             threshold=self.threshold, | ||||
|             error_threshold=self.error_threshold, | ||||
|             warning_threshold=self.warning_threshold, | ||||
|             disk=self.disk, | ||||
|             ip=self.ip, | ||||
|             script=self.script, | ||||
|             script_args=self.script_args, | ||||
|             timeout=self.timeout, | ||||
|             info_return_codes=self.info_return_codes, | ||||
|             warning_return_codes=self.warning_return_codes, | ||||
|             svc_name=self.svc_name, | ||||
|             svc_display_name=self.svc_display_name, | ||||
|             pass_if_start_pending=self.pass_if_start_pending, | ||||
| @@ -532,19 +739,27 @@ class Check(BaseAuditModel): | ||||
|     def send_email(self): | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         alert_template = self.agent.get_alert_template() | ||||
|  | ||||
|         body: str = "" | ||||
|         if self.agent: | ||||
|             subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed" | ||||
|         else: | ||||
|             subject = f"{self} Failed" | ||||
|  | ||||
|         if self.check_type == "diskspace": | ||||
|             text = "" | ||||
|             if self.warning_threshold: | ||||
|                 text += f" Warning Threshold: {self.warning_threshold}%" | ||||
|             if self.error_threshold: | ||||
|                 text += f" Error Threshold: {self.error_threshold}%" | ||||
|  | ||||
|             percent_used = [ | ||||
|                 d["percent"] for d in self.agent.disks if d["device"] == self.disk | ||||
|             ][0] | ||||
|             percent_free = 100 - percent_used | ||||
|  | ||||
|             body = subject + f" - Free: {percent_free}%, Threshold: {self.threshold}%" | ||||
|             body = subject + f" - Free: {percent_free}%, {text}" | ||||
|  | ||||
|         elif self.check_type == "script": | ||||
|  | ||||
| @@ -558,26 +773,29 @@ class Check(BaseAuditModel): | ||||
|             body = self.more_info | ||||
|  | ||||
|         elif self.check_type == "cpuload" or self.check_type == "memory": | ||||
|             text = "" | ||||
|             if self.warning_threshold: | ||||
|                 text += f" Warning Threshold: {self.warning_threshold}%" | ||||
|             if self.error_threshold: | ||||
|                 text += f" Error Threshold: {self.error_threshold}%" | ||||
|  | ||||
|             avg = int(mean(self.history)) | ||||
|  | ||||
|             if self.check_type == "cpuload": | ||||
|                 body = ( | ||||
|                     subject | ||||
|                     + f" - Average CPU utilization: {avg}%, Threshold: {self.threshold}%" | ||||
|                 ) | ||||
|                 body = subject + f" - Average CPU utilization: {avg}%, {text}" | ||||
|  | ||||
|             elif self.check_type == "memory": | ||||
|                 body = ( | ||||
|                     subject | ||||
|                     + f" - Average memory usage: {avg}%, Threshold: {self.threshold}%" | ||||
|                 ) | ||||
|                 body = subject + f" - Average memory usage: {avg}%, {text}" | ||||
|  | ||||
|         elif self.check_type == "winsvc": | ||||
|  | ||||
|             status = list( | ||||
|                 filter(lambda x: x["name"] == self.svc_name, self.agent.services) | ||||
|             )[0]["status"] | ||||
|             try: | ||||
|                 status = list( | ||||
|                     filter(lambda x: x["name"] == self.svc_name, self.agent.services) | ||||
|                 )[0]["status"] | ||||
|             # catch services that don't exist if policy check | ||||
|             except: | ||||
|                 status = "Unknown" | ||||
|  | ||||
|             body = subject + f" - Status: {status.upper()}" | ||||
|  | ||||
| @@ -603,11 +821,13 @@ class Check(BaseAuditModel): | ||||
|                 except: | ||||
|                     continue | ||||
|  | ||||
|         CORE.send_mail(subject, body) | ||||
|         CORE.send_mail(subject, body, alert_template=alert_template) | ||||
|  | ||||
|     def send_sms(self): | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         alert_template = self.agent.get_alert_template() | ||||
|         body: str = "" | ||||
|  | ||||
|         if self.agent: | ||||
|             subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed" | ||||
| @@ -615,27 +835,33 @@ class Check(BaseAuditModel): | ||||
|             subject = f"{self} Failed" | ||||
|  | ||||
|         if self.check_type == "diskspace": | ||||
|             text = "" | ||||
|             if self.warning_threshold: | ||||
|                 text += f" Warning Threshold: {self.warning_threshold}%" | ||||
|             if self.error_threshold: | ||||
|                 text += f" Error Threshold: {self.error_threshold}%" | ||||
|  | ||||
|             percent_used = [ | ||||
|                 d["percent"] for d in self.agent.disks if d["device"] == self.disk | ||||
|             ][0] | ||||
|             percent_free = 100 - percent_used | ||||
|             body = subject + f" - Free: {percent_free}%, Threshold: {self.threshold}%" | ||||
|             body = subject + f" - Free: {percent_free}%, {text}" | ||||
|         elif self.check_type == "script": | ||||
|             body = subject + f" - Return code: {self.retcode}" | ||||
|         elif self.check_type == "ping": | ||||
|             body = subject | ||||
|         elif self.check_type == "cpuload" or self.check_type == "memory": | ||||
|             text = "" | ||||
|             if self.warning_threshold: | ||||
|                 text += f" Warning Threshold: {self.warning_threshold}%" | ||||
|             if self.error_threshold: | ||||
|                 text += f" Error Threshold: {self.error_threshold}%" | ||||
|  | ||||
|             avg = int(mean(self.history)) | ||||
|             if self.check_type == "cpuload": | ||||
|                 body = ( | ||||
|                     subject | ||||
|                     + f" - Average CPU utilization: {avg}%, Threshold: {self.threshold}%" | ||||
|                 ) | ||||
|                 body = subject + f" - Average CPU utilization: {avg}%, {text}" | ||||
|             elif self.check_type == "memory": | ||||
|                 body = ( | ||||
|                     subject | ||||
|                     + f" - Average memory usage: {avg}%, Threshold: {self.threshold}%" | ||||
|                 ) | ||||
|                 body = subject + f" - Average memory usage: {avg}%, {text}" | ||||
|         elif self.check_type == "winsvc": | ||||
|             status = list( | ||||
|                 filter(lambda x: x["name"] == self.svc_name, self.agent.services) | ||||
| @@ -644,4 +870,32 @@ class Check(BaseAuditModel): | ||||
|         elif self.check_type == "eventlog": | ||||
|             body = subject | ||||
|  | ||||
|         CORE.send_sms(body) | ||||
|         CORE.send_sms(body, alert_template=alert_template) | ||||
|  | ||||
|     def send_resolved_email(self): | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         alert_template = self.agent.get_alert_template() | ||||
|         subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved" | ||||
|         body = f"{self} is now back to normal" | ||||
|  | ||||
|         CORE.send_mail(subject, body, alert_template=alert_template) | ||||
|  | ||||
|     def send_resolved_sms(self): | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         alert_template = self.agent.get_alert_template() | ||||
|         subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved" | ||||
|         CORE.send_sms(subject, alert_template=alert_template) | ||||
|  | ||||
|  | ||||
| class CheckHistory(models.Model): | ||||
|     check_history = models.ForeignKey( | ||||
|         Check, | ||||
|         related_name="check_history", | ||||
|         on_delete=models.CASCADE, | ||||
|     ) | ||||
|     x = models.DateTimeField(auto_now_add=True) | ||||
|     y = models.PositiveIntegerField(null=True, blank=True, default=None) | ||||
|     results = models.JSONField(null=True, blank=True) | ||||
|  | ||||
|     def __str__(self): | ||||
|         return self.check_history.readable_desc | ||||
|   | ||||
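The central behavioural change in handle_checkv2 above is that the single threshold field gives way to a warning/error pair that also drives alert_severity. Here is a standalone sketch of the cpuload/memory decision (average of the rolling history compared against both thresholds, where 0 or None means "not set"), with illustrative names; the real logic runs inside the Check model as shown above:

# Standalone sketch of the cpuload/memory evaluation from handle_checkv2.
# A threshold of 0 or None counts as "not set", matching the model defaults.
from statistics import mean
from typing import List, Optional, Tuple

def evaluate_usage(
    history: List[int],
    warning_threshold: Optional[int],
    error_threshold: Optional[int],
) -> Tuple[str, Optional[str]]:
    avg = int(mean(history))
    if error_threshold and avg > error_threshold:
        return "failing", "error"
    if warning_threshold and avg > warning_threshold:
        return "failing", "warning"
    return "passing", None   # severity is only meaningful while failing

print(evaluate_usage([70, 80, 90], warning_threshold=50, error_threshold=75))
# -> ('failing', 'error'): the average (80) exceeds the error threshold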
| @@ -1,8 +1,8 @@ | ||||
| import validators as _v | ||||
|  | ||||
| import pytz | ||||
| from rest_framework import serializers | ||||
|  | ||||
| from .models import Check | ||||
| from .models import Check, CheckHistory | ||||
| from autotasks.models import AutomatedTask | ||||
| from scripts.serializers import ScriptSerializer, ScriptCheckSerializer | ||||
|  | ||||
| @@ -40,19 +40,35 @@ class CheckSerializer(serializers.ModelSerializer): | ||||
|             check_type = val["check_type"] | ||||
|         except KeyError: | ||||
|             return val | ||||
|  | ||||
|         # disk checks | ||||
|         # make sure no duplicate diskchecks exist for an agent/policy | ||||
|         if check_type == "diskspace" and not self.instance:  # only on create | ||||
|             checks = ( | ||||
|                 Check.objects.filter(**self.context) | ||||
|                 .filter(check_type="diskspace") | ||||
|                 .exclude(managed_by_policy=True) | ||||
|             ) | ||||
|             for check in checks: | ||||
|                 if val["disk"] in check.disk: | ||||
|                     raise serializers.ValidationError( | ||||
|                         f"A disk check for Drive {val['disk']} already exists!" | ||||
|                     ) | ||||
|         if check_type == "diskspace": | ||||
|             if not self.instance:  # only on create | ||||
|                 checks = ( | ||||
|                     Check.objects.filter(**self.context) | ||||
|                     .filter(check_type="diskspace") | ||||
|                     .exclude(managed_by_policy=True) | ||||
|                 ) | ||||
|                 for check in checks: | ||||
|                     if val["disk"] in check.disk: | ||||
|                         raise serializers.ValidationError( | ||||
|                             f"A disk check for Drive {val['disk']} already exists!" | ||||
|                         ) | ||||
|  | ||||
|             if not val["warning_threshold"] and not val["error_threshold"]: | ||||
|                 raise serializers.ValidationError( | ||||
|                     f"Warning threshold or Error Threshold must be set" | ||||
|                 ) | ||||
|  | ||||
|             if ( | ||||
|                 val["warning_threshold"] < val["error_threshold"] | ||||
|                 and val["warning_threshold"] > 0 | ||||
|                 and val["error_threshold"] > 0 | ||||
|             ): | ||||
|                 raise serializers.ValidationError( | ||||
|                     f"Warning threshold must be greater than Error Threshold" | ||||
|                 ) | ||||
|  | ||||
|         # ping checks | ||||
|         if check_type == "ping": | ||||
| @@ -65,6 +81,54 @@ class CheckSerializer(serializers.ModelSerializer): | ||||
|                     "Please enter a valid IP address or domain name" | ||||
|                 ) | ||||
|  | ||||
|         if check_type == "cpuload" and not self.instance: | ||||
|             if ( | ||||
|                 Check.objects.filter(**self.context, check_type="cpuload") | ||||
|                 .exclude(managed_by_policy=True) | ||||
|                 .exists() | ||||
|             ): | ||||
|                 raise serializers.ValidationError( | ||||
|                     "A cpuload check for this agent already exists" | ||||
|                 ) | ||||
|  | ||||
|             if not val["warning_threshold"] and not val["error_threshold"]: | ||||
|                 raise serializers.ValidationError( | ||||
|                     f"Warning threshold or Error Threshold must be set" | ||||
|                 ) | ||||
|  | ||||
|             if ( | ||||
|                 val["warning_threshold"] > val["error_threshold"] | ||||
|                 and val["warning_threshold"] > 0 | ||||
|                 and val["error_threshold"] > 0 | ||||
|             ): | ||||
|                 raise serializers.ValidationError( | ||||
|                     f"Warning threshold must be less than Error Threshold" | ||||
|                 ) | ||||
|  | ||||
|         if check_type == "memory" and not self.instance: | ||||
|             if ( | ||||
|                 Check.objects.filter(**self.context, check_type="memory") | ||||
|                 .exclude(managed_by_policy=True) | ||||
|                 .exists() | ||||
|             ): | ||||
|                 raise serializers.ValidationError( | ||||
|                     "A memory check for this agent already exists" | ||||
|                 ) | ||||
|  | ||||
|             if not val["warning_threshold"] and not val["error_threshold"]: | ||||
|                 raise serializers.ValidationError( | ||||
|                     f"Warning threshold or Error Threshold must be set" | ||||
|                 ) | ||||
|  | ||||
|             if ( | ||||
|                 val["warning_threshold"] > val["error_threshold"] | ||||
|                 and val["warning_threshold"] > 0 | ||||
|                 and val["error_threshold"] > 0 | ||||
|             ): | ||||
|                 raise serializers.ValidationError( | ||||
|                     f"Warning threshold must be less than Error Threshold" | ||||
|                 ) | ||||
|  | ||||
|         return val | ||||
|  | ||||
|  | ||||
| @@ -75,101 +139,7 @@ class AssignedTaskCheckRunnerField(serializers.ModelSerializer): | ||||
|  | ||||
|  | ||||
| class CheckRunnerGetSerializer(serializers.ModelSerializer): | ||||
|     # for the windows agent | ||||
|     # only send data needed for agent to run a check | ||||
|  | ||||
|     assigned_task = serializers.SerializerMethodField() | ||||
|     script = ScriptSerializer(read_only=True) | ||||
|  | ||||
|     def get_assigned_task(self, obj): | ||||
|         if obj.assignedtask.exists(): | ||||
|             # this will not break agents on version 0.10.2 or lower | ||||
|             # newer agents once released will properly handle multiple tasks assigned to a check | ||||
|             task = obj.assignedtask.first() | ||||
|             return AssignedTaskCheckRunnerField(task).data | ||||
|  | ||||
|     class Meta: | ||||
|         model = Check | ||||
|         exclude = [ | ||||
|             "policy", | ||||
|             "managed_by_policy", | ||||
|             "overriden_by_policy", | ||||
|             "parent_check", | ||||
|             "name", | ||||
|             "more_info", | ||||
|             "last_run", | ||||
|             "email_alert", | ||||
|             "text_alert", | ||||
|             "fails_b4_alert", | ||||
|             "fail_count", | ||||
|             "email_sent", | ||||
|             "text_sent", | ||||
|             "outage_history", | ||||
|             "extra_details", | ||||
|             "stdout", | ||||
|             "stderr", | ||||
|             "retcode", | ||||
|             "execution_time", | ||||
|             "svc_display_name", | ||||
|             "svc_policy_mode", | ||||
|             "created_by", | ||||
|             "created_time", | ||||
|             "modified_by", | ||||
|             "modified_time", | ||||
|             "history", | ||||
|         ] | ||||
|  | ||||
|  | ||||
| class CheckRunnerGetSerializerV2(serializers.ModelSerializer): | ||||
|     # for the windows __python__ agent | ||||
|     # only send data needed for agent to run a check | ||||
|  | ||||
|     assigned_tasks = serializers.SerializerMethodField() | ||||
|     script = ScriptSerializer(read_only=True) | ||||
|  | ||||
|     def get_assigned_tasks(self, obj): | ||||
|         if obj.assignedtask.exists(): | ||||
|             tasks = obj.assignedtask.all() | ||||
|             return AssignedTaskCheckRunnerField(tasks, many=True).data | ||||
|  | ||||
|     class Meta: | ||||
|         model = Check | ||||
|         exclude = [ | ||||
|             "policy", | ||||
|             "managed_by_policy", | ||||
|             "overriden_by_policy", | ||||
|             "parent_check", | ||||
|             "name", | ||||
|             "more_info", | ||||
|             "last_run", | ||||
|             "email_alert", | ||||
|             "text_alert", | ||||
|             "fails_b4_alert", | ||||
|             "fail_count", | ||||
|             "email_sent", | ||||
|             "text_sent", | ||||
|             "outage_history", | ||||
|             "extra_details", | ||||
|             "stdout", | ||||
|             "stderr", | ||||
|             "retcode", | ||||
|             "execution_time", | ||||
|             "svc_display_name", | ||||
|             "svc_policy_mode", | ||||
|             "created_by", | ||||
|             "created_time", | ||||
|             "modified_by", | ||||
|             "modified_time", | ||||
|             "history", | ||||
|         ] | ||||
|  | ||||
|  | ||||
| class CheckRunnerGetSerializerV3(serializers.ModelSerializer): | ||||
|     # for the windows __golang__ agent | ||||
|     # only send data needed for agent to run a check | ||||
|     # the difference here is in the script serializer | ||||
|     # script checks no longer rely on salt and are executed directly by the go agent | ||||
|  | ||||
|     assigned_tasks = serializers.SerializerMethodField() | ||||
|     script = ScriptCheckSerializer(read_only=True) | ||||
|  | ||||
| @@ -192,8 +162,6 @@ class CheckRunnerGetSerializerV3(serializers.ModelSerializer): | ||||
|             "text_alert", | ||||
|             "fails_b4_alert", | ||||
|             "fail_count", | ||||
|             "email_sent", | ||||
|             "text_sent", | ||||
|             "outage_history", | ||||
|             "extra_details", | ||||
|             "stdout", | ||||
| @@ -217,3 +185,15 @@ class CheckResultsSerializer(serializers.ModelSerializer): | ||||
|     class Meta: | ||||
|         model = Check | ||||
|         fields = "__all__" | ||||
|  | ||||
|  | ||||
| class CheckHistorySerializer(serializers.ModelSerializer): | ||||
|     x = serializers.SerializerMethodField() | ||||
|  | ||||
|     def get_x(self, obj): | ||||
|         return obj.x.astimezone(pytz.timezone(self.context["timezone"])).isoformat() | ||||
|  | ||||
|     # used for returning large amounts of graph data | ||||
|     class Meta: | ||||
|         model = CheckHistory | ||||
|         fields = ("x", "y", "results") | ||||
|   | ||||
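The ordering rules in the serializer above differ by check type because the thresholds measure opposite things: for diskspace they describe percent free (lower is worse), so the warning threshold must sit above the error threshold; for cpuload and memory they describe percent used (higher is worse), so the warning threshold must sit below the error threshold; in all three cases at least one of the two must be non-zero. Illustrative payload fragments against those rules (field names taken from this diff; the endpoints appear in the tests below):

# Illustrative check payloads against the validation rules above.
valid_diskspace   = {"check_type": "diskspace", "disk": "C:",
                     "warning_threshold": 25, "error_threshold": 10}  # warning > error
invalid_diskspace = {"check_type": "diskspace", "disk": "C:",
                     "warning_threshold": 10, "error_threshold": 25}  # rejected

valid_cpuload     = {"check_type": "cpuload",
                     "warning_threshold": 70, "error_threshold": 90}  # warning < error
invalid_cpuload   = {"check_type": "cpuload",
                     "warning_threshold": 0, "error_threshold": 0}    # rejected: at least one required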
| @@ -1,58 +1,101 @@ | ||||
| import datetime as dt | ||||
| import random | ||||
| from time import sleep | ||||
| from typing import Union | ||||
|  | ||||
| from tacticalrmm.celery import app | ||||
| from django.utils import timezone as djangotime | ||||
|  | ||||
| from agents.models import Agent | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def handle_check_email_alert_task(pk): | ||||
|     from .models import Check | ||||
| def handle_check_email_alert_task(pk, alert_interval: Union[float, None] = None) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     check = Check.objects.get(pk=pk) | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|  | ||||
|     if not check.agent.maintenance_mode: | ||||
|         # first time sending email | ||||
|         if not check.email_sent: | ||||
|             sleep(random.randint(1, 10)) | ||||
|             check.send_email() | ||||
|             check.email_sent = djangotime.now() | ||||
|             check.save(update_fields=["email_sent"]) | ||||
|         else: | ||||
|             # send an email only if the last email sent is older than 24 hours | ||||
|             delta = djangotime.now() - dt.timedelta(hours=24) | ||||
|             if check.email_sent < delta: | ||||
|     # first time sending email | ||||
|     if not alert.email_sent: | ||||
|         sleep(random.randint(1, 10)) | ||||
|         alert.assigned_check.send_email() | ||||
|         alert.email_sent = djangotime.now() | ||||
|         alert.save(update_fields=["email_sent"]) | ||||
|     else: | ||||
|         if alert_interval: | ||||
|             # send an email only if the last email sent is older than alert interval | ||||
|             delta = djangotime.now() - dt.timedelta(days=alert_interval) | ||||
|             if alert.email_sent < delta: | ||||
|                 sleep(random.randint(1, 10)) | ||||
|                 check.send_email() | ||||
|                 check.email_sent = djangotime.now() | ||||
|                 check.save(update_fields=["email_sent"]) | ||||
|                 alert.assigned_check.send_email() | ||||
|                 alert.email_sent = djangotime.now() | ||||
|                 alert.save(update_fields=["email_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def handle_check_sms_alert_task(pk): | ||||
|     from .models import Check | ||||
| def handle_check_sms_alert_task(pk, alert_interval: Union[float, None] = None) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     check = Check.objects.get(pk=pk) | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|  | ||||
|     if not check.agent.maintenance_mode: | ||||
|         # first time sending text | ||||
|         if not check.text_sent: | ||||
|             sleep(random.randint(1, 3)) | ||||
|             check.send_sms() | ||||
|             check.text_sent = djangotime.now() | ||||
|             check.save(update_fields=["text_sent"]) | ||||
|         else: | ||||
|     # first time sending text | ||||
|     if not alert.sms_sent: | ||||
|         sleep(random.randint(1, 3)) | ||||
|         alert.assigned_check.send_sms() | ||||
|         alert.sms_sent = djangotime.now() | ||||
|         alert.save(update_fields=["sms_sent"]) | ||||
|     else: | ||||
|         if alert_interval: | ||||
|             # send a text only if the last text sent is older than alert interval | ||||
|             delta = djangotime.now() - dt.timedelta(hours=24) | ||||
|             if check.text_sent < delta: | ||||
|             delta = djangotime.now() - dt.timedelta(days=alert_interval) | ||||
|             if alert.sms_sent < delta: | ||||
|                 sleep(random.randint(1, 3)) | ||||
|                 check.send_sms() | ||||
|                 check.text_sent = djangotime.now() | ||||
|                 check.save(update_fields=["text_sent"]) | ||||
|                 alert.assigned_check.send_sms() | ||||
|                 alert.sms_sent = djangotime.now() | ||||
|                 alert.save(update_fields=["sms_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def handle_resolved_check_sms_alert_task(pk: int) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|  | ||||
|     # first time sending text | ||||
|     if not alert.resolved_sms_sent: | ||||
|         sleep(random.randint(1, 3)) | ||||
|         alert.assigned_check.send_resolved_sms() | ||||
|         alert.resolved_sms_sent = djangotime.now() | ||||
|         alert.save(update_fields=["resolved_sms_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def handle_resolved_check_email_alert_task(pk: int) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|  | ||||
|     # first time sending email | ||||
|     if not alert.resolved_email_sent: | ||||
|         sleep(random.randint(1, 10)) | ||||
|         alert.assigned_check.send_resolved_email() | ||||
|         alert.resolved_email_sent = djangotime.now() | ||||
|         alert.save(update_fields=["resolved_email_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def prune_check_history(older_than_days: int) -> str: | ||||
|     from .models import CheckHistory | ||||
|  | ||||
|     CheckHistory.objects.filter( | ||||
|         x__lt=djangotime.make_aware(dt.datetime.today()) | ||||
|         - djangotime.timedelta(days=older_than_days) | ||||
|     ).delete() | ||||
|  | ||||
|     return "ok" | ||||
|   | ||||
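The failure-alert tasks now accept an optional alert_interval in days, passed in from handle_alert when an alert template defines check_periodic_alert_days, and prune_check_history gives the new CheckHistory table a retention policy. Below is a hypothetical Celery beat entry for the prune task; the schedule name, cadence, and retention period are illustrative and not taken from the project's settings:

# Hypothetical beat schedule for prune_check_history; name, cadence and the
# 30-day retention are illustrative, not from tacticalrmm's settings.
from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    "prune-check-history": {
        "task": "checks.tasks.prune_check_history",
        "schedule": crontab(hour=3, minute=0),  # nightly at 03:00
        "args": (30,),                          # keep 30 days of CheckHistory rows
    },
}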
| @@ -1,13 +1,16 @@ | ||||
| from checks.models import CheckHistory | ||||
| from tacticalrmm.test import TacticalTestCase | ||||
| from .serializers import CheckSerializer | ||||
| from django.utils import timezone as djangotime | ||||
| from unittest.mock import patch | ||||
|  | ||||
| from model_bakery import baker | ||||
| from itertools import cycle | ||||
|  | ||||
|  | ||||
| class TestCheckViews(TacticalTestCase): | ||||
|     def setUp(self): | ||||
|         self.authenticate() | ||||
|         self.setup_coresettings() | ||||
|  | ||||
|     def test_get_disk_check(self): | ||||
|         # setup data | ||||
| @@ -20,7 +23,7 @@ class TestCheckViews(TacticalTestCase): | ||||
|  | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertEqual(resp.data, serializer.data) | ||||
|         self.check_not_authenticated("post", url) | ||||
|         self.check_not_authenticated("get", url) | ||||
|  | ||||
|     def test_add_disk_check(self): | ||||
|         # setup data | ||||
| @@ -33,7 +36,8 @@ class TestCheckViews(TacticalTestCase): | ||||
|             "check": { | ||||
|                 "check_type": "diskspace", | ||||
|                 "disk": "C:", | ||||
|                 "threshold": 55, | ||||
|                 "error_threshold": 55, | ||||
|                 "warning_threshold": 0, | ||||
|                 "fails_b4_alert": 3, | ||||
|             }, | ||||
|         } | ||||
| @@ -47,7 +51,8 @@ class TestCheckViews(TacticalTestCase): | ||||
|             "check": { | ||||
|                 "check_type": "diskspace", | ||||
|                 "disk": "C:", | ||||
|                 "threshold": 55, | ||||
|                 "error_threshold": 55, | ||||
|                 "warning_threshold": 0, | ||||
|                 "fails_b4_alert": 3, | ||||
|             }, | ||||
|         } | ||||
| @@ -55,6 +60,144 @@ class TestCheckViews(TacticalTestCase): | ||||
|         resp = self.client.post(url, invalid_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|         # this should fail because both error and warning thresholds are 0 | ||||
|         invalid_payload = { | ||||
|             "pk": agent.pk, | ||||
|             "check": { | ||||
|                 "check_type": "diskspace", | ||||
|                 "disk": "C:", | ||||
|                 "error_threshold": 0, | ||||
|                 "warning_threshold": 0, | ||||
|                 "fails_b4_alert": 3, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.post(url, invalid_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|         # this should fail because the error threshold is greater than the warning threshold | ||||
|         invalid_payload = { | ||||
|             "pk": agent.pk, | ||||
|             "check": { | ||||
|                 "check_type": "diskspace", | ||||
|                 "disk": "C:", | ||||
|                 "error_threshold": 50, | ||||
|                 "warning_threshold": 30, | ||||
|                 "fails_b4_alert": 3, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.post(url, invalid_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|         self.check_not_authenticated("post", url) | ||||
|  | ||||
|     def test_add_cpuload_check(self): | ||||
|         url = "/checks/checks/" | ||||
|         agent = baker.make_recipe("agents.agent") | ||||
|         payload = { | ||||
|             "pk": agent.pk, | ||||
|             "check": { | ||||
|                 "check_type": "cpuload", | ||||
|                 "error_threshold": 66, | ||||
|                 "warning_threshold": 0, | ||||
|                 "fails_b4_alert": 9, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.post(url, payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         payload["error_threshold"] = 87 | ||||
|         resp = self.client.post(url, payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|         self.assertEqual( | ||||
|             resp.json()["non_field_errors"][0], | ||||
|             "A cpuload check for this agent already exists", | ||||
|         ) | ||||
|  | ||||
|         # should fail because both error and warning thresholds are 0 | ||||
|         invalid_payload = { | ||||
|             "pk": agent.pk, | ||||
|             "check": { | ||||
|                 "check_type": "cpuload", | ||||
|                 "error_threshold": 0, | ||||
|                 "warning_threshold": 0, | ||||
|                 "fails_b4_alert": 9, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.post(url, invalid_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|         # should fail because error is less than warning | ||||
|         invalid_payload = { | ||||
|             "pk": agent.pk, | ||||
|             "check": { | ||||
|                 "check_type": "cpuload", | ||||
|                 "error_threshold": 10, | ||||
|                 "warning_threshold": 50, | ||||
|                 "fails_b4_alert": 9, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.post(url, invalid_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|         self.check_not_authenticated("post", url) | ||||
|  | ||||
|     def test_add_memory_check(self): | ||||
|         url = "/checks/checks/" | ||||
|         agent = baker.make_recipe("agents.agent") | ||||
|         payload = { | ||||
|             "pk": agent.pk, | ||||
|             "check": { | ||||
|                 "check_type": "memory", | ||||
|                 "error_threshold": 78, | ||||
|                 "warning_threshold": 0, | ||||
|                 "fails_b4_alert": 1, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.post(url, payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         payload["error_threshold"] = 55 | ||||
|         resp = self.client.post(url, payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|         self.assertEqual( | ||||
|             resp.json()["non_field_errors"][0], | ||||
|             "A memory check for this agent already exists", | ||||
|         ) | ||||
|  | ||||
|         # should fail because both error and warning thresholds are 0 | ||||
|         invalid_payload = { | ||||
|             "pk": agent.pk, | ||||
|             "check": { | ||||
|                 "check_type": "memory", | ||||
|                 "error_threshold": 0, | ||||
|                 "warning_threshold": 0, | ||||
|                 "fails_b4_alert": 9, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.post(url, invalid_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|         # should fail because error is less than warning | ||||
|         invalid_payload = { | ||||
|             "pk": agent.pk, | ||||
|             "check": { | ||||
|                 "check_type": "memory", | ||||
|                 "error_threshold": 10, | ||||
|                 "warning_threshold": 50, | ||||
|                 "fails_b4_alert": 9, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.post(url, invalid_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|     def test_get_policy_disk_check(self): | ||||
|         # setup data | ||||
|         policy = baker.make("automation.Policy") | ||||
| @@ -80,11 +223,37 @@ class TestCheckViews(TacticalTestCase): | ||||
|             "check": { | ||||
|                 "check_type": "diskspace", | ||||
|                 "disk": "M:", | ||||
|                 "threshold": 86, | ||||
|                 "error_threshold": 86, | ||||
|                 "warning_threshold": 0, | ||||
|                 "fails_b4_alert": 2, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|         # should fail because both error and warning thresholds are 0 | ||||
|         invalid_payload = { | ||||
|             "policy": policy.pk, | ||||
|             "check": { | ||||
|                 "check_type": "diskspace", | ||||
|                 "error_threshold": 0, | ||||
|                 "warning_threshold": 0, | ||||
|                 "fails_b4_alert": 9, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.post(url, invalid_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|         # should fail because warning is less than error | ||||
|         invalid_payload = { | ||||
|             "policy": policy.pk, | ||||
|             "check": { | ||||
|                 "check_type": "diskspace", | ||||
|                 "error_threshold": 80, | ||||
|                 "warning_threshold": 50, | ||||
|                 "fails_b4_alert": 9, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.post(url, invalid_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|         resp = self.client.post(url, valid_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
| @@ -94,7 +263,8 @@ class TestCheckViews(TacticalTestCase): | ||||
|             "check": { | ||||
|                 "check_type": "diskspace", | ||||
|                 "disk": "M:", | ||||
|                 "threshold": 34, | ||||
|                 "error_threshold": 34, | ||||
|                 "warning_threshold": 0, | ||||
|                 "fails_b4_alert": 9, | ||||
|             }, | ||||
|         } | ||||
| @@ -134,3 +304,111 @@ class TestCheckViews(TacticalTestCase): | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         self.check_not_authenticated("patch", url_a) | ||||
|  | ||||
|     @patch("agents.models.Agent.nats_cmd") | ||||
|     def test_run_checks(self, nats_cmd): | ||||
|         agent = baker.make_recipe("agents.agent", version="1.4.1") | ||||
|         agent_old = baker.make_recipe("agents.agent", version="1.0.2") | ||||
|         agent_b4_141 = baker.make_recipe("agents.agent", version="1.4.0") | ||||
|  | ||||
|         url = f"/checks/runchecks/{agent_old.pk}/" | ||||
|         r = self.client.get(url) | ||||
|         self.assertEqual(r.status_code, 400) | ||||
|         self.assertEqual(r.json(), "Requires agent version 1.1.0 or greater") | ||||
|  | ||||
|         url = f"/checks/runchecks/{agent_b4_141.pk}/" | ||||
|         r = self.client.get(url) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         nats_cmd.assert_called_with({"func": "runchecks"}, wait=False) | ||||
|  | ||||
|         nats_cmd.reset_mock() | ||||
|         nats_cmd.return_value = "busy" | ||||
|         url = f"/checks/runchecks/{agent.pk}/" | ||||
|         r = self.client.get(url) | ||||
|         self.assertEqual(r.status_code, 400) | ||||
|         nats_cmd.assert_called_with({"func": "runchecks"}, timeout=15) | ||||
|         self.assertEqual(r.json(), f"Checks are already running on {agent.hostname}") | ||||
|  | ||||
|         nats_cmd.reset_mock() | ||||
|         nats_cmd.return_value = "ok" | ||||
|         url = f"/checks/runchecks/{agent.pk}/" | ||||
|         r = self.client.get(url) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         nats_cmd.assert_called_with({"func": "runchecks"}, timeout=15) | ||||
|         self.assertEqual(r.json(), f"Checks will now be re-run on {agent.hostname}") | ||||
|  | ||||
|         nats_cmd.reset_mock() | ||||
|         nats_cmd.return_value = "timeout" | ||||
|         url = f"/checks/runchecks/{agent.pk}/" | ||||
|         r = self.client.get(url) | ||||
|         self.assertEqual(r.status_code, 400) | ||||
|         nats_cmd.assert_called_with({"func": "runchecks"}, timeout=15) | ||||
|         self.assertEqual(r.json(), "Unable to contact the agent") | ||||
|  | ||||
|         self.check_not_authenticated("get", url) | ||||
|  | ||||
|     def test_get_check_history(self): | ||||
|         # setup data | ||||
|         agent = baker.make_recipe("agents.agent") | ||||
|         check = baker.make_recipe("checks.diskspace_check", agent=agent) | ||||
|         baker.make("checks.CheckHistory", check_history=check, _quantity=30) | ||||
|         check_history_data = baker.make( | ||||
|             "checks.CheckHistory", | ||||
|             check_history=check, | ||||
|             _quantity=30, | ||||
|         ) | ||||
|  | ||||
|         # need to manually set the date back 35 days | ||||
|         for check_history in check_history_data: | ||||
|             check_history.x = djangotime.now() - djangotime.timedelta(days=35) | ||||
|             check_history.save() | ||||
|  | ||||
|         # test invalid check pk | ||||
|         resp = self.client.patch("/checks/history/500/", format="json") | ||||
|         self.assertEqual(resp.status_code, 404) | ||||
|  | ||||
|         url = f"/checks/history/{check.id}/" | ||||
|  | ||||
|         # test with timeFilter last 30 days | ||||
|         data = {"timeFilter": 30} | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertEqual(len(resp.data), 30) | ||||
|  | ||||
|         # test with timeFilter equal to 0 | ||||
|         data = {"timeFilter": 0} | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertEqual(len(resp.data), 60) | ||||
|  | ||||
|         self.check_not_authenticated("patch", url) | ||||
|  | ||||
|  | ||||
| class TestCheckTasks(TacticalTestCase): | ||||
|     def setUp(self): | ||||
|         self.setup_coresettings() | ||||
|  | ||||
|     def test_prune_check_history(self): | ||||
|         from .tasks import prune_check_history | ||||
|  | ||||
|         # setup data | ||||
|         check = baker.make_recipe("checks.diskspace_check") | ||||
|         baker.make("checks.CheckHistory", check_history=check, _quantity=30) | ||||
|         check_history_data = baker.make( | ||||
|             "checks.CheckHistory", | ||||
|             check_history=check, | ||||
|             _quantity=30, | ||||
|         ) | ||||
|  | ||||
|         # need to manually set the date back 35 days | ||||
|         for check_history in check_history_data: | ||||
|             check_history.x = djangotime.now() - djangotime.timedelta(days=35) | ||||
|             check_history.save() | ||||
|  | ||||
|         # prune data 30 days old | ||||
|         prune_check_history(30) | ||||
|         self.assertEqual(CheckHistory.objects.count(), 30) | ||||
|  | ||||
|         # prune all check history data | ||||
|         prune_check_history(0) | ||||
|         self.assertEqual(CheckHistory.objects.count(), 0) | ||||
|   | ||||
| @@ -7,4 +7,5 @@ urlpatterns = [ | ||||
|     path("<pk>/loadchecks/", views.load_checks), | ||||
|     path("getalldisks/", views.get_disks_for_policies), | ||||
|     path("runchecks/<pk>/", views.run_checks), | ||||
|     path("history/<int:checkpk>/", views.CheckHistory.as_view()), | ||||
| ] | ||||
|   | ||||
| @@ -1,6 +1,11 @@ | ||||
| import asyncio | ||||
| from packaging import version as pyver | ||||
|  | ||||
| from django.shortcuts import get_object_or_404 | ||||
| from django.db.models import Q | ||||
| from django.utils import timezone as djangotime | ||||
|  | ||||
| from datetime import datetime as dt | ||||
|  | ||||
| from rest_framework.views import APIView | ||||
| from rest_framework.response import Response | ||||
| @@ -13,7 +18,7 @@ from automation.models import Policy | ||||
| from .models import Check | ||||
| from scripts.models import Script | ||||
|  | ||||
| from .serializers import CheckSerializer | ||||
| from .serializers import CheckSerializer, CheckHistorySerializer | ||||
|  | ||||
|  | ||||
| from automation.tasks import ( | ||||
| @@ -135,14 +140,46 @@ class GetUpdateDeleteCheck(APIView): | ||||
|         return Response(f"{check.readable_desc} was deleted!") | ||||
|  | ||||
|  | ||||
| class CheckHistory(APIView): | ||||
|     def patch(self, request, checkpk): | ||||
|         check = get_object_or_404(Check, pk=checkpk) | ||||
|  | ||||
|         # no filter by default (return all history); a nonzero timeFilter limits results to the last N days | ||||
|         timeFilter = Q() | ||||
|  | ||||
|         if "timeFilter" in request.data: | ||||
|             if request.data["timeFilter"] != 0: | ||||
|                 timeFilter = Q( | ||||
|                     x__lte=djangotime.make_aware(dt.today()), | ||||
|                     x__gt=djangotime.make_aware(dt.today()) | ||||
|                     - djangotime.timedelta(days=request.data["timeFilter"]), | ||||
|                 ) | ||||
|  | ||||
|         check_history = check.check_history.filter(timeFilter).order_by("-x") | ||||
|  | ||||
|         return Response( | ||||
|             CheckHistorySerializer( | ||||
|                 check_history, context={"timezone": check.agent.timezone}, many=True | ||||
|             ).data | ||||
|         ) | ||||
|  | ||||
|  | ||||
| @api_view() | ||||
| def run_checks(request, pk): | ||||
|     agent = get_object_or_404(Agent, pk=pk) | ||||
|     if not agent.has_nats: | ||||
|         return notify_error("Requires agent version 1.1.0 or greater") | ||||
|  | ||||
|     asyncio.run(agent.nats_cmd({"func": "runchecks"}, wait=False)) | ||||
|     return Response(agent.hostname) | ||||
|     if pyver.parse(agent.version) >= pyver.parse("1.4.1"): | ||||
|         r = asyncio.run(agent.nats_cmd({"func": "runchecks"}, timeout=15)) | ||||
|         if r == "busy": | ||||
|             return notify_error(f"Checks are already running on {agent.hostname}") | ||||
|         elif r == "ok": | ||||
|             return Response(f"Checks will now be re-run on {agent.hostname}") | ||||
|         else: | ||||
|             return notify_error("Unable to contact the agent") | ||||
|     else: | ||||
|         asyncio.run(agent.nats_cmd({"func": "runchecks"}, wait=False)) | ||||
|         return Response(f"Checks will now be re-run on {agent.hostname}") | ||||
|  | ||||
|  | ||||
| @api_view() | ||||
|   | ||||