Compare commits

393 Commits
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 0dc749bb3d | ||
|  | a8aedfde55 | ||
|  | b174a89032 | ||
|  | 9b92d1b673 | ||
|  | febc9aed11 | ||
|  | de2462677e | ||
|  | 8bd94d46eb | ||
|  | d43cefe28f | ||
|  | b82874e261 | ||
|  | 8554cb5d6c | ||
|  | f901614056 | ||
|  | b555d217ab | ||
|  | 775c600234 | ||
|  | 128f2570b8 | ||
|  | 3cd53e79b4 | ||
|  | ebba84ffda | ||
|  | 1e1a42fe98 | ||
|  | 8a744a440d | ||
|  | f4fc3c7d55 | ||
|  | 0594d121de | ||
|  | 12c85d6234 | ||
|  | 5e37728f66 | ||
|  | e8e19fede7 | ||
|  | e565dbfa66 | ||
|  | d180d6820c | ||
|  | 7f252e9b7c | ||
|  | 41db8681f8 | ||
|  | 26cd58fd6d | ||
|  | 63c7e1aa9d | ||
|  | d5a6063e5e | ||
|  | 00affdbdec | ||
|  | db3f0bbd4f | ||
|  | 020a59cb97 | ||
|  | ff4fa6402d | ||
|  | 80f7555499 | ||
|  | 10cc187c5d | ||
|  | def4a8a67e | ||
|  | 25843edb48 | ||
|  | 54294141b0 | ||
|  | f3a8886b50 | ||
|  | 268cfaf234 | ||
|  | 651ae20304 | ||
|  | e22f69a5dc | ||
|  | a39808f44c | ||
|  | fcb541f734 | ||
|  | 79ca0f1684 | ||
|  | f2ebc38044 | ||
|  | d4335675f1 | ||
|  | be4b05423e | ||
|  | d9fe8db2a7 | ||
|  | f92e780765 | ||
|  | 7aebdb7c78 | ||
|  | abb2dd842b | ||
|  | 75713c8015 | ||
|  | 42e1717455 | ||
|  | bfb19a9eb7 | ||
|  | 3e08585114 | ||
|  | 12e82c7a8d | ||
|  | 0fcc683903 | ||
|  | ed7a8dc0f5 | ||
|  | 0a9d29c98d | ||
|  | f63e801608 | ||
|  | 77f04e1a32 | ||
|  | 362819ce16 | ||
|  | 1d9165a627 | ||
|  | 7ee8aaa027 | ||
|  | 516e279fc3 | ||
|  | 880611eddb | ||
|  | c4bf776069 | ||
|  | 097d6464c0 | ||
|  | b86e4e017f | ||
|  | bbec17d498 | ||
|  | 3b7b5f4ec3 | ||
|  | 0986efef29 | ||
|  | 06091cbf1c | ||
|  | b588bab268 | ||
|  | 0736cfe959 | ||
|  | 400352254a | ||
|  | 259c3dc781 | ||
|  | 506055a815 | ||
|  | 3edf6c57ba | ||
|  | c404ae7ac8 | ||
|  | 312774e472 | ||
|  | c540f802b0 | ||
|  | 6a2a2761e1 | ||
|  | 2508458c80 | ||
|  | 025d9e0141 | ||
|  | 734b3b07ab | ||
|  | e4250a857a | ||
|  | 56d1b2716c | ||
|  | c5d7e61e6c | ||
|  | 6222a127bd | ||
|  | f0b7e515b6 | ||
|  | 98d8c23868 | ||
|  | 978bb9afd0 | ||
|  | 058598b5f3 | ||
|  | 5b7ab3a10f | ||
|  | e42243c78b | ||
|  | c650ee8498 | ||
|  | 50f8968901 | ||
|  | b0fa2e6d80 | ||
|  | d59589425e | ||
|  | 6c810e514b | ||
|  | efa41dbd22 | ||
|  | f34bcfd56d | ||
|  | 8ff2e3fb29 | ||
|  | 033c04a0f2 | ||
|  | 6ae2da22c1 | ||
|  | cef1ab9512 | ||
|  | 94f02bfca3 | ||
|  | a941bb1744 | ||
|  | 6ff591427a | ||
|  | 809e172280 | ||
|  | 17aedae0a9 | ||
|  | ef817ccb3a | ||
|  | 0fb55b0bee | ||
|  | a1a6eddc31 | ||
|  | ff3d0b6b57 | ||
|  | dd64cef4c4 | ||
|  | 9796848079 | ||
|  | fea7eb4312 | ||
|  | c12cd0e755 | ||
|  | d86a72f858 | ||
|  | 50cd7f219a | ||
|  | 8252b3eccc | ||
|  | d0c6e3a158 | ||
|  | 1505fa547e | ||
|  | 9017bad884 | ||
|  | 2ac5e316a5 | ||
|  | 29f9113062 | ||
|  | 46349672d8 | ||
|  | 4787be2db0 | ||
|  | f0a8c5d732 | ||
|  | 9ad520bf7c | ||
|  | bd0cc51554 | ||
|  | 12f599f974 | ||
|  | 0118d5fb40 | ||
|  | 65cadb311a | ||
|  | dd75bd197d | ||
|  | 7e155bdb43 | ||
|  | 993b6fddf4 | ||
|  | 6ba51df6a7 | ||
|  | 1185ac58e1 | ||
|  | f835997f49 | ||
|  | a597dba775 | ||
|  | 3194e83a66 | ||
|  | 096c3cdd34 | ||
|  | 3a1ea42333 | ||
|  | 64877d4299 | ||
|  | e957dc5e2c | ||
|  | 578d5c5830 | ||
|  | 96284f9508 | ||
|  | 698b38dcba | ||
|  | 6db826befe | ||
|  | 1a3d412d73 | ||
|  | b8461c9dd8 | ||
|  | 699bd9de10 | ||
|  | 54b6866e21 | ||
|  | afd155e9c1 | ||
|  | 910a717230 | ||
|  | 70fbd33d61 | ||
|  | 2da0d5ee21 | ||
|  | 98f64e057a | ||
|  | 3d9d936c56 | ||
|  | 2b4cb59df8 | ||
|  | 9d80da52e3 | ||
|  | fd176d2c64 | ||
|  | 538b6de36b | ||
|  | f7eca8aee0 | ||
|  | a754d94c2c | ||
|  | 5e3493e6a9 | ||
|  | 619a14c26b | ||
|  | 7d9a8decf0 | ||
|  | d11e14ad89 | ||
|  | 69189cf2af | ||
|  | 6e7d2f19d2 | ||
|  | d99ebf5d6a | ||
|  | ef2d19e95b | ||
|  | e3a66f017e | ||
|  | 9e544ad471 | ||
|  | 5f19aa527a | ||
|  | bfd5bc5c26 | ||
|  | 2d0ec3accd | ||
|  | 0999d98225 | ||
|  | d8dd3e133f | ||
|  | 528470c37f | ||
|  | c03cd53853 | ||
|  | b57fc8a29c | ||
|  | a04ed5c3ca | ||
|  | 3ad1df14f6 | ||
|  | d8caf12fdc | ||
|  | 5ca9d30d5f | ||
|  | a7a71b4a46 | ||
|  | 638603ac6b | ||
|  | 1d70c15027 | ||
|  | 7a5f03d672 | ||
|  | 39e97c5589 | ||
|  | 1943d8367e | ||
|  | f91c5af9a1 | ||
|  | 2be71fc877 | ||
|  | f5f5b4a8db | ||
|  | ac9cfd09ea | ||
|  | 4cfc85dbfd | ||
|  | 1f3d2f47b1 | ||
|  | 653c482ff7 | ||
|  | 4b069cc2b0 | ||
|  | c89349a43a | ||
|  | 6e92d6c62c | ||
|  | 5d3d3e9076 | ||
|  | b440c772d6 | ||
|  | 2895560b30 | ||
|  | bedcecb2e1 | ||
|  | 656ac829a4 | ||
|  | 4d83debc0e | ||
|  | 4ff5d19979 | ||
|  | 2216ee422e | ||
|  | 9acda5696e | ||
|  | dc6255048a | ||
|  | 2acde429d7 | ||
|  | efcac1adac | ||
|  | 81d5ecd758 | ||
|  | d9ff004454 | ||
|  | d57135d793 | ||
|  | bb5a0023af | ||
|  | e3c25a167e | ||
|  | 5be93ae17d | ||
|  | 3a2511d4a1 | ||
|  | 8ec7d98eef | ||
|  | 9421ae25f7 | ||
|  | 5b288b6fa1 | ||
|  | d35ed2980b | ||
|  | 6d8df6d2b9 | ||
|  | a839513f7f | ||
|  | 97b37b4742 | ||
|  | 4894031219 | ||
|  | 8985b5511c | ||
|  | b3c2a6a0cc | ||
|  | 7291b440bb | ||
|  | d75f134677 | ||
|  | e60069ec1d | ||
|  | 034f49573d | ||
|  | 973d37a237 | ||
|  | d2ec609e68 | ||
|  | 6b410399cd | ||
|  | 0c010570b9 | ||
|  | 78fc7faa13 | ||
|  | 7671cce263 | ||
|  | a43a66a2d3 | ||
|  | 2190a2ed25 | ||
|  | 227636b705 | ||
|  | 5032170362 | ||
|  | b94c3961eb | ||
|  | 46c7e89a94 | ||
|  | 80861fd620 | ||
|  | 44f9390790 | ||
|  | 8eca6c409a | ||
|  | 4907c01191 | ||
|  | 04bf314c61 | ||
|  | 57d92b276b | ||
|  | 6a8efddab5 | ||
|  | fd908494ae | ||
|  | d617b23c2f | ||
|  | 27874728bc | ||
|  | 56a0345260 | ||
|  | c412839165 | ||
|  | b77f927ad5 | ||
|  | 8edd7f6a56 | ||
|  | c6915d0291 | ||
|  | 388eb94014 | ||
|  | 9ab80553e1 | ||
|  | 86d639ee6a | ||
|  | 979fd8a249 | ||
|  | e65ab58f84 | ||
|  | 8414bdbab1 | ||
|  | d037b09128 | ||
|  | 9a687fec9b | ||
|  | e9d71f169c | ||
|  | e09c307d58 | ||
|  | d23d641b1b | ||
|  | b1301091f9 | ||
|  | 2458eb3960 | ||
|  | fa836d88c7 | ||
|  | e26349f2fc | ||
|  | daa4e4d566 | ||
|  | 8e75df686d | ||
|  | 53537e7b3a | ||
|  | 4beddc2271 | ||
|  | a6e4a774e0 | ||
|  | dacc1c5770 | ||
|  | 25e922bc4c | ||
|  | c877c9b0fb | ||
|  | 56bb206f25 | ||
|  | 740a9ceaa7 | ||
|  | 64e936127a | ||
|  | bd4549f389 | ||
|  | b1f7bd3ead | ||
|  | b5e3b16e3a | ||
|  | 96a72a2cd7 | ||
|  | c155da858e | ||
|  | 5e20a5cd71 | ||
|  | c1b2bbd152 | ||
|  | e3b5f418d6 | ||
|  | f82b589d03 | ||
|  | cddac4d0fb | ||
|  | dd6f92e54d | ||
|  | 5d4558bddf | ||
|  | 5aa7b5a337 | ||
|  | 2fe0b5b90d | ||
|  | aa6997990c | ||
|  | c02ab50a0a | ||
|  | 7cb16b2259 | ||
|  | 3173dc83a5 | ||
|  | baddc29bb8 | ||
|  | 612cbe6be4 | ||
|  | 4c1d2ab1bb | ||
|  | 6b4704b2e2 | ||
|  | c2286cde01 | ||
|  | 24a17712e7 | ||
|  | 27d537e7bb | ||
|  | dbd89c72a3 | ||
|  | ff41bbd0e5 | ||
|  | 4bdb6ae84e | ||
|  | cece7b79ad | ||
|  | 8d09d95fc3 | ||
|  | 752542a1d1 | ||
|  | dd077383f7 | ||
|  | 6e808dbb0f | ||
|  | 4ef3441f70 | ||
|  | 82624d6657 | ||
|  | 62e2b5230c | ||
|  | 3325c30f29 | ||
|  | 18a06168f1 | ||
|  | 27e93e499f | ||
|  | 90644a21a3 | ||
|  | 7e31f43ef1 | ||
|  | b13fc1fba4 | ||
|  | 5d9109e526 | ||
|  | 78dfa36b2a | ||
|  | dc05d87b44 | ||
|  | 2c323a13c1 | ||
|  | d4c5e38857 | ||
|  | fb80e5c367 | ||
|  | beb08a3afb | ||
|  | 7b2de8cbbd | ||
|  | 83e63bc87c | ||
|  | 4f5da33fd6 | ||
|  | d00d003a67 | ||
|  | 002f24be10 | ||
|  | 04992a1d95 | ||
|  | 3c7cf2446e | ||
|  | 29774ac014 | ||
|  | 562d580987 | ||
|  | d8ad6c0cb0 | ||
|  | 7897b0ebe9 | ||
|  | e38af9fd16 | ||
|  | 6ffdf5c251 | ||
|  | 69ef7676af | ||
|  | b0ac57040c | ||
|  | 826ac7f185 | ||
|  | 0623f53f5d | ||
|  | b5ae875589 | ||
|  | c152e18e1a | ||
|  | 903f0e5e19 | ||
|  | 6fefd5589c | ||
|  | 58fe14bd31 | ||
|  | 97f362ed1e | ||
|  | b63e87ecb6 | ||
|  | ac3550dfd7 | ||
|  | 8278a4cfd9 | ||
|  | f161a2bbc8 | ||
|  | 6a94489df0 | ||
|  | c3a0b9192f | ||
|  | 69ff70a9ce | ||
|  | 5284eb0af8 | ||
|  | 58384ae136 | ||
|  | 054cc78e65 | ||
|  | 8c283281d6 | ||
|  | 241fe41756 | ||
|  | e50e0626fa | ||
|  | c9135f1573 | ||
|  | ec2663a152 | ||
|  | 7567042c8a | ||
|  | c99ceb155f | ||
|  | f44c92f0d3 | ||
|  | 492701ec62 | ||
|  | a6d0acaa4d | ||
|  | f84b4e7274 | ||
|  | b7ef5b82d8 | ||
|  | a854d2c38c | ||
|  | 5140499bbd | ||
|  | 7183e9ee85 | ||
|  | 11885e0aca | ||
|  | 2bda4e822c | ||

| @@ -26,7 +26,8 @@ services: | ||||
|     container_name: trmm-app-dev | ||||
|     image: node:16-alpine | ||||
|     restart: always | ||||
|     command: /bin/sh -c "npm install npm@latest -g && npm install && npm run serve -- --host 0.0.0.0 --port ${APP_PORT}" | ||||
|     command: /bin/sh -c "npm install --cache ~/.npm && npm run serve" | ||||
|     user: 1000:1000 | ||||
|     working_dir: /workspace/web | ||||
|     volumes: | ||||
|       - ..:/workspace:cached | ||||
| @@ -105,7 +106,7 @@ services: | ||||
|     image: postgres:13-alpine | ||||
|     restart: always | ||||
|     environment: | ||||
|       POSTGRES_DB: tacticalrmm | ||||
|       POSTGRES_DB: ${POSTGRES_DB} | ||||
|       POSTGRES_USER: ${POSTGRES_USER} | ||||
|       POSTGRES_PASSWORD: ${POSTGRES_PASS} | ||||
|     volumes: | ||||
| @@ -120,7 +121,7 @@ services: | ||||
|     container_name: trmm-redis-dev | ||||
|     restart: always | ||||
|     user: 1000:1000 | ||||
|     command: redis-server --appendonly yes | ||||
|     command: redis-server | ||||
|     image: redis:6.0-alpine | ||||
|     volumes: | ||||
|       - redis-data-dev:/data | ||||
| @@ -145,6 +146,7 @@ services: | ||||
|       TRMM_PASS: ${TRMM_PASS} | ||||
|       HTTP_PROTOCOL: ${HTTP_PROTOCOL} | ||||
|       APP_PORT: ${APP_PORT} | ||||
|       POSTGRES_DB: ${POSTGRES_DB} | ||||
|     depends_on: | ||||
|       - postgres-dev | ||||
|       - meshcentral-dev | ||||
|   | ||||
| @@ -60,6 +60,8 @@ DEBUG = True | ||||
|  | ||||
| DOCKER_BUILD = True | ||||
|  | ||||
| SWAGGER_ENABLED = True | ||||
|  | ||||
| CERT_FILE = '${CERT_PUB_PATH}' | ||||
| KEY_FILE = '${CERT_PRIV_PATH}' | ||||
|  | ||||
| @@ -94,6 +96,7 @@ EOF | ||||
|   echo "${localvars}" > ${WORKSPACE_DIR}/api/tacticalrmm/tacticalrmm/local_settings.py | ||||
|  | ||||
|   # run migrations and init scripts | ||||
|   "${VIRTUAL_ENV}"/bin/python manage.py pre_update_tasks | ||||
|   "${VIRTUAL_ENV}"/bin/python manage.py migrate --no-input | ||||
|   "${VIRTUAL_ENV}"/bin/python manage.py collectstatic --no-input | ||||
|   "${VIRTUAL_ENV}"/bin/python manage.py initial_db_setup | ||||
| @@ -143,7 +146,7 @@ if [ "$1" = 'tactical-init-dev' ]; then | ||||
|   webenv="$(cat << EOF | ||||
| PROD_URL = "${HTTP_PROTOCOL}://${API_HOST}" | ||||
| DEV_URL = "${HTTP_PROTOCOL}://${API_HOST}" | ||||
| APP_URL = "https://${APP_HOST}" | ||||
| DEV_PORT = ${APP_PORT} | ||||
| DOCKER_BUILD = 1 | ||||
| EOF | ||||
| )" | ||||
|   | ||||
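
The init sequence above now runs a `pre_update_tasks` management command before `migrate`. Its implementation is not part of this diff; the following is only a rough, hypothetical sketch of the shape such a command takes, following the `BaseCommand` pattern used by the other management commands changed further down.

```python
# Hypothetical sketch only -- the real pre_update_tasks command is not shown in this diff.
from django.core.management.base import BaseCommand


class Command(BaseCommand):
    help = "Tasks that must run before migrations during an update"

    def handle(self, *args, **kwargs):  # type: ignore
        # Placeholder body: real pre-update work (cleanup of stale state, etc.) is assumed.
        self.stdout.write("Running pre-update tasks...")
```
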
| @@ -1,36 +1,3 @@ | ||||
| # To ensure app dependencies are ported from your virtual environment/host machine into your container, run 'pip freeze > requirements.txt' in the terminal to overwrite this file | ||||
| asgiref==3.5.0 | ||||
| celery==5.2.3 | ||||
| channels==3.0.4 | ||||
| channels_redis==3.3.1 | ||||
| daphne==3.0.2 | ||||
| Django==3.2.12 | ||||
| django-cors-headers==3.11.0 | ||||
| django-ipware==4.0.2 | ||||
| django-rest-knox==4.2.0 | ||||
| djangorestframework==3.13.1 | ||||
| future==0.18.2 | ||||
| msgpack==1.0.3 | ||||
| nats-py==2.0.0 | ||||
| packaging==21.3 | ||||
| psycopg2-binary==2.9.3 | ||||
| pycryptodome==3.14.1 | ||||
| pyotp==2.6.0 | ||||
| pytz==2021.3 | ||||
| qrcode==7.3.1 | ||||
| redis==4.1.3 | ||||
| requests==2.27.1 | ||||
| twilio==7.6.0 | ||||
| urllib3==1.26.8 | ||||
| validators==0.18.2 | ||||
| websockets==10.1 | ||||
| drf_spectacular==0.21.2 | ||||
|  | ||||
| # dev | ||||
| black==22.1.0 | ||||
| Werkzeug==2.0.2 | ||||
| django-extensions==3.1.5 | ||||
| Pygments==2.11.2 | ||||
| isort==5.10.1 | ||||
| mypy==0.931 | ||||
| types-pytz==2021.3.4 | ||||
| -r ../api/tacticalrmm/requirements.txt | ||||
| -r ../api/tacticalrmm/requirements-dev.txt | ||||
| -r ../api/tacticalrmm/requirements-test.txt | ||||
							
								
								
									
**.github/workflows/ci-tests.yml** (vendored, new file, 73 lines)

| @@ -0,0 +1,73 @@ | ||||
| name: Tests CI | ||||
|  | ||||
| on: | ||||
|   push: | ||||
|     branches: | ||||
|       - "*" | ||||
|   pull_request: | ||||
|     branches: | ||||
|       - "*" | ||||
|  | ||||
| jobs: | ||||
|   test: | ||||
|     runs-on: ubuntu-latest | ||||
|     name: Tests | ||||
|     strategy: | ||||
|       matrix: | ||||
|         python-version: ["3.10.4"] | ||||
|  | ||||
|     steps: | ||||
|       - uses: actions/checkout@v3 | ||||
|  | ||||
|       - uses: harmon758/postgresql-action@v1 | ||||
|         with: | ||||
|           postgresql version: "14" | ||||
|           postgresql db: "pipeline" | ||||
|           postgresql user: "pipeline" | ||||
|           postgresql password: "pipeline123456" | ||||
|  | ||||
|       - name: Setup Python ${{ matrix.python-version }} | ||||
|         uses: actions/setup-python@v3 | ||||
|         with: | ||||
|           python-version: ${{ matrix.python-version }} | ||||
|  | ||||
|       - name: Install redis | ||||
|         run: | | ||||
|           sudo apt update | ||||
|           sudo apt install -y redis | ||||
|           redis-server --version | ||||
|  | ||||
|       - name: Install requirements | ||||
|         working-directory: api/tacticalrmm | ||||
|         run: | | ||||
|           python --version | ||||
|           SETTINGS_FILE="tacticalrmm/settings.py" | ||||
|           SETUPTOOLS_VER=$(grep "^SETUPTOOLS_VER" "$SETTINGS_FILE" | awk -F'[= "]' '{print $5}') | ||||
|           WHEEL_VER=$(grep "^WHEEL_VER" "$SETTINGS_FILE" | awk -F'[= "]' '{print $5}') | ||||
|           pip install --upgrade pip | ||||
|           pip install setuptools==${SETUPTOOLS_VER} wheel==${WHEEL_VER} | ||||
|           pip install -r requirements.txt -r requirements-test.txt | ||||
|  | ||||
|       - name: Codestyle black | ||||
|         working-directory: api | ||||
|         run: | | ||||
|           black --exclude migrations/ --check tacticalrmm | ||||
|           if [ $? -ne 0 ]; then | ||||
|               exit 1 | ||||
|           fi | ||||
|  | ||||
|       - name: Run django tests | ||||
|         env: | ||||
|           GHACTIONS: "yes" | ||||
|         working-directory: api/tacticalrmm | ||||
|         run: | | ||||
|           pytest | ||||
|           if [ $? -ne 0 ]; then | ||||
|               exit 1 | ||||
|           fi | ||||
|  | ||||
|       - uses: codecov/codecov-action@v3 | ||||
|         with: | ||||
|           directory: ./api/tacticalrmm | ||||
|           files: ./api/tacticalrmm/coverage.xml | ||||
|           verbose: true | ||||
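
The `grep`/`awk` one-liners in the "Install requirements" step above (the same pattern appears again in the ansible role later in this diff) pull pinned versions such as `SETUPTOOLS_VER` out of `settings.py` by splitting the line on `=`, spaces, and double quotes and taking the fifth field. A minimal Python sketch of that extraction is below; the settings line and version number in it are illustrative, not the real pins.

```python
import re


def extract_pinned_version(settings_text: str, name: str) -> str:
    """Mimic: grep "^NAME" settings.py | awk -F'[= "]' '{print $5}'."""
    for line in settings_text.splitlines():
        if line.startswith(name):
            # Like awk with -F'[= "]', consecutive separators yield empty fields,
            # so the token inside the quotes ends up as the fifth field (index 4).
            return re.split(r'[= "]', line)[4]
    raise ValueError(f"{name} not found")


# Illustrative only -- the real value lives in api/tacticalrmm/tacticalrmm/settings.py.
print(extract_pinned_version('SETUPTOOLS_VER = "59.6.0"', "SETUPTOOLS_VER"))  # -> 59.6.0
```
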
							
								
								
									
**.github/workflows/codeql-analysis.yml** (vendored, 2 lines changed)

| @@ -32,7 +32,7 @@ jobs: | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         language: [ 'go', 'javascript', 'python' ] | ||||
|         language: [ 'go', 'python' ] | ||||
|         # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] | ||||
|         # Learn more about CodeQL language support at https://git.io/codeql-language-support | ||||
|  | ||||
|   | ||||
							
								
								
									
**.github/workflows/devskim-analysis.yml** (vendored, deleted, 34 lines)

| @@ -1,34 +0,0 @@ | ||||
| # This workflow uses actions that are not certified by GitHub. | ||||
| # They are provided by a third-party and are governed by | ||||
| # separate terms of service, privacy policy, and support | ||||
| # documentation. | ||||
|  | ||||
| name: DevSkim | ||||
|  | ||||
| on: | ||||
|   push: | ||||
|     branches: [ develop ] | ||||
|   pull_request: | ||||
|     branches: [ develop ] | ||||
|   schedule: | ||||
|     - cron: '19 5 * * 0' | ||||
|  | ||||
| jobs: | ||||
|   lint: | ||||
|     name: DevSkim | ||||
|     runs-on: ubuntu-20.04 | ||||
|     permissions: | ||||
|       actions: read | ||||
|       contents: read | ||||
|       security-events: write | ||||
|     steps: | ||||
|       - name: Checkout code | ||||
|         uses: actions/checkout@v2 | ||||
|  | ||||
|       - name: Run DevSkim scanner | ||||
|         uses: microsoft/DevSkim-Action@v1 | ||||
|          | ||||
|       - name: Upload DevSkim scan results to GitHub Security tab | ||||
|         uses: github/codeql-action/upload-sarif@v1 | ||||
|         with: | ||||
|           sarif_file: devskim-results.sarif | ||||
							
								
								
									
**.gitignore** (vendored, 4 lines changed)

| @@ -51,3 +51,7 @@ reset_db.sh | ||||
| run_go_cmd.py | ||||
| nats-api.conf | ||||
| ignore/ | ||||
| coverage.lcov | ||||
| daphne.sock.lock | ||||
| .pytest_cache | ||||
| coverage.xml | ||||
|   | ||||
							
								
								
									
**.vscode/extensions.json** (vendored, new file, 23 lines)

| @@ -0,0 +1,23 @@ | ||||
| { | ||||
|   "recommendations": [ | ||||
|     // frontend | ||||
|     "dbaeumer.vscode-eslint", | ||||
|     "esbenp.prettier-vscode", | ||||
|     "editorconfig.editorconfig", | ||||
|     "vue.volar", | ||||
|     "wayou.vscode-todo-highlight", | ||||
|  | ||||
|     // python | ||||
|     "matangover.mypy", | ||||
|     "ms-python.python", | ||||
|  | ||||
|     // golang | ||||
|     "golang.go" | ||||
|   ], | ||||
|   "unwantedRecommendations": [ | ||||
|     "octref.vetur", | ||||
|     "hookyqr.beautify", | ||||
|     "dbaeumer.jshint", | ||||
|     "ms-vscode.vscode-typescript-tslint-plugin" | ||||
|   ] | ||||
| } | ||||
							
								
								
									
**.vscode/settings.json** (vendored, 135 lines changed)

| @@ -1,68 +1,73 @@ | ||||
| { | ||||
|     "python.defaultInterpreterPath": "api/tacticalrmm/env/bin/python", | ||||
|     "python.languageServer": "Pylance", | ||||
|     "python.analysis.extraPaths": [ | ||||
|         "api/tacticalrmm", | ||||
|         "api/env", | ||||
|     ], | ||||
|     "python.analysis.diagnosticSeverityOverrides": { | ||||
|         "reportUnusedImport": "error", | ||||
|         "reportDuplicateImport": "error", | ||||
|     }, | ||||
|     "python.analysis.typeCheckingMode": "basic", | ||||
|     "python.formatting.provider": "black", | ||||
|     "editor.formatOnSave": true, | ||||
|     "vetur.format.defaultFormatter.js": "prettier", | ||||
|     "vetur.format.defaultFormatterOptions": { | ||||
|         "prettier": { | ||||
|             "semi": true, | ||||
|             "printWidth": 120, | ||||
|             "tabWidth": 2, | ||||
|             "useTabs": false, | ||||
|             "arrowParens": "avoid", | ||||
|         } | ||||
|     }, | ||||
|     "vetur.format.options.tabSize": 2, | ||||
|     "vetur.format.options.useTabs": false, | ||||
|   "python.defaultInterpreterPath": "api/tacticalrmm/env/bin/python", | ||||
|   "python.languageServer": "Pylance", | ||||
|   "python.analysis.extraPaths": ["api/tacticalrmm", "api/env"], | ||||
|   "python.analysis.diagnosticSeverityOverrides": { | ||||
|     "reportUnusedImport": "error", | ||||
|     "reportDuplicateImport": "error", | ||||
|     "reportGeneralTypeIssues": "none" | ||||
|   }, | ||||
|   "python.analysis.typeCheckingMode": "basic", | ||||
|   "python.linting.enabled": true, | ||||
|   "python.linting.mypyEnabled": true, | ||||
|   "python.linting.mypyArgs": [ | ||||
|     "--ignore-missing-imports", | ||||
|     "--follow-imports=silent", | ||||
|     "--show-column-numbers", | ||||
|     "--strict" | ||||
|   ], | ||||
|   "python.linting.ignorePatterns": [ | ||||
|     "**/site-packages/**/*.py", | ||||
|     ".vscode/*.py", | ||||
|     "**env/**" | ||||
|   ], | ||||
|   "python.formatting.provider": "black", | ||||
|   "mypy.targets": ["api/tacticalrmm"], | ||||
|   "mypy.runUsingActiveInterpreter": true, | ||||
|   "editor.bracketPairColorization.enabled": true, | ||||
|   "editor.guides.bracketPairs": true, | ||||
|   "editor.formatOnSave": true, | ||||
|   "files.associations": { | ||||
|     "**/ansible/**/*.yml": "ansible", | ||||
|     "**/docker/**/docker-compose*.yml": "dockercompose" | ||||
|   }, | ||||
|   "files.watcherExclude": { | ||||
|     "files.watcherExclude": { | ||||
|         "files.watcherExclude": { | ||||
|             "**/.git/objects/**": true, | ||||
|             "**/.git/subtree-cache/**": true, | ||||
|             "**/node_modules/": true, | ||||
|             "/node_modules/**": true, | ||||
|             "**/env/": true, | ||||
|             "/env/**": true, | ||||
|             "**/__pycache__": true, | ||||
|             "/__pycache__/**": true, | ||||
|             "**/.cache": true, | ||||
|             "**/.eggs": true, | ||||
|             "**/.ipynb_checkpoints": true, | ||||
|             "**/.mypy_cache": true, | ||||
|             "**/.pytest_cache": true, | ||||
|             "**/*.egg-info": true, | ||||
|             "**/*.feather": true, | ||||
|             "**/*.parquet*": true, | ||||
|             "**/*.pyc": true, | ||||
|             "**/*.zip": true | ||||
|         }, | ||||
|     }, | ||||
|     "go.useLanguageServer": true, | ||||
|     "[go]": { | ||||
|         "editor.formatOnSave": true, | ||||
|         "editor.codeActionsOnSave": { | ||||
|             "source.organizeImports": false, | ||||
|         }, | ||||
|         "editor.snippetSuggestions": "none", | ||||
|     }, | ||||
|     "[go.mod]": { | ||||
|         "editor.formatOnSave": true, | ||||
|         "editor.codeActionsOnSave": { | ||||
|             "source.organizeImports": true, | ||||
|         }, | ||||
|     }, | ||||
|     "gopls": { | ||||
|         "usePlaceholders": true, | ||||
|         "completeUnimported": true, | ||||
|         "staticcheck": true, | ||||
|       "**/.git/objects/**": true, | ||||
|       "**/.git/subtree-cache/**": true, | ||||
|       "**/node_modules/": true, | ||||
|       "/node_modules/**": true, | ||||
|       "**/env/": true, | ||||
|       "/env/**": true, | ||||
|       "**/__pycache__": true, | ||||
|       "/__pycache__/**": true, | ||||
|       "**/.cache": true, | ||||
|       "**/.eggs": true, | ||||
|       "**/.ipynb_checkpoints": true, | ||||
|       "**/.mypy_cache": true, | ||||
|       "**/.pytest_cache": true, | ||||
|       "**/*.egg-info": true, | ||||
|       "**/*.feather": true, | ||||
|       "**/*.parquet*": true, | ||||
|       "**/*.pyc": true, | ||||
|       "**/*.zip": true | ||||
|     } | ||||
| } | ||||
|   }, | ||||
|   "go.useLanguageServer": true, | ||||
|   "[go]": { | ||||
|     "editor.codeActionsOnSave": { | ||||
|       "source.organizeImports": false | ||||
|     }, | ||||
|     "editor.snippetSuggestions": "none" | ||||
|   }, | ||||
|   "[go.mod]": { | ||||
|     "editor.codeActionsOnSave": { | ||||
|       "source.organizeImports": true | ||||
|     } | ||||
|   }, | ||||
|   "gopls": { | ||||
|     "usePlaceholders": true, | ||||
|     "completeUnimported": true, | ||||
|     "staticcheck": true | ||||
|   } | ||||
| } | ||||
|   | ||||
							
								
								
									
**.vscode/tasks.json** (vendored, deleted, 23 lines)

| @@ -1,23 +0,0 @@ | ||||
| { | ||||
|     // See https://go.microsoft.com/fwlink/?LinkId=733558 | ||||
|     // for the documentation about the tasks.json format | ||||
|     "version": "2.0.0", | ||||
|     "tasks": [ | ||||
|         { | ||||
|             "label": "docker debug", | ||||
|             "type": "shell", | ||||
|             "command": "docker-compose", | ||||
|             "args": [ | ||||
|                 "-p", | ||||
|                 "trmm", | ||||
|                 "-f", | ||||
|                 ".devcontainer/docker-compose.yml", | ||||
|                 "-f", | ||||
|                 ".devcontainer/docker-compose.debug.yml", | ||||
|                 "up", | ||||
|                 "-d", | ||||
|                 "--build" | ||||
|             ] | ||||
|         } | ||||
|     ] | ||||
| } | ||||
							
								
								
									
**README.md** (13 lines changed)

| @@ -1,13 +1,13 @@ | ||||
| # Tactical RMM | ||||
|  | ||||
| [](https://dev.azure.com/dcparsi/Tactical%20RMM/_build/latest?definitionId=4&branchName=develop) | ||||
| [](https://coveralls.io/github/wh1te909/tacticalrmm?branch=develop) | ||||
|  | ||||
| [](https://codecov.io/gh/amidaware/tacticalrmm) | ||||
| [](https://github.com/python/black) | ||||
|  | ||||
| Tactical RMM is a remote monitoring & management tool, built with Django and Vue.\ | ||||
| It uses an [agent](https://github.com/amidaware/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral) | ||||
|  | ||||
| # [LIVE DEMO](https://rmm.tacticalrmm.io/) | ||||
| # [LIVE DEMO](https://demo.tacticalrmm.com/) | ||||
| Demo database resets every hour. A lot of features are disabled for obvious reasons due to the nature of this app. | ||||
|  | ||||
| ### [Discord Chat](https://discord.gg/upGTkWp) | ||||
| @@ -28,9 +28,12 @@ Demo database resets every hour. A lot of features are disabled for obvious reas | ||||
| - Remote software installation via chocolatey | ||||
| - Software and hardware inventory | ||||
|  | ||||
| ## Windows versions supported | ||||
| ## Windows agent versions supported | ||||
|  | ||||
| - Windows 7, 8.1, 10, Server 2008R2, 2012R2, 2016, 2019 | ||||
| - Windows 7, 8.1, 10, 11, Server 2008R2, 2012R2, 2016, 2019, 2022 | ||||
|  | ||||
| ## Linux agent versions supported | ||||
| - Any distro with systemd which includes but is not limited to: Debian (10, 11), Ubuntu x86_64 (18.04, 20.04, 22.04), Synology 7, centos, freepbx and more! | ||||
|  | ||||
| ## Installation / Backup / Restore / Usage | ||||
|  | ||||
|   | ||||
| @@ -4,8 +4,8 @@ | ||||
|  | ||||
| | Version | Supported          | | ||||
| | ------- | ------------------ | | ||||
| | 0.12.0   | :white_check_mark: | | ||||
| | < 0.12.0 | :x:                | | ||||
| | 0.14.1   | :white_check_mark: | | ||||
| | < 0.14.1 | :x:                | | ||||
|  | ||||
| ## Reporting a Vulnerability | ||||
|  | ||||
|   | ||||
							
								
								
									
**ansible/README.md** (new file, 3 lines)

| @@ -0,0 +1,3 @@ | ||||
| ### tacticalrmm ansible WIP | ||||
|  | ||||
| ansible role to setup a Debian 11 VM for tacticalrmm local development | ||||
							
								
								
									
**ansible/roles/trmm_dev/defaults/main.yml** (new file, 37 lines)

| @@ -0,0 +1,37 @@ | ||||
| --- | ||||
| user: "tactical" | ||||
| python_ver: "3.10.4" | ||||
| backend_repo: "https://github.com/amidaware/tacticalrmm.git" | ||||
| frontend_repo: "https://github.com/amidaware/tacticalrmm-web.git" | ||||
| scripts_repo: "https://github.com/amidaware/community-scripts.git" | ||||
| backend_dir: "/opt/trmm" | ||||
| frontend_dir: "/opt/trmm-web" | ||||
| scripts_dir: "/opt/community-scripts" | ||||
| trmm_dir: "/opt/trmm/api/tacticalrmm/tacticalrmm" | ||||
| settings_file: "{{ trmm_dir }}/settings.py" | ||||
| local_settings_file: "{{ trmm_dir }}/local_settings.py" | ||||
|  | ||||
| base_pkgs: | ||||
|   - build-essential | ||||
|   - curl | ||||
|   - wget | ||||
|   - dirmngr | ||||
|   - gnupg | ||||
|   - openssl | ||||
|   - gcc | ||||
|   - g++ | ||||
|   - make | ||||
|   - ca-certificates | ||||
|   - redis | ||||
|   - git | ||||
|  | ||||
| python_pkgs: | ||||
|   - zlib1g-dev | ||||
|   - libncurses5-dev | ||||
|   - libgdbm-dev | ||||
|   - libnss3-dev | ||||
|   - libssl-dev | ||||
|   - libreadline-dev | ||||
|   - libffi-dev | ||||
|   - libsqlite3-dev | ||||
|   - libbz2-dev | ||||
							
								
								
									
**ansible/roles/trmm_dev/files/nginx-default.conf** (new file, 25 lines)

| @@ -0,0 +1,25 @@ | ||||
| worker_rlimit_nofile 1000000; | ||||
| user www-data; | ||||
| worker_processes auto; | ||||
| pid /run/nginx.pid; | ||||
| include /etc/nginx/modules-enabled/*.conf; | ||||
|  | ||||
| events { | ||||
|         worker_connections 2048; | ||||
| } | ||||
|  | ||||
| http { | ||||
|         sendfile on; | ||||
|         tcp_nopush on; | ||||
|         types_hash_max_size 2048; | ||||
|         server_names_hash_bucket_size 64; | ||||
|         include /etc/nginx/mime.types; | ||||
|         default_type application/octet-stream; | ||||
|         ssl_protocols TLSv1 TLSv1.1 TLSv1.2 TLSv1.3; | ||||
|         ssl_prefer_server_ciphers on; | ||||
|         access_log /var/log/nginx/access.log; | ||||
|         error_log /var/log/nginx/error.log; | ||||
|         gzip on; | ||||
|         include /etc/nginx/conf.d/*.conf; | ||||
|         include /etc/nginx/sites-enabled/*; | ||||
| } | ||||
							
								
								
									
**ansible/roles/trmm_dev/files/vimrc.local** (new file, 20 lines)

| @@ -0,0 +1,20 @@ | ||||
| " This file loads the default vim options at the beginning and prevents | ||||
| " that they are being loaded again later. All other options that will be set, | ||||
| " are added, or overwrite the default settings. Add as many options as you | ||||
| " whish at the end of this file. | ||||
|  | ||||
| " Load the defaults | ||||
| source $VIMRUNTIME/defaults.vim | ||||
|  | ||||
| " Prevent the defaults from being loaded again later, if the user doesn't | ||||
| " have a local vimrc (~/.vimrc) | ||||
| let skip_defaults_vim = 1 | ||||
|  | ||||
|  | ||||
| " Set more options (overwrites settings from /usr/share/vim/vim80/defaults.vim) | ||||
| " Add as many options as you whish | ||||
|  | ||||
| " Set the mouse mode to 'r' | ||||
| if has('mouse') | ||||
|   set mouse=r | ||||
| endif | ||||
							
								
								
									
**ansible/roles/trmm_dev/tasks/main.yml** (new file, 253 lines)

| @@ -0,0 +1,253 @@ | ||||
| --- | ||||
| - name: set mouse mode for vim | ||||
|   tags: vim | ||||
|   become: yes | ||||
|   ansible.builtin.copy: | ||||
|     src: vimrc.local | ||||
|     dest: /etc/vim/vimrc.local | ||||
|     owner: "root" | ||||
|     group: "root" | ||||
|     mode: "0644" | ||||
|  | ||||
| - name: install base packages | ||||
|   tags: base | ||||
|   become: yes | ||||
|   ansible.builtin.apt: | ||||
|     pkg: "{{ item }}" | ||||
|     state: present | ||||
|     update_cache: yes | ||||
|   with_items: | ||||
|     - "{{ base_pkgs }}" | ||||
|  | ||||
| - name: install python prereqs | ||||
|   tags: python | ||||
|   become: yes | ||||
|   ansible.builtin.apt: | ||||
|     pkg: "{{ item }}" | ||||
|     state: present | ||||
|   with_items: | ||||
|     - "{{ python_pkgs }}" | ||||
|  | ||||
| - name: get cpu core count | ||||
|   tags: python | ||||
|   ansible.builtin.command: nproc | ||||
|   register: numprocs | ||||
|  | ||||
| - name: Create python tmpdir | ||||
|   tags: python | ||||
|   ansible.builtin.tempfile: | ||||
|     state: directory | ||||
|     suffix: python | ||||
|   register: python_tmp | ||||
|  | ||||
| - name: download and extract python | ||||
|   tags: python | ||||
|   ansible.builtin.unarchive: | ||||
|     src: "https://www.python.org/ftp/python/{{ python_ver }}/Python-{{ python_ver }}.tgz" | ||||
|     dest: "{{ python_tmp.path }}" | ||||
|     remote_src: yes | ||||
|  | ||||
| - name: compile python | ||||
|   tags: python | ||||
|   ansible.builtin.shell: | ||||
|     chdir: "{{ python_tmp.path }}/Python-{{ python_ver }}" | ||||
|     cmd: | | ||||
|       ./configure --enable-optimizations | ||||
|       make -j {{ numprocs.stdout }} | ||||
|  | ||||
| - name: alt install python | ||||
|   tags: python | ||||
|   become: yes | ||||
|   ansible.builtin.shell: | ||||
|     chdir: "{{ python_tmp.path }}/Python-{{ python_ver }}" | ||||
|     cmd: | | ||||
|       make altinstall | ||||
|  | ||||
| - name: install nginx | ||||
|   tags: nginx | ||||
|   become: yes | ||||
|   ansible.builtin.apt: | ||||
|     pkg: nginx | ||||
|     state: present | ||||
|  | ||||
| - name: set nginx default conf | ||||
|   tags: nginx | ||||
|   become: yes | ||||
|   ansible.builtin.copy: | ||||
|     src: nginx-default.conf | ||||
|     dest: /etc/nginx/nginx.conf | ||||
|     owner: "root" | ||||
|     group: "root" | ||||
|     mode: "0644" | ||||
|  | ||||
| - name: ensure nginx enabled and restarted | ||||
|   tags: nginx | ||||
|   become: yes | ||||
|   ansible.builtin.service: | ||||
|     name: nginx | ||||
|     enabled: yes | ||||
|     state: restarted | ||||
|  | ||||
| - name: create postgres repo | ||||
|   tags: postgres | ||||
|   become: yes | ||||
|   ansible.builtin.copy: | ||||
|     content: "deb http://apt.postgresql.org/pub/repos/apt bullseye-pgdg main" | ||||
|     dest: /etc/apt/sources.list.d/pgdg.list | ||||
|     owner: root | ||||
|     group: root | ||||
|     mode: "0440" | ||||
|  | ||||
| - name: import postgres repo signing key | ||||
|   tags: postgres | ||||
|   become: yes | ||||
|   ansible.builtin.apt_key: | ||||
|     url: https://www.postgresql.org/media/keys/ACCC4CF8.asc | ||||
|     state: present | ||||
|  | ||||
| - name: install postgresql | ||||
|   tags: postgres | ||||
|   become: yes | ||||
|   ansible.builtin.apt: | ||||
|     pkg: postgresql-14 | ||||
|     state: present | ||||
|     update_cache: yes | ||||
|  | ||||
| - name: ensure postgres enabled and started | ||||
|   tags: postgres | ||||
|   become: yes | ||||
|   ansible.builtin.service: | ||||
|     name: postgresql | ||||
|     enabled: yes | ||||
|     state: started | ||||
|  | ||||
| - name: setup database | ||||
|   tags: postgres | ||||
|   become: yes | ||||
|   become_user: postgres | ||||
|   ansible.builtin.shell: | ||||
|     cmd: | | ||||
|       psql -c "CREATE DATABASE tacticalrmm" | ||||
|       psql -c "CREATE USER {{ db_user }} WITH PASSWORD '{{ db_passwd }}'" | ||||
|       psql -c "ALTER ROLE {{ db_user }} SET client_encoding TO 'utf8'" | ||||
|       psql -c "ALTER ROLE {{ db_user }} SET default_transaction_isolation TO 'read committed'" | ||||
|       psql -c "ALTER ROLE {{ db_user }} SET timezone TO 'UTC'" | ||||
|       psql -c "ALTER ROLE {{ db_user }} CREATEDB" | ||||
|       psql -c "GRANT ALL PRIVILEGES ON DATABASE tacticalrmm TO {{ db_user }}" | ||||
|  | ||||
| - name: create repo dirs | ||||
|   become: yes | ||||
|   tags: git | ||||
|   ansible.builtin.file: | ||||
|     path: "{{ item }}" | ||||
|     state: directory | ||||
|     owner: "{{ user }}" | ||||
|     group: "{{ user }}" | ||||
|     mode: "0755" | ||||
|   with_items: | ||||
|     - "{{ backend_dir }}" | ||||
|     - "{{ frontend_dir }}" | ||||
|     - "{{ scripts_dir }}" | ||||
|  | ||||
| - name: git clone repos | ||||
|   tags: git | ||||
|   ansible.builtin.git: | ||||
|     repo: "{{ item.repo }}" | ||||
|     dest: "{{ item.dest }}" | ||||
|     version: "{{ item.version }}" | ||||
|   with_items: | ||||
|     - { | ||||
|         repo: "{{ backend_repo }}", | ||||
|         dest: "{{ backend_dir }}", | ||||
|         version: develop, | ||||
|       } | ||||
|     - { | ||||
|         repo: "{{ frontend_repo }}", | ||||
|         dest: "{{ frontend_dir }}", | ||||
|         version: develop, | ||||
|       } | ||||
|     - { repo: "{{ scripts_repo }}", dest: "{{ scripts_dir }}", version: main } | ||||
|  | ||||
| - name: get nats_server_ver | ||||
|   tags: nats | ||||
|   ansible.builtin.shell: grep "^NATS_SERVER_VER" {{ settings_file }} | awk -F'[= "]' '{print $5}' | ||||
|   register: nats_server_ver | ||||
|  | ||||
| - name: Create nats tmpdir | ||||
|   tags: nats | ||||
|   ansible.builtin.tempfile: | ||||
|     state: directory | ||||
|     suffix: nats | ||||
|   register: nats_tmp | ||||
|  | ||||
| - name: download and extract nats | ||||
|   tags: nats | ||||
|   ansible.builtin.unarchive: | ||||
|     src: "https://github.com/nats-io/nats-server/releases/download/v{{ nats_server_ver.stdout }}/nats-server-v{{ nats_server_ver.stdout }}-linux-amd64.tar.gz" | ||||
|     dest: "{{ nats_tmp.path }}" | ||||
|     remote_src: yes | ||||
|  | ||||
| - name: install nats | ||||
|   tags: nats | ||||
|   become: yes | ||||
|   ansible.builtin.copy: | ||||
|     remote_src: yes | ||||
|     src: "{{ nats_tmp.path }}/nats-server-v{{ nats_server_ver.stdout }}-linux-amd64/nats-server" | ||||
|     dest: /usr/local/bin/nats-server | ||||
|     owner: "{{ user }}" | ||||
|     group: "{{ user }}" | ||||
|     mode: "0755" | ||||
|  | ||||
| - name: Create nodejs tmpdir | ||||
|   tags: nodejs | ||||
|   ansible.builtin.tempfile: | ||||
|     state: directory | ||||
|     suffix: nodejs | ||||
|   register: nodejs_tmp | ||||
|  | ||||
| - name: download nodejs setup | ||||
|   tags: nodejs | ||||
|   ansible.builtin.get_url: | ||||
|     url: https://deb.nodesource.com/setup_16.x | ||||
|     dest: "{{ nodejs_tmp.path }}/setup_node.sh" | ||||
|     mode: "0755" | ||||
|  | ||||
| - name: run node setup script | ||||
|   tags: nodejs | ||||
|   become: yes | ||||
|   ansible.builtin.command: | ||||
|     cmd: "{{ nodejs_tmp.path }}/setup_node.sh" | ||||
|  | ||||
| - name: install nodejs | ||||
|   tags: nodejs | ||||
|   become: yes | ||||
|   ansible.builtin.apt: | ||||
|     pkg: nodejs | ||||
|     state: present | ||||
|     update_cache: yes | ||||
|  | ||||
| - name: update npm | ||||
|   tags: nodejs | ||||
|   become: yes | ||||
|   ansible.builtin.shell: | ||||
|     cmd: npm install -g npm | ||||
|  | ||||
| - name: deploy django local settings | ||||
|   tags: django | ||||
|   ansible.builtin.template: | ||||
|     src: local_settings.j2 | ||||
|     dest: "{{ local_settings_file }}" | ||||
|     mode: "0644" | ||||
|     owner: "{{ user }}" | ||||
|     group: "{{ user }}" | ||||
|  | ||||
| - name: remove tempdirs | ||||
|   tags: cleanup | ||||
|   become: yes | ||||
|   ansible.builtin.file: | ||||
|     path: "{{ item }}" | ||||
|     state: absent | ||||
|   with_items: | ||||
|     - "{{ nats_tmp.path }}" | ||||
|     - "{{ python_tmp.path }}" | ||||
|     - "{{ nodejs_tmp.path }}" | ||||
							
								
								
									
**ansible/roles/trmm_dev/templates/local_settings.j2** (new file, 19 lines)

| @@ -0,0 +1,19 @@ | ||||
| SECRET_KEY = "{{ django_secret }}" | ||||
| DEBUG = True | ||||
| ALLOWED_HOSTS = ['{{ api }}'] | ||||
| ADMIN_URL = "admin/" | ||||
| CORS_ORIGIN_WHITELIST = [ | ||||
|     "https://{{ rmm }}" | ||||
| ] | ||||
| DATABASES = { | ||||
|     'default': { | ||||
|         'ENGINE': 'django.db.backends.postgresql', | ||||
|         'NAME': 'tacticalrmm', | ||||
|         'USER': '{{ db_user }}', | ||||
|         'PASSWORD': '{{ db_passwd }}', | ||||
|         'HOST': 'localhost', | ||||
|         'PORT': '5432', | ||||
|     } | ||||
| } | ||||
| REDIS_HOST    = "localhost" | ||||
| ADMIN_ENABLED = True | ||||
							
								
								
									
**ansible/roles/trmm_dev/vars/main.yml** (new file, 14 lines)

| @@ -0,0 +1,14 @@ | ||||
| --- | ||||
| api: 'api.example.com' | ||||
| rmm: 'rmm.example.com' | ||||
| mesh: 'mesh.example.com' | ||||
| github_username: 'changeme' | ||||
| github_email: 'changeme@example.com' | ||||
| mesh_site: 'changeme' | ||||
| mesh_user: 'changeme' | ||||
| mesh_token: 'changeme' | ||||
| db_user: 'changeme' | ||||
| db_passwd: 'changeme' | ||||
| django_secret: 'changeme' | ||||
|  | ||||
|  | ||||
							
								
								
									
**ansible/setup_dev.yml** (new file, 6 lines)

| @@ -0,0 +1,6 @@ | ||||
| --- | ||||
| - hosts: "{{ target }}" | ||||
|   vars: | ||||
|     ansible_user: tactical | ||||
|   roles: | ||||
|     - trmm_dev | ||||
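
With the `trmm_dev` role and this playbook in place, a hedged example of how it could be invoked from the repo root is `ansible-playbook -i inventory.ini ansible/setup_dev.yml -e target=trmm-dev`; the inventory file name and target group are assumptions, and the `changeme` values in `ansible/roles/trmm_dev/vars/main.yml` would need to be filled in first.
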
| @@ -1,26 +1,15 @@ | ||||
| [run] | ||||
| source = . | ||||
| [report] | ||||
| show_missing = True | ||||
| include = *.py | ||||
| omit = | ||||
|     tacticalrmm/asgi.py | ||||
|     tacticalrmm/wsgi.py | ||||
|     manage.py | ||||
|     */__pycache__/* | ||||
|     */env/* | ||||
|     */management/* | ||||
|     */migrations/* | ||||
|     */static/* | ||||
|     manage.py | ||||
|     */local_settings.py | ||||
|     */apps.py | ||||
|     */admin.py | ||||
|     */celery.py | ||||
|     */wsgi.py | ||||
|     */settings.py | ||||
|     */baker_recipes.py | ||||
|     */urls.py | ||||
|     */tests.py | ||||
|     */test.py | ||||
|     checks/utils.py | ||||
|     */asgi.py | ||||
|     */demo_views.py | ||||
|     /usr/local/lib/* | ||||
|     **/migrations/* | ||||
|     **/test*.py | ||||
|      | ||||
| [report] | ||||
| show_missing = True | ||||
|   | ||||
| @@ -1,22 +1,23 @@ | ||||
| import uuid | ||||
|  | ||||
| from accounts.models import User | ||||
| from django.core.management.base import BaseCommand | ||||
|  | ||||
| from accounts.models import User | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = "Creates the installer user" | ||||
|  | ||||
|     def handle(self, *args, **kwargs): | ||||
|     def handle(self, *args, **kwargs):  # type: ignore | ||||
|         self.stdout.write("Checking if installer user has been created...") | ||||
|         if User.objects.filter(is_installer_user=True).exists(): | ||||
|             self.stdout.write("Installer user already exists") | ||||
|             return | ||||
|  | ||||
|         User.objects.create_user(  # type: ignore | ||||
|         User.objects.create_user( | ||||
|             username=uuid.uuid4().hex, | ||||
|             is_installer_user=True, | ||||
|             password=User.objects.make_random_password(60),  # type: ignore | ||||
|             password=User.objects.make_random_password(60), | ||||
|             block_dashboard_login=True, | ||||
|         ) | ||||
|         self.stdout.write("Installer user has been created") | ||||
|   | ||||
| @@ -6,7 +6,7 @@ from knox.models import AuthToken | ||||
| class Command(BaseCommand): | ||||
|     help = "Deletes all knox web tokens" | ||||
|  | ||||
|     def handle(self, *args, **kwargs): | ||||
|     def handle(self, *args, **kwargs):  # type: ignore | ||||
|         # only delete web tokens, not any generated by the installer or deployments | ||||
|         dont_delete = djangotime.now() + djangotime.timedelta(hours=23) | ||||
|         tokens = AuthToken.objects.exclude(deploytokens__isnull=False).filter( | ||||
|   | ||||
| @@ -1,9 +1,10 @@ | ||||
| import subprocess | ||||
|  | ||||
| import pyotp | ||||
| from accounts.models import User | ||||
| from django.core.management.base import BaseCommand | ||||
|  | ||||
| from accounts.models import User | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = "Generates barcode for Authenticator and creates totp for user" | ||||
|   | ||||
| @@ -2,9 +2,10 @@ import os | ||||
| import subprocess | ||||
|  | ||||
| import pyotp | ||||
| from accounts.models import User | ||||
| from django.core.management.base import BaseCommand | ||||
|  | ||||
| from accounts.models import User | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = "Reset 2fa" | ||||
|   | ||||
| @@ -1,6 +1,7 @@ | ||||
| from accounts.models import User | ||||
| from django.core.management.base import BaseCommand | ||||
|  | ||||
| from accounts.models import User | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = "Reset password for user" | ||||
|   | ||||
							
								
								
									
**api/tacticalrmm/accounts/migrations/0031_user_date_format.py** (new file, 18 lines)

| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 3.2.12 on 2022-04-02 15:57 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('accounts', '0030_auto_20211104_0221'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='user', | ||||
|             name='date_format', | ||||
|             field=models.CharField(blank=True, max_length=30, null=True), | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,25 +1,17 @@ | ||||
| from typing import Optional | ||||
|  | ||||
| from django.contrib.auth.models import AbstractUser | ||||
| from django.core.cache import cache | ||||
| from django.db import models | ||||
| from django.db.models.fields import CharField, DateTimeField | ||||
|  | ||||
| from logs.models import BaseAuditModel | ||||
|  | ||||
| AGENT_DBLCLICK_CHOICES = [ | ||||
|     ("editagent", "Edit Agent"), | ||||
|     ("takecontrol", "Take Control"), | ||||
|     ("remotebg", "Remote Background"), | ||||
|     ("urlaction", "URL Action"), | ||||
| ] | ||||
|  | ||||
| AGENT_TBL_TAB_CHOICES = [ | ||||
|     ("server", "Servers"), | ||||
|     ("workstation", "Workstations"), | ||||
|     ("mixed", "Mixed"), | ||||
| ] | ||||
|  | ||||
| CLIENT_TREE_SORT_CHOICES = [ | ||||
|     ("alphafail", "Move failing clients to the top"), | ||||
|     ("alpha", "Sort alphabetically"), | ||||
| ] | ||||
| from tacticalrmm.constants import ( | ||||
|     ROLE_CACHE_PREFIX, | ||||
|     AgentDblClick, | ||||
|     AgentTableTabs, | ||||
|     ClientTreeSort, | ||||
| ) | ||||
|  | ||||
|  | ||||
| class User(AbstractUser, BaseAuditModel): | ||||
| @@ -28,8 +20,8 @@ class User(AbstractUser, BaseAuditModel): | ||||
|     totp_key = models.CharField(max_length=50, null=True, blank=True) | ||||
|     dark_mode = models.BooleanField(default=True) | ||||
|     show_community_scripts = models.BooleanField(default=True) | ||||
|     agent_dblclick_action = models.CharField( | ||||
|         max_length=50, choices=AGENT_DBLCLICK_CHOICES, default="editagent" | ||||
|     agent_dblclick_action: "AgentDblClick" = models.CharField( | ||||
|         max_length=50, choices=AgentDblClick.choices, default=AgentDblClick.EDIT_AGENT | ||||
|     ) | ||||
|     url_action = models.ForeignKey( | ||||
|         "core.URLAction", | ||||
| @@ -39,15 +31,16 @@ class User(AbstractUser, BaseAuditModel): | ||||
|         on_delete=models.SET_NULL, | ||||
|     ) | ||||
|     default_agent_tbl_tab = models.CharField( | ||||
|         max_length=50, choices=AGENT_TBL_TAB_CHOICES, default="server" | ||||
|         max_length=50, choices=AgentTableTabs.choices, default=AgentTableTabs.SERVER | ||||
|     ) | ||||
|     agents_per_page = models.PositiveIntegerField(default=50)  # not currently used | ||||
|     client_tree_sort = models.CharField( | ||||
|         max_length=50, choices=CLIENT_TREE_SORT_CHOICES, default="alphafail" | ||||
|         max_length=50, choices=ClientTreeSort.choices, default=ClientTreeSort.ALPHA_FAIL | ||||
|     ) | ||||
|     client_tree_splitter = models.PositiveIntegerField(default=11) | ||||
|     loading_bar_color = models.CharField(max_length=255, default="red") | ||||
|     clear_search_when_switching = models.BooleanField(default=True) | ||||
|     date_format = models.CharField(max_length=30, blank=True, null=True) | ||||
|     is_installer_user = models.BooleanField(default=False) | ||||
|     last_login_ip = models.GenericIPAddressField(default=None, blank=True, null=True) | ||||
|  | ||||
| @@ -74,6 +67,23 @@ class User(AbstractUser, BaseAuditModel): | ||||
|  | ||||
|         return UserSerializer(user).data | ||||
|  | ||||
|     def get_and_set_role_cache(self) -> "Optional[Role]": | ||||
|         role = cache.get(f"{ROLE_CACHE_PREFIX}{self.role}") | ||||
|  | ||||
|         if role and isinstance(role, Role): | ||||
|             return role | ||||
|         elif not role and not self.role: | ||||
|             return None | ||||
|         else: | ||||
|             models.prefetch_related_objects( | ||||
|                 [self.role], | ||||
|                 "can_view_clients", | ||||
|                 "can_view_sites", | ||||
|             ) | ||||
|  | ||||
|             cache.set(f"{ROLE_CACHE_PREFIX}{self.role}", self.role, 600) | ||||
|             return self.role | ||||
|  | ||||
|  | ||||
| class Role(BaseAuditModel): | ||||
|     name = models.CharField(max_length=255, unique=True) | ||||
| @@ -174,6 +184,12 @@ class Role(BaseAuditModel): | ||||
|     def __str__(self): | ||||
|         return self.name | ||||
|  | ||||
|     def save(self, *args, **kwargs) -> None: | ||||
|  | ||||
|         # delete cache on save | ||||
|         cache.delete(f"{ROLE_CACHE_PREFIX}{self.name}") | ||||
|         super(BaseAuditModel, self).save(*args, **kwargs) | ||||
|  | ||||
|     @staticmethod | ||||
|     def serialize(role): | ||||
|         # serializes the agent and returns json | ||||
|   | ||||
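
The inline choice tuples removed above are replaced by enums imported from `tacticalrmm.constants`, and role lookups now go through a cache keyed by `ROLE_CACHE_PREFIX`. The constants module itself is not shown in this diff; below is a plausible sketch reconstructed from the removed tuples and the defaults referenced in the new model fields. The `ROLE_CACHE_PREFIX` value and the names of members not referenced in the diff are assumptions.

```python
# Sketch of what tacticalrmm/constants.py is assumed to contain, based on the
# choice tuples removed above; only the string values are taken from this diff.
from django.db import models

ROLE_CACHE_PREFIX = "role_"  # assumed value; only the name appears in the diff


class AgentDblClick(models.TextChoices):
    EDIT_AGENT = "editagent", "Edit Agent"
    TAKE_CONTROL = "takecontrol", "Take Control"
    REMOTE_BG = "remotebg", "Remote Background"
    URL_ACTION = "urlaction", "URL Action"


class AgentTableTabs(models.TextChoices):
    SERVER = "server", "Servers"
    WORKSTATION = "workstation", "Workstations"
    MIXED = "mixed", "Mixed"


class ClientTreeSort(models.TextChoices):
    ALPHA_FAIL = "alphafail", "Move failing clients to the top"
    ALPHA = "alpha", "Sort alphabetically"
```
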
| @@ -4,7 +4,7 @@ from tacticalrmm.permissions import _has_perm | ||||
|  | ||||
|  | ||||
| class AccountsPerms(permissions.BasePermission): | ||||
|     def has_permission(self, r, view): | ||||
|     def has_permission(self, r, view) -> bool: | ||||
|         if r.method == "GET": | ||||
|             return _has_perm(r, "can_list_accounts") | ||||
|         else: | ||||
| @@ -28,7 +28,7 @@ class AccountsPerms(permissions.BasePermission): | ||||
|  | ||||
|  | ||||
| class RolesPerms(permissions.BasePermission): | ||||
|     def has_permission(self, r, view): | ||||
|     def has_permission(self, r, view) -> bool: | ||||
|         if r.method == "GET": | ||||
|             return _has_perm(r, "can_list_roles") | ||||
|         else: | ||||
| @@ -36,7 +36,7 @@ class RolesPerms(permissions.BasePermission): | ||||
|  | ||||
|  | ||||
| class APIKeyPerms(permissions.BasePermission): | ||||
|     def has_permission(self, r, view): | ||||
|     def has_permission(self, r, view) -> bool: | ||||
|         if r.method == "GET": | ||||
|             return _has_perm(r, "can_list_api_keys") | ||||
|  | ||||
|   | ||||
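
The permission classes above delegate to `_has_perm`, whose body is not part of this diff. The following is purely a hypothetical sketch of how such a helper could combine superuser status with the cached role returned by the `get_and_set_role_cache()` method added earlier; the attribute layout is assumed, not taken from the source.

```python
# Hypothetical helper -- illustrates how the role cache added above could be consumed;
# this is NOT the actual tacticalrmm.permissions._has_perm implementation.
def _has_perm(request, perm: str) -> bool:
    if request.user.is_superuser:
        return True

    role = request.user.get_and_set_role_cache()  # method added in this changeset
    # Assumes the Role model exposes boolean fields named after each permission.
    return bool(role and getattr(role, perm, False))
```
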
| @@ -22,6 +22,7 @@ class UserUISerializer(ModelSerializer): | ||||
|             "loading_bar_color", | ||||
|             "clear_search_when_switching", | ||||
|             "block_dashboard_login", | ||||
|             "date_format", | ||||
|         ] | ||||
|  | ||||
|  | ||||
| @@ -39,6 +40,7 @@ class UserSerializer(ModelSerializer): | ||||
|             "last_login_ip", | ||||
|             "role", | ||||
|             "block_dashboard_login", | ||||
|             "date_format", | ||||
|         ] | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -1,16 +1,17 @@ | ||||
| from unittest.mock import patch | ||||
|  | ||||
| from accounts.models import APIKey, User | ||||
| from accounts.serializers import APIKeySerializer | ||||
| from django.test import override_settings | ||||
| from model_bakery import baker, seq | ||||
|  | ||||
| from accounts.models import APIKey, User | ||||
| from accounts.serializers import APIKeySerializer | ||||
| from tacticalrmm.constants import AgentDblClick, AgentTableTabs, ClientTreeSort | ||||
| from tacticalrmm.test import TacticalTestCase | ||||
|  | ||||
|  | ||||
| class TestAccounts(TacticalTestCase): | ||||
|     def setUp(self): | ||||
|         self.client_setup() | ||||
|         self.setup_client() | ||||
|         self.bob = User(username="bob") | ||||
|         self.bob.set_password("hunter2") | ||||
|         self.bob.save() | ||||
| @@ -69,17 +70,17 @@ class TestAccounts(TacticalTestCase): | ||||
|         self.assertEqual(r.status_code, 400) | ||||
|         self.assertIn("non_field_errors", r.data.keys()) | ||||
|  | ||||
|     @override_settings(DEBUG=True) | ||||
|     @patch("pyotp.TOTP.verify") | ||||
|     def test_debug_login_view(self, mock_verify): | ||||
|         url = "/login/" | ||||
|         mock_verify.return_value = True | ||||
|     # @override_settings(DEBUG=True) | ||||
|     # @patch("pyotp.TOTP.verify") | ||||
|     # def test_debug_login_view(self, mock_verify): | ||||
|     #     url = "/login/" | ||||
|     #     mock_verify.return_value = True | ||||
|  | ||||
|         data = {"username": "bob", "password": "hunter2", "twofactor": "sekret"} | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertIn("expiry", r.data.keys()) | ||||
|         self.assertIn("token", r.data.keys()) | ||||
|     #     data = {"username": "bob", "password": "hunter2", "twofactor": "sekret"} | ||||
|     #     r = self.client.post(url, data, format="json") | ||||
|     #     self.assertEqual(r.status_code, 200) | ||||
|     #     self.assertIn("expiry", r.data.keys()) | ||||
|     #     self.assertIn("token", r.data.keys()) | ||||
|  | ||||
|  | ||||
| class TestGetAddUsers(TacticalTestCase): | ||||
| @@ -283,9 +284,9 @@ class TestUserAction(TacticalTestCase): | ||||
|         data = { | ||||
|             "dark_mode": True, | ||||
|             "show_community_scripts": True, | ||||
|             "agent_dblclick_action": "editagent", | ||||
|             "default_agent_tbl_tab": "mixed", | ||||
|             "client_tree_sort": "alpha", | ||||
|             "agent_dblclick_action": AgentDblClick.EDIT_AGENT, | ||||
|             "default_agent_tbl_tab": AgentTableTabs.MIXED, | ||||
|             "client_tree_sort": ClientTreeSort.ALPHA, | ||||
|             "client_tree_splitter": 14, | ||||
|             "loading_bar_color": "green", | ||||
|             "clear_search_when_switching": False, | ||||
| @@ -308,7 +309,7 @@ class TestAPIKeyViews(TacticalTestCase): | ||||
|         serializer = APIKeySerializer(apikeys, many=True) | ||||
|         resp = self.client.get(url, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertEqual(serializer.data, resp.data)  # type: ignore | ||||
|         self.assertEqual(serializer.data, resp.data) | ||||
|  | ||||
|         self.check_not_authenticated("get", url) | ||||
|  | ||||
| @@ -331,14 +332,14 @@ class TestAPIKeyViews(TacticalTestCase): | ||||
|         self.assertEqual(resp.status_code, 404) | ||||
|  | ||||
|         apikey = baker.make("accounts.APIKey", name="Test") | ||||
|         url = f"/accounts/apikeys/{apikey.pk}/"  # type: ignore | ||||
|         url = f"/accounts/apikeys/{apikey.pk}/" | ||||
|  | ||||
|         data = {"name": "New Name"}  # type: ignore | ||||
|         data = {"name": "New Name"} | ||||
|  | ||||
|         resp = self.client.put(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         apikey = APIKey.objects.get(pk=apikey.pk)  # type: ignore | ||||
|         self.assertEquals(apikey.name, "New Name") | ||||
|         apikey = APIKey.objects.get(pk=apikey.pk) | ||||
|         self.assertEqual(apikey.name, "New Name") | ||||
|  | ||||
|         self.check_not_authenticated("put", url) | ||||
|  | ||||
| @@ -349,11 +350,11 @@ class TestAPIKeyViews(TacticalTestCase): | ||||
|  | ||||
|         # test delete api key | ||||
|         apikey = baker.make("accounts.APIKey") | ||||
|         url = f"/accounts/apikeys/{apikey.pk}/"  # type: ignore | ||||
|         url = f"/accounts/apikeys/{apikey.pk}/" | ||||
|         resp = self.client.delete(url, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         self.assertFalse(APIKey.objects.filter(pk=apikey.pk).exists())  # type: ignore | ||||
|         self.assertFalse(APIKey.objects.filter(pk=apikey.pk).exists()) | ||||
|  | ||||
|         self.check_not_authenticated("delete", url) | ||||
|  | ||||
| @@ -393,7 +394,7 @@ class TestAPIAuthentication(TacticalTestCase): | ||||
|             name="Test Token", key="123456", user=self.user | ||||
|         ) | ||||
|  | ||||
|         self.client_setup() | ||||
|         self.setup_client() | ||||
|  | ||||
|     def test_api_auth(self): | ||||
|         url = "/clients/" | ||||
|   | ||||
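The test now pulls AgentDblClick, AgentTableTabs and ClientTreeSort from tacticalrmm.constants instead of using raw strings. The constants module itself is not shown in this diff; a plausible shape, using Django TextChoices with the values taken from the string literals they replace (the display labels are assumptions):

    from django.db import models

    class AgentDblClick(models.TextChoices):
        EDIT_AGENT = "editagent", "Edit Agent"

    class AgentTableTabs(models.TextChoices):
        MIXED = "mixed", "Mixed"

    class ClientTreeSort(models.TextChoices):
        ALPHA = "alpha", "Alphabetical"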
| @@ -5,13 +5,13 @@ from django.db import IntegrityError | ||||
| from django.shortcuts import get_object_or_404 | ||||
| from ipware import get_client_ip | ||||
| from knox.views import LoginView as KnoxLoginView | ||||
| from logs.models import AuditLog | ||||
| from rest_framework.authtoken.serializers import AuthTokenSerializer | ||||
| from rest_framework.permissions import AllowAny, IsAuthenticated | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.views import APIView | ||||
|  | ||||
| from tacticalrmm.utils import notify_error | ||||
| from logs.models import AuditLog | ||||
| from tacticalrmm.helpers import notify_error | ||||
|  | ||||
| from .models import APIKey, Role, User | ||||
| from .permissions import AccountsPerms, APIKeyPerms, RolesPerms | ||||
| @@ -93,7 +93,7 @@ class LoginView(KnoxLoginView): | ||||
|             login(request, user) | ||||
|  | ||||
|             # save ip information | ||||
|             client_ip, is_routable = get_client_ip(request) | ||||
|             client_ip, _ = get_client_ip(request) | ||||
|             user.last_login_ip = client_ip | ||||
|             user.save() | ||||
|  | ||||
|   | ||||
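django-ipware's get_client_ip returns an (ip, is_routable) tuple, so the updated login view simply discards the routability flag with an underscore. The same pattern in isolation (record_login_ip is a hypothetical helper, not part of the diff):

    from ipware import get_client_ip

    def record_login_ip(request, user) -> None:
        client_ip, _ = get_client_ip(request)  # client_ip is None if it cannot be determined
        if client_ip:
            user.last_login_ip = client_ip
            user.save(update_fields=["last_login_ip"])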
| @@ -1,6 +1,6 @@ | ||||
| import json | ||||
| import os | ||||
| import random | ||||
| import secrets | ||||
| import string | ||||
| from itertools import cycle | ||||
|  | ||||
| @@ -8,10 +8,11 @@ from django.conf import settings | ||||
| from django.utils import timezone as djangotime | ||||
| from model_bakery.recipe import Recipe, foreign_key, seq | ||||
|  | ||||
| from tacticalrmm.constants import AgentMonType, AgentPlat | ||||
|  | ||||
| def generate_agent_id(hostname): | ||||
|     rand = "".join(random.choice(string.ascii_letters) for _ in range(35)) | ||||
|     return f"{rand}-{hostname}" | ||||
|  | ||||
| def generate_agent_id() -> str: | ||||
|     return "".join(secrets.choice(string.ascii_letters) for i in range(39)) | ||||
|  | ||||
|  | ||||
| site = Recipe("clients.Site") | ||||
| @@ -24,26 +25,34 @@ def get_wmi_data(): | ||||
|         return json.load(f) | ||||
|  | ||||
|  | ||||
| def get_win_svcs(): | ||||
|     svcs = settings.BASE_DIR.joinpath("tacticalrmm/test_data/winsvcs.json") | ||||
|     with open(svcs) as f: | ||||
|         return json.load(f) | ||||
|  | ||||
|  | ||||
| agent = Recipe( | ||||
|     "agents.Agent", | ||||
|     site=foreign_key(site), | ||||
|     hostname="DESKTOP-TEST123", | ||||
|     version="1.3.0", | ||||
|     monitoring_type=cycle(["workstation", "server"]), | ||||
|     agent_id=seq(generate_agent_id("DESKTOP-TEST123")), | ||||
|     monitoring_type=cycle(AgentMonType.values), | ||||
|     agent_id=seq(generate_agent_id()), | ||||
|     last_seen=djangotime.now() - djangotime.timedelta(days=5), | ||||
|     plat="windows", | ||||
|     plat=AgentPlat.WINDOWS, | ||||
| ) | ||||
|  | ||||
| server_agent = agent.extend( | ||||
|     monitoring_type="server", | ||||
|     monitoring_type=AgentMonType.SERVER, | ||||
| ) | ||||
|  | ||||
| workstation_agent = agent.extend( | ||||
|     monitoring_type="workstation", | ||||
|     monitoring_type=AgentMonType.WORKSTATION, | ||||
| ) | ||||
|  | ||||
| online_agent = agent.extend(last_seen=djangotime.now()) | ||||
| online_agent = agent.extend( | ||||
|     last_seen=djangotime.now(), services=get_win_svcs(), wmi_detail=get_wmi_data() | ||||
| ) | ||||
|  | ||||
| offline_agent = agent.extend( | ||||
|     last_seen=djangotime.now() - djangotime.timedelta(minutes=7) | ||||
| @@ -78,4 +87,4 @@ agent_with_services = agent.extend( | ||||
|     ], | ||||
| ) | ||||
|  | ||||
| agent_with_wmi = agent.extend(wmi=get_wmi_data()) | ||||
| agent_with_wmi = agent.extend(wmi_detail=get_wmi_data()) | ||||
|   | ||||
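The recipe changes above switch the fake agents over to the AgentMonType/AgentPlat constants, a secrets-based agent_id, and give online_agent real service and WMI data. For reference, recipes like these are consumed in tests via baker.make_recipe; a sketch assuming the recipe module is agents/baker_recipes.py:

    from model_bakery import baker

    def make_demo_fleet():
        # hypothetical test helper; the recipe names come from the file above
        online = baker.make_recipe("agents.online_agent", _quantity=3)
        offline = baker.make_recipe("agents.offline_agent")
        return online, offline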
							
								
								
									
api/tacticalrmm/agents/consumers.py (new file, 83 lines added)
							| @@ -0,0 +1,83 @@ | ||||
| from agents.models import Agent, AgentHistory | ||||
| from channels.db import database_sync_to_async | ||||
| from channels.generic.websocket import AsyncJsonWebsocketConsumer | ||||
| from django.contrib.auth.models import AnonymousUser | ||||
| from django.shortcuts import get_object_or_404 | ||||
| from tacticalrmm.constants import AGENT_DEFER, AgentHistoryType | ||||
| from tacticalrmm.permissions import _has_perm_on_agent | ||||
|  | ||||
|  | ||||
| class SendCMD(AsyncJsonWebsocketConsumer): | ||||
|     async def connect(self): | ||||
|  | ||||
|         self.user = self.scope["user"] | ||||
|  | ||||
|         if isinstance(self.user, AnonymousUser): | ||||
|             await self.close() | ||||
|  | ||||
|         await self.accept() | ||||
|  | ||||
|     async def receive_json(self, payload, **kwargs): | ||||
|         auth = await self.has_perm(payload["agent_id"]) | ||||
|         if not auth: | ||||
|             await self.send_json( | ||||
|                 {"ret": "You do not have permission to perform this action."} | ||||
|             ) | ||||
|             return | ||||
|  | ||||
|         agent = await self.get_agent(payload["agent_id"]) | ||||
|         timeout = int(payload["timeout"]) | ||||
|         if payload["shell"] == "custom" and payload["custom_shell"]: | ||||
|             shell = payload["custom_shell"] | ||||
|         else: | ||||
|             shell = payload["shell"] | ||||
|  | ||||
|         hist_pk = await self.get_history_id(agent, payload["cmd"]) | ||||
|  | ||||
|         data = { | ||||
|             "func": "rawcmd", | ||||
|             "timeout": timeout, | ||||
|             "payload": { | ||||
|                 "command": payload["cmd"], | ||||
|                 "shell": shell, | ||||
|             }, | ||||
|             "id": hist_pk, | ||||
|         } | ||||
|  | ||||
|         ret = await agent.nats_cmd(data, timeout=timeout + 2) | ||||
|         await self.send_json({"ret": ret}) | ||||
|  | ||||
|     async def disconnect(self, _): | ||||
|         await self.close() | ||||
|  | ||||
|     def _has_perm(self, perm: str) -> bool: | ||||
|         if self.user.is_superuser or ( | ||||
|             self.user.role and getattr(self.user.role, "is_superuser") | ||||
|         ): | ||||
|             return True | ||||
|  | ||||
|         # make sure non-superusers with empty roles aren't permitted | ||||
|         elif not self.user.role: | ||||
|             return False | ||||
|  | ||||
|         return self.user.role and getattr(self.user.role, perm) | ||||
|  | ||||
|     @database_sync_to_async  # type: ignore | ||||
|     def get_agent(self, agent_id: str) -> "Agent": | ||||
|         return get_object_or_404(Agent.objects.defer(*AGENT_DEFER), agent_id=agent_id) | ||||
|  | ||||
|     @database_sync_to_async  # type: ignore | ||||
|     def get_history_id(self, agent: "Agent", cmd: str) -> int: | ||||
|         hist = AgentHistory.objects.create( | ||||
|             agent=agent, | ||||
|             type=AgentHistoryType.CMD_RUN, | ||||
|             command=cmd, | ||||
|             username=self.user.username[:50], | ||||
|         ) | ||||
|         return hist.pk | ||||
|  | ||||
|     @database_sync_to_async  # type: ignore | ||||
|     def has_perm(self, agent_id: str) -> bool: | ||||
|         return self._has_perm("can_send_cmd") and _has_perm_on_agent( | ||||
|             self.user, agent_id | ||||
|         ) | ||||
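The new SendCMD consumer only becomes reachable once it is wired into the Channels websocket routing, which is not part of this diff. A hypothetical routing entry (the URL pattern and module layout are assumptions):

    from django.urls import re_path

    from agents.consumers import SendCMD

    websocket_urlpatterns = [
        re_path(r"ws/sendcmd/$", SendCMD.as_asgi()),
    ]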
| @@ -1,6 +1,7 @@ | ||||
| from django.core.management.base import BaseCommand | ||||
|  | ||||
| from agents.models import Agent | ||||
| from clients.models import Client, Site | ||||
| from django.core.management.base import BaseCommand | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|   | ||||
| @@ -1,10 +1,10 @@ | ||||
| import asyncio | ||||
|  | ||||
| from agents.models import Agent | ||||
| from django.core.management.base import BaseCommand | ||||
| from django.utils import timezone as djangotime | ||||
| from packaging import version as pyver | ||||
|  | ||||
| from agents.models import Agent | ||||
| from tacticalrmm.constants import AGENT_DEFER | ||||
| from tacticalrmm.utils import reload_nats | ||||
|  | ||||
|   | ||||
| @@ -1,11 +1,12 @@ | ||||
| # import datetime as dt | ||||
| import random | ||||
|  | ||||
| from agents.models import Agent | ||||
| from core.tasks import cache_db_fields_task | ||||
| from django.core.management.base import BaseCommand | ||||
| from django.utils import timezone as djangotime | ||||
|  | ||||
| from agents.models import Agent | ||||
| from core.tasks import cache_db_fields_task, handle_resolved_stuff | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = "stuff for demo site in cron" | ||||
| @@ -23,17 +24,10 @@ class Command(BaseCommand): | ||||
|             rand = now - djangotime.timedelta(minutes=random.randint(10, 20)) | ||||
|             random_dates.append(rand) | ||||
|  | ||||
|         """ for _ in range(5): | ||||
|             rand = djangotime.now() - djangotime.timedelta(hours=random.randint(1, 10)) | ||||
|             random_dates.append(rand) | ||||
|  | ||||
|         for _ in range(5): | ||||
|             rand = djangotime.now() - djangotime.timedelta(days=random.randint(40, 90)) | ||||
|             random_dates.append(rand) """ | ||||
|  | ||||
|         agents = Agent.objects.only("last_seen") | ||||
|         for agent in agents: | ||||
|             agent.last_seen = random.choice(random_dates) | ||||
|             agent.save(update_fields=["last_seen"]) | ||||
|  | ||||
|         cache_db_fields_task() | ||||
|         handle_resolved_stuff() | ||||
|   | ||||
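The demo cron command now calls cache_db_fields_task() and handle_resolved_stuff() directly. Assuming these are Celery tasks, a direct call runs them synchronously in the cron process, whereas .delay() would enqueue them on the broker instead; a sketch of the distinction (run_demo_maintenance is a hypothetical wrapper):

    from core.tasks import cache_db_fields_task, handle_resolved_stuff

    def run_demo_maintenance() -> None:
        cache_db_fields_task()    # executes immediately, in-process
        handle_resolved_stuff()   # same: called synchronously, no worker involved
        # cache_db_fields_task.delay()  # would hand the work to a Celery worker instead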
| @@ -3,30 +3,54 @@ import json | ||||
| import random | ||||
| import string | ||||
|  | ||||
| from accounts.models import User | ||||
| from agents.models import Agent, AgentHistory | ||||
| from automation.models import Policy | ||||
| from autotasks.models import AutomatedTask | ||||
| from checks.models import Check, CheckHistory | ||||
| from clients.models import Client, Site | ||||
| from django.conf import settings | ||||
| from django.core.management import call_command | ||||
| from django.core.management.base import BaseCommand | ||||
| from django.utils import timezone as djangotime | ||||
|  | ||||
| from accounts.models import User | ||||
| from agents.models import Agent, AgentHistory | ||||
| from automation.models import Policy | ||||
| from autotasks.models import AutomatedTask, TaskResult | ||||
| from checks.models import Check, CheckHistory, CheckResult | ||||
| from clients.models import Client, Site | ||||
| from logs.models import AuditLog, PendingAction | ||||
| from scripts.models import Script | ||||
| from software.models import InstalledSoftware | ||||
| from winupdate.models import WinUpdate, WinUpdatePolicy | ||||
|  | ||||
| from tacticalrmm.constants import ( | ||||
|     AgentHistoryType, | ||||
|     AgentMonType, | ||||
|     AgentPlat, | ||||
|     AlertSeverity, | ||||
|     CheckStatus, | ||||
|     CheckType, | ||||
|     EvtLogFailWhen, | ||||
|     EvtLogNames, | ||||
|     EvtLogTypes, | ||||
|     PAAction, | ||||
|     ScriptShell, | ||||
|     TaskSyncStatus, | ||||
|     TaskType, | ||||
| ) | ||||
| from tacticalrmm.demo_data import ( | ||||
|     check_network_loc_aware_ps1, | ||||
|     check_storage_pool_health_ps1, | ||||
|     clear_print_spool_bat, | ||||
|     disks, | ||||
|     disks_linux_deb, | ||||
|     disks_linux_pi, | ||||
|     ping_fail_output, | ||||
|     ping_success_output, | ||||
|     restart_nla_ps1, | ||||
|     show_temp_dir_py, | ||||
|     spooler_stdout, | ||||
|     temp_dir_stdout, | ||||
|     wmi_deb, | ||||
|     wmi_pi, | ||||
| ) | ||||
| from winupdate.models import WinUpdate, WinUpdatePolicy | ||||
|  | ||||
| AGENTS_TO_GENERATE = 250 | ||||
| AGENTS_TO_GENERATE = 20 | ||||
|  | ||||
| SVCS = settings.BASE_DIR.joinpath("tacticalrmm/test_data/winsvcs.json") | ||||
| WMI_1 = settings.BASE_DIR.joinpath("tacticalrmm/test_data/wmi1.json") | ||||
| @@ -43,18 +67,19 @@ EVT_LOG_FAIL = settings.BASE_DIR.joinpath( | ||||
| class Command(BaseCommand): | ||||
|     help = "populate database with fake agents" | ||||
|  | ||||
|     def rand_string(self, length): | ||||
|     def rand_string(self, length: int) -> str: | ||||
|         chars = string.ascii_letters | ||||
|         return "".join(random.choice(chars) for _ in range(length)) | ||||
|  | ||||
|     def handle(self, *args, **kwargs): | ||||
|     def handle(self, *args, **kwargs) -> None: | ||||
|  | ||||
|         user = User.objects.first() | ||||
|         user.totp_key = "ABSA234234" | ||||
|         user.save(update_fields=["totp_key"]) | ||||
|         if user: | ||||
|             user.totp_key = "ABSA234234" | ||||
|             user.save(update_fields=["totp_key"]) | ||||
|  | ||||
|         Client.objects.all().delete() | ||||
|         Agent.objects.all().delete() | ||||
|         Client.objects.all().delete() | ||||
|         Check.objects.all().delete() | ||||
|         Script.objects.all().delete() | ||||
|         AutomatedTask.objects.all().delete() | ||||
| @@ -64,6 +89,9 @@ class Command(BaseCommand): | ||||
|         PendingAction.objects.all().delete() | ||||
|  | ||||
|         call_command("load_community_scripts") | ||||
|         call_command("initial_db_setup") | ||||
|         call_command("load_chocos") | ||||
|         call_command("create_installer_user") | ||||
|  | ||||
|         # policies | ||||
|         check_policy = Policy() | ||||
| @@ -94,27 +122,27 @@ class Command(BaseCommand): | ||||
|         update_policy.email_if_fail = True | ||||
|         update_policy.save() | ||||
|  | ||||
|         clients = [ | ||||
|         clients = ( | ||||
|             "Company 1", | ||||
|             "Company 2", | ||||
|             "Company 3", | ||||
|             "Company 1", | ||||
|             "Company 4", | ||||
|             "Company 5", | ||||
|             "Company 6", | ||||
|         ] | ||||
|         sites1 = ["HQ1", "LA Office 1", "NY Office 1"] | ||||
|         sites2 = ["HQ2", "LA Office 2", "NY Office 2"] | ||||
|         sites3 = ["HQ3", "LA Office 3", "NY Office 3"] | ||||
|         sites4 = ["HQ4", "LA Office 4", "NY Office 4"] | ||||
|         sites5 = ["HQ5", "LA Office 5", "NY Office 5"] | ||||
|         sites6 = ["HQ6", "LA Office 6", "NY Office 6"] | ||||
|         ) | ||||
|         sites1 = ("HQ1", "LA Office 1", "NY Office 1") | ||||
|         sites2 = ("HQ2", "LA Office 2", "NY Office 2") | ||||
|         sites3 = ("HQ3", "LA Office 3", "NY Office 3") | ||||
|         sites4 = ("HQ4", "LA Office 4", "NY Office 4") | ||||
|         sites5 = ("HQ5", "LA Office 5", "NY Office 5") | ||||
|         sites6 = ("HQ6", "LA Office 6", "NY Office 6") | ||||
|  | ||||
|         client1 = Client(name="Company 1") | ||||
|         client2 = Client(name="Company 2") | ||||
|         client3 = Client(name="Company 3") | ||||
|         client4 = Client(name="Company 4") | ||||
|         client5 = Client(name="Company 5") | ||||
|         client6 = Client(name="Company 6") | ||||
|         client1 = Client(name=clients[0]) | ||||
|         client2 = Client(name=clients[1]) | ||||
|         client3 = Client(name=clients[2]) | ||||
|         client4 = Client(name=clients[3]) | ||||
|         client5 = Client(name=clients[4]) | ||||
|         client6 = Client(name=clients[5]) | ||||
|  | ||||
|         client1.save() | ||||
|         client2.save() | ||||
| @@ -141,7 +169,7 @@ class Command(BaseCommand): | ||||
|         for site in sites6: | ||||
|             Site(client=client6, name=site).save() | ||||
|  | ||||
|         hostnames = [ | ||||
|         hostnames = ( | ||||
|             "DC-1", | ||||
|             "DC-2", | ||||
|             "FSV-1", | ||||
| @@ -149,26 +177,30 @@ class Command(BaseCommand): | ||||
|             "WSUS", | ||||
|             "DESKTOP-12345", | ||||
|             "LAPTOP-55443", | ||||
|         ] | ||||
|         descriptions = ["Bob's computer", "Primary DC", "File Server", "Karen's Laptop"] | ||||
|         modes = ["server", "workstation"] | ||||
|         op_systems_servers = [ | ||||
|         ) | ||||
|         descriptions = ("Bob's computer", "Primary DC", "File Server", "Karen's Laptop") | ||||
|         modes = AgentMonType.values | ||||
|         op_systems_servers = ( | ||||
|             "Microsoft Windows Server 2016 Standard, 64bit (build 14393)", | ||||
|             "Microsoft Windows Server 2012 R2 Standard, 64bit (build 9600)", | ||||
|             "Microsoft Windows Server 2019 Standard, 64bit (build 17763)", | ||||
|         ] | ||||
|         ) | ||||
|  | ||||
|         op_systems_workstations = [ | ||||
|         op_systems_workstations = ( | ||||
|             "Microsoft Windows 8.1 Pro, 64bit (build 9600)", | ||||
|             "Microsoft Windows 10 Pro for Workstations, 64bit (build 18363)", | ||||
|             "Microsoft Windows 10 Pro, 64bit (build 18363)", | ||||
|         ] | ||||
|         ) | ||||
|  | ||||
|         public_ips = ["65.234.22.4", "74.123.43.5", "44.21.134.45"] | ||||
|         linux_deb_os = "Debian 11.2 x86_64 5.10.0-11-amd64" | ||||
|         linux_pi_os = "Raspbian 11.2 armv7l 5.10.92-v7+" | ||||
|  | ||||
|         total_rams = [4, 8, 16, 32, 64, 128] | ||||
|         public_ips = ("65.234.22.4", "74.123.43.5", "44.21.134.45") | ||||
|  | ||||
|         total_rams = (4, 8, 16, 32, 64, 128) | ||||
|  | ||||
|         now = dt.datetime.now() | ||||
|         django_now = djangotime.now() | ||||
|  | ||||
|         boot_times = [] | ||||
|  | ||||
| @@ -180,7 +212,7 @@ class Command(BaseCommand): | ||||
|             rand_days = now - dt.timedelta(days=random.randint(2, 50)) | ||||
|             boot_times.append(str(rand_days.timestamp())) | ||||
|  | ||||
|         user_names = ["None", "Karen", "Steve", "jsmith", "jdoe"] | ||||
|         user_names = ("None", "Karen", "Steve", "jsmith", "jdoe") | ||||
|  | ||||
|         with open(SVCS) as f: | ||||
|             services = json.load(f) | ||||
| @@ -195,10 +227,7 @@ class Command(BaseCommand): | ||||
|         with open(WMI_3) as f: | ||||
|             wmi3 = json.load(f) | ||||
|  | ||||
|         wmi_details = [] | ||||
|         wmi_details.append(wmi1) | ||||
|         wmi_details.append(wmi2) | ||||
|         wmi_details.append(wmi3) | ||||
|         wmi_details = [i for i in (wmi1, wmi2, wmi3)] | ||||
|  | ||||
|         # software | ||||
|         with open(SW_1) as f: | ||||
| @@ -207,9 +236,7 @@ class Command(BaseCommand): | ||||
|         with open(SW_2) as f: | ||||
|             software2 = json.load(f) | ||||
|  | ||||
|         softwares = [] | ||||
|         softwares.append(software1) | ||||
|         softwares.append(software2) | ||||
|         softwares = [i for i in (software1, software2)] | ||||
|  | ||||
|         # windows updates | ||||
|         with open(WIN_UPDATES) as f: | ||||
| @@ -225,72 +252,97 @@ class Command(BaseCommand): | ||||
|         clear_spool.name = "Clear Print Spooler" | ||||
|         clear_spool.description = "clears the print spooler. Fuck printers" | ||||
|         clear_spool.filename = "clear_print_spool.bat" | ||||
|         clear_spool.shell = "cmd" | ||||
|         clear_spool.shell = ScriptShell.CMD | ||||
|         clear_spool.script_body = clear_print_spool_bat | ||||
|         clear_spool.save() | ||||
|  | ||||
|         check_net_aware = Script() | ||||
|         check_net_aware.name = "Check Network Location Awareness" | ||||
|         check_net_aware.description = "Check's network location awareness on domain computers, should always be domain profile and not public or private. Sometimes happens when computer restarts before domain available. This script will return 0 if check passes or 1 if it fails." | ||||
|         check_net_aware.filename = "check_network_loc_aware.ps1" | ||||
|         check_net_aware.shell = "powershell" | ||||
|         check_net_aware.shell = ScriptShell.POWERSHELL | ||||
|         check_net_aware.script_body = check_network_loc_aware_ps1 | ||||
|         check_net_aware.save() | ||||
|  | ||||
|         check_pool_health = Script() | ||||
|         check_pool_health.name = "Check storage spool health" | ||||
|         check_pool_health.description = "loops through all storage pools and will fail if any of them are not healthy" | ||||
|         check_pool_health.filename = "check_storage_pool_health.ps1" | ||||
|         check_pool_health.shell = "powershell" | ||||
|         check_pool_health.shell = ScriptShell.POWERSHELL | ||||
|         check_pool_health.script_body = check_storage_pool_health_ps1 | ||||
|         check_pool_health.save() | ||||
|  | ||||
|         restart_nla = Script() | ||||
|         restart_nla.name = "Restart NLA Service" | ||||
|         restart_nla.description = "restarts the Network Location Awareness windows service to fix the nic profile. Run this after the check network service fails" | ||||
|         restart_nla.filename = "restart_nla.ps1" | ||||
|         restart_nla.shell = "powershell" | ||||
|         restart_nla.shell = ScriptShell.POWERSHELL | ||||
|         restart_nla.script_body = restart_nla_ps1 | ||||
|         restart_nla.save() | ||||
|  | ||||
|         show_tmp_dir_script = Script() | ||||
|         show_tmp_dir_script.name = "Check temp dir" | ||||
|         show_tmp_dir_script.description = "shows files in temp dir using python" | ||||
|         show_tmp_dir_script.filename = "show_temp_dir.py" | ||||
|         show_tmp_dir_script.shell = "python" | ||||
|         show_tmp_dir_script.shell = ScriptShell.PYTHON | ||||
|         show_tmp_dir_script.script_body = show_temp_dir_py | ||||
|         show_tmp_dir_script.save() | ||||
|  | ||||
|         for count_agents in range(AGENTS_TO_GENERATE): | ||||
|  | ||||
|             client = random.choice(clients) | ||||
|  | ||||
|             if client == "Company 1": | ||||
|             if client == clients[0]: | ||||
|                 site = random.choice(sites1) | ||||
|             elif client == "Company 2": | ||||
|             elif client == clients[1]: | ||||
|                 site = random.choice(sites2) | ||||
|             elif client == "Company 3": | ||||
|             elif client == clients[2]: | ||||
|                 site = random.choice(sites3) | ||||
|             elif client == "Company 4": | ||||
|             elif client == clients[3]: | ||||
|                 site = random.choice(sites4) | ||||
|             elif client == "Company 5": | ||||
|             elif client == clients[4]: | ||||
|                 site = random.choice(sites5) | ||||
|             elif client == "Company 6": | ||||
|             elif client == clients[5]: | ||||
|                 site = random.choice(sites6) | ||||
|  | ||||
|             agent = Agent() | ||||
|  | ||||
|             mode = random.choice(modes) | ||||
|             if mode == "server": | ||||
|                 agent.operating_system = random.choice(op_systems_servers) | ||||
|             plat_pick = random.randint(1, 15) | ||||
|             if plat_pick in (7, 11): | ||||
|                 agent.plat = AgentPlat.LINUX | ||||
|                 mode = AgentMonType.SERVER | ||||
|                 # pi arm | ||||
|                 if plat_pick == 7: | ||||
|                     agent.goarch = "arm" | ||||
|                     agent.wmi_detail = wmi_pi | ||||
|                     agent.disks = disks_linux_pi | ||||
|                     agent.operating_system = linux_pi_os | ||||
|                 else: | ||||
|                     agent.goarch = "amd64" | ||||
|                     agent.wmi_detail = wmi_deb | ||||
|                     agent.disks = disks_linux_deb | ||||
|                     agent.operating_system = linux_deb_os | ||||
|             else: | ||||
|                 agent.operating_system = random.choice(op_systems_workstations) | ||||
|                 agent.plat = AgentPlat.WINDOWS | ||||
|                 agent.goarch = "amd64" | ||||
|                 mode = random.choice(modes) | ||||
|                 agent.wmi_detail = random.choice(wmi_details) | ||||
|                 agent.services = services | ||||
|                 agent.disks = random.choice(disks) | ||||
|                 if mode == AgentMonType.SERVER: | ||||
|                     agent.operating_system = random.choice(op_systems_servers) | ||||
|                 else: | ||||
|                     agent.operating_system = random.choice(op_systems_workstations) | ||||
|  | ||||
|             agent.hostname = random.choice(hostnames) | ||||
|             agent.version = settings.LATEST_AGENT_VER | ||||
|             agent.site = Site.objects.get(name=site) | ||||
|             agent.agent_id = self.rand_string(25) | ||||
|             agent.agent_id = self.rand_string(40) | ||||
|             agent.description = random.choice(descriptions) | ||||
|             agent.monitoring_type = mode | ||||
|             agent.public_ip = random.choice(public_ips) | ||||
|             agent.last_seen = djangotime.now() | ||||
|             agent.plat = "windows" | ||||
|             agent.plat_release = "windows-2019Server" | ||||
|             agent.last_seen = django_now | ||||
|  | ||||
|             agent.total_ram = random.choice(total_rams) | ||||
|             agent.boot_time = random.choice(boot_times) | ||||
|             agent.logged_in_username = random.choice(user_names) | ||||
| @@ -300,40 +352,36 @@ class Command(BaseCommand): | ||||
|             agent.overdue_email_alert = random.choice([True, False]) | ||||
|             agent.overdue_text_alert = random.choice([True, False]) | ||||
|             agent.needs_reboot = random.choice([True, False]) | ||||
|             agent.wmi_detail = random.choice(wmi_details) | ||||
|             agent.services = services | ||||
|             agent.disks = random.choice(disks) | ||||
|  | ||||
|             agent.save() | ||||
|  | ||||
|             InstalledSoftware(agent=agent, software=random.choice(softwares)).save() | ||||
|             if agent.plat == AgentPlat.WINDOWS: | ||||
|                 InstalledSoftware(agent=agent, software=random.choice(softwares)).save() | ||||
|  | ||||
|             if mode == "workstation": | ||||
|             if mode == AgentMonType.WORKSTATION: | ||||
|                 WinUpdatePolicy(agent=agent, run_time_days=[5, 6]).save() | ||||
|             else: | ||||
|                 WinUpdatePolicy(agent=agent).save() | ||||
|  | ||||
|             # windows updates load | ||||
|             guids = [] | ||||
|             for k in windows_updates.keys(): | ||||
|                 guids.append(k) | ||||
|  | ||||
|             for i in guids: | ||||
|                 WinUpdate( | ||||
|                     agent=agent, | ||||
|                     guid=i, | ||||
|                     kb=windows_updates[i]["KBs"][0], | ||||
|                     title=windows_updates[i]["Title"], | ||||
|                     installed=windows_updates[i]["Installed"], | ||||
|                     downloaded=windows_updates[i]["Downloaded"], | ||||
|                     description=windows_updates[i]["Description"], | ||||
|                     severity=windows_updates[i]["Severity"], | ||||
|                 ).save() | ||||
|             if agent.plat == AgentPlat.WINDOWS: | ||||
|                 # windows updates load | ||||
|                 guids = [i for i in windows_updates.keys()] | ||||
|                 for i in guids: | ||||
|                     WinUpdate( | ||||
|                         agent=agent, | ||||
|                         guid=i, | ||||
|                         kb=windows_updates[i]["KBs"][0], | ||||
|                         title=windows_updates[i]["Title"], | ||||
|                         installed=windows_updates[i]["Installed"], | ||||
|                         downloaded=windows_updates[i]["Downloaded"], | ||||
|                         description=windows_updates[i]["Description"], | ||||
|                         severity=windows_updates[i]["Severity"], | ||||
|                     ).save() | ||||
|  | ||||
|             # agent histories | ||||
|             hist = AgentHistory() | ||||
|             hist.agent = agent | ||||
|             hist.type = "cmd_run" | ||||
|             hist.type = AgentHistoryType.CMD_RUN | ||||
|             hist.command = "ping google.com" | ||||
|             hist.username = "demo" | ||||
|             hist.results = ping_success_output | ||||
| @@ -341,7 +389,7 @@ class Command(BaseCommand): | ||||
|  | ||||
|             hist1 = AgentHistory() | ||||
|             hist1.agent = agent | ||||
|             hist1.type = "script_run" | ||||
|             hist1.type = AgentHistoryType.SCRIPT_RUN | ||||
|             hist1.script = clear_spool | ||||
|             hist1.script_results = { | ||||
|                 "id": 1, | ||||
| @@ -352,56 +400,68 @@ class Command(BaseCommand): | ||||
|             } | ||||
|             hist1.save() | ||||
|  | ||||
|             # disk space check | ||||
|             check1 = Check() | ||||
|             check1.agent = agent | ||||
|             check1.check_type = "diskspace" | ||||
|             check1.status = "passing" | ||||
|             check1.last_run = djangotime.now() | ||||
|             check1.more_info = "Total: 498.7GB, Free: 287.4GB" | ||||
|             check1.warning_threshold = 25 | ||||
|             check1.error_threshold = 10 | ||||
|             check1.disk = "C:" | ||||
|             check1.email_alert = random.choice([True, False]) | ||||
|             check1.text_alert = random.choice([True, False]) | ||||
|             check1.save() | ||||
|             if agent.plat == AgentPlat.WINDOWS: | ||||
|                 # disk space check | ||||
|                 check1 = Check() | ||||
|                 check1.agent = agent | ||||
|                 check1.check_type = CheckType.DISK_SPACE | ||||
|                 check1.warning_threshold = 25 | ||||
|                 check1.error_threshold = 10 | ||||
|                 check1.disk = "C:" | ||||
|                 check1.email_alert = random.choice([True, False]) | ||||
|                 check1.text_alert = random.choice([True, False]) | ||||
|                 check1.save() | ||||
|  | ||||
|             for i in range(30): | ||||
|                 check1_history = CheckHistory() | ||||
|                 check1_history.check_id = check1.id | ||||
|                 check1_history.x = djangotime.now() - djangotime.timedelta( | ||||
|                     minutes=i * 2 | ||||
|                 ) | ||||
|                 check1_history.y = random.randint(13, 40) | ||||
|                 check1_history.save() | ||||
|                 check_result1 = CheckResult() | ||||
|                 check_result1.agent = agent | ||||
|                 check_result1.assigned_check = check1 | ||||
|                 check_result1.status = CheckStatus.PASSING | ||||
|                 check_result1.last_run = django_now | ||||
|                 check_result1.more_info = "Total: 498.7GB, Free: 287.4GB" | ||||
|                 check_result1.save() | ||||
|  | ||||
|                 for i in range(30): | ||||
|                     check1_history = CheckHistory() | ||||
|                     check1_history.check_id = check1.pk | ||||
|                     check1_history.agent_id = agent.agent_id | ||||
|                     check1_history.x = django_now - djangotime.timedelta(minutes=i * 2) | ||||
|                     check1_history.y = random.randint(13, 40) | ||||
|                     check1_history.save() | ||||
|  | ||||
|             # ping check | ||||
|             check2 = Check() | ||||
|             check_result2 = CheckResult() | ||||
|  | ||||
|             check2.agent = agent | ||||
|             check2.check_type = "ping" | ||||
|             check2.last_run = djangotime.now() | ||||
|             check2.check_type = CheckType.PING | ||||
|  | ||||
|             check2.email_alert = random.choice([True, False]) | ||||
|             check2.text_alert = random.choice([True, False]) | ||||
|  | ||||
|             check_result2.agent = agent | ||||
|             check_result2.assigned_check = check2 | ||||
|             check_result2.last_run = django_now | ||||
|  | ||||
|             if site in sites5: | ||||
|                 check2.name = "Synology NAS" | ||||
|                 check2.status = "failing" | ||||
|                 check2.alert_severity = AlertSeverity.ERROR | ||||
|                 check_result2.status = CheckStatus.FAILING | ||||
|                 check2.ip = "172.17.14.26" | ||||
|                 check2.more_info = ping_fail_output | ||||
|                 check_result2.more_info = ping_fail_output | ||||
|             else: | ||||
|                 check2.name = "Google" | ||||
|                 check2.status = "passing" | ||||
|                 check_result2.status = CheckStatus.PASSING | ||||
|                 check2.ip = "8.8.8.8" | ||||
|                 check2.more_info = ping_success_output | ||||
|                 check_result2.more_info = ping_success_output | ||||
|  | ||||
|             check2.save() | ||||
|             check_result2.save() | ||||
|  | ||||
|             for i in range(30): | ||||
|                 check2_history = CheckHistory() | ||||
|                 check2_history.check_id = check2.id | ||||
|                 check2_history.x = djangotime.now() - djangotime.timedelta( | ||||
|                     minutes=i * 2 | ||||
|                 ) | ||||
|                 check2_history.check_id = check2.pk | ||||
|                 check2_history.agent_id = agent.agent_id | ||||
|                 check2_history.x = django_now - djangotime.timedelta(minutes=i * 2) | ||||
|                 if site in sites5: | ||||
|                     check2_history.y = 1 | ||||
|                     check2_history.results = ping_fail_output | ||||
| @@ -413,66 +473,97 @@ class Command(BaseCommand): | ||||
|             # cpu load check | ||||
|             check3 = Check() | ||||
|             check3.agent = agent | ||||
|             check3.check_type = "cpuload" | ||||
|             check3.status = "passing" | ||||
|             check3.last_run = djangotime.now() | ||||
|             check3.check_type = CheckType.CPU_LOAD | ||||
|             check3.warning_threshold = 70 | ||||
|             check3.error_threshold = 90 | ||||
|             check3.history = [15, 23, 16, 22, 22, 27, 15, 23, 23, 20, 10, 10, 13, 34] | ||||
|             check3.email_alert = random.choice([True, False]) | ||||
|             check3.text_alert = random.choice([True, False]) | ||||
|             check3.save() | ||||
|  | ||||
|             check_result3 = CheckResult() | ||||
|             check_result3.agent = agent | ||||
|             check_result3.assigned_check = check3 | ||||
|             check_result3.status = CheckStatus.PASSING | ||||
|             check_result3.last_run = django_now | ||||
|             check_result3.history = [ | ||||
|                 15, | ||||
|                 23, | ||||
|                 16, | ||||
|                 22, | ||||
|                 22, | ||||
|                 27, | ||||
|                 15, | ||||
|                 23, | ||||
|                 23, | ||||
|                 20, | ||||
|                 10, | ||||
|                 10, | ||||
|                 13, | ||||
|                 34, | ||||
|             ] | ||||
|             check_result3.save() | ||||
|  | ||||
|             for i in range(30): | ||||
|                 check3_history = CheckHistory() | ||||
|                 check3_history.check_id = check3.id | ||||
|                 check3_history.x = djangotime.now() - djangotime.timedelta( | ||||
|                     minutes=i * 2 | ||||
|                 ) | ||||
|                 check3_history.check_id = check3.pk | ||||
|                 check3_history.agent_id = agent.agent_id | ||||
|                 check3_history.x = django_now - djangotime.timedelta(minutes=i * 2) | ||||
|                 check3_history.y = random.randint(2, 79) | ||||
|                 check3_history.save() | ||||
|  | ||||
|             # memory check | ||||
|             check4 = Check() | ||||
|             check4.agent = agent | ||||
|             check4.check_type = "memory" | ||||
|             check4.status = "passing" | ||||
|             check4.check_type = CheckType.MEMORY | ||||
|             check4.warning_threshold = 70 | ||||
|             check4.error_threshold = 85 | ||||
|             check4.history = [34, 34, 35, 36, 34, 34, 34, 34, 34, 34] | ||||
|             check4.email_alert = random.choice([True, False]) | ||||
|             check4.text_alert = random.choice([True, False]) | ||||
|             check4.save() | ||||
|  | ||||
|             check_result4 = CheckResult() | ||||
|             check_result4.agent = agent | ||||
|             check_result4.assigned_check = check4 | ||||
|             check_result4.status = CheckStatus.PASSING | ||||
|             check_result4.last_run = django_now | ||||
|             check_result4.history = [34, 34, 35, 36, 34, 34, 34, 34, 34, 34] | ||||
|             check_result4.save() | ||||
|  | ||||
|             for i in range(30): | ||||
|                 check4_history = CheckHistory() | ||||
|                 check4_history.check_id = check4.id | ||||
|                 check4_history.x = djangotime.now() - djangotime.timedelta( | ||||
|                     minutes=i * 2 | ||||
|                 ) | ||||
|                 check4_history.check_id = check4.pk | ||||
|                 check4_history.agent_id = agent.agent_id | ||||
|                 check4_history.x = django_now - djangotime.timedelta(minutes=i * 2) | ||||
|                 check4_history.y = random.randint(2, 79) | ||||
|                 check4_history.save() | ||||
|  | ||||
|             # script check storage pool | ||||
|             check5 = Check() | ||||
|  | ||||
|             check5.agent = agent | ||||
|             check5.check_type = "script" | ||||
|             check5.status = "passing" | ||||
|             check5.last_run = djangotime.now() | ||||
|             check5.check_type = CheckType.SCRIPT | ||||
|  | ||||
|             check5.email_alert = random.choice([True, False]) | ||||
|             check5.text_alert = random.choice([True, False]) | ||||
|             check5.timeout = 120 | ||||
|             check5.retcode = 0 | ||||
|             check5.execution_time = "4.0000" | ||||
|  | ||||
|             check5.script = check_pool_health | ||||
|             check5.save() | ||||
|  | ||||
|             check_result5 = CheckResult() | ||||
|             check_result5.agent = agent | ||||
|             check_result5.assigned_check = check5 | ||||
|             check_result5.status = CheckStatus.PASSING | ||||
|             check_result5.last_run = django_now | ||||
|             check_result5.retcode = 0 | ||||
|             check_result5.execution_time = "4.0000" | ||||
|             check_result5.save() | ||||
|  | ||||
|             for i in range(30): | ||||
|                 check5_history = CheckHistory() | ||||
|                 check5_history.check_id = check5.id | ||||
|                 check5_history.x = djangotime.now() - djangotime.timedelta( | ||||
|                     minutes=i * 2 | ||||
|                 ) | ||||
|                 check5_history.check_id = check5.pk | ||||
|                 check5_history.agent_id = agent.agent_id | ||||
|                 check5_history.x = django_now - djangotime.timedelta(minutes=i * 2) | ||||
|                 if i == 10 or i == 18: | ||||
|                     check5_history.y = 1 | ||||
|                 else: | ||||
| @@ -480,28 +571,34 @@ class Command(BaseCommand): | ||||
|                 check5_history.save() | ||||
|  | ||||
|             check6 = Check() | ||||
|  | ||||
|             check6.agent = agent | ||||
|             check6.check_type = "script" | ||||
|             check6.status = "passing" | ||||
|             check6.last_run = djangotime.now() | ||||
|             check6.check_type = CheckType.SCRIPT | ||||
|             check6.email_alert = random.choice([True, False]) | ||||
|             check6.text_alert = random.choice([True, False]) | ||||
|             check6.timeout = 120 | ||||
|             check6.retcode = 0 | ||||
|             check6.execution_time = "4.0000" | ||||
|             check6.script = check_net_aware | ||||
|             check6.save() | ||||
|  | ||||
|             check_result6 = CheckResult() | ||||
|             check_result6.agent = agent | ||||
|             check_result6.assigned_check = check6 | ||||
|             check_result6.status = CheckStatus.PASSING | ||||
|             check_result6.last_run = django_now | ||||
|             check_result6.retcode = 0 | ||||
|             check_result6.execution_time = "4.0000" | ||||
|             check_result6.save() | ||||
|  | ||||
|             for i in range(30): | ||||
|                 check6_history = CheckHistory() | ||||
|                 check6_history.check_id = check6.id | ||||
|                 check6_history.x = djangotime.now() - djangotime.timedelta( | ||||
|                     minutes=i * 2 | ||||
|                 ) | ||||
|                 check6_history.check_id = check6.pk | ||||
|                 check6_history.agent_id = agent.agent_id | ||||
|                 check6_history.x = django_now - djangotime.timedelta(minutes=i * 2) | ||||
|                 check6_history.y = 0 | ||||
|                 check6_history.save() | ||||
|  | ||||
|             nla_task = AutomatedTask() | ||||
|  | ||||
|             nla_task.agent = agent | ||||
|             actions = [ | ||||
|                 { | ||||
| @@ -515,16 +612,21 @@ class Command(BaseCommand): | ||||
|             nla_task.actions = actions | ||||
|             nla_task.assigned_check = check6 | ||||
|             nla_task.name = "Restart NLA" | ||||
|             nla_task.task_type = "checkfailure" | ||||
|             nla_task.win_task_name = "demotask123" | ||||
|             nla_task.execution_time = "1.8443" | ||||
|             nla_task.last_run = djangotime.now() | ||||
|             nla_task.stdout = "no stdout" | ||||
|             nla_task.retcode = 0 | ||||
|             nla_task.sync_status = "synced" | ||||
|             nla_task.task_type = TaskType.CHECK_FAILURE | ||||
|             nla_task.save() | ||||
|  | ||||
|             nla_task_result = TaskResult() | ||||
|             nla_task_result.task = nla_task | ||||
|             nla_task_result.agent = agent | ||||
|             nla_task_result.execution_time = "1.8443" | ||||
|             nla_task_result.last_run = django_now | ||||
|             nla_task_result.stdout = "no stdout" | ||||
|             nla_task_result.retcode = 0 | ||||
|             nla_task_result.sync_status = TaskSyncStatus.SYNCED | ||||
|             nla_task_result.save() | ||||
|  | ||||
|             spool_task = AutomatedTask() | ||||
|  | ||||
|             spool_task.agent = agent | ||||
|             actions = [ | ||||
|                 { | ||||
| @@ -537,23 +639,25 @@ class Command(BaseCommand): | ||||
|             ] | ||||
|             spool_task.actions = actions | ||||
|             spool_task.name = "Clear the print spooler" | ||||
|             spool_task.task_type = "daily" | ||||
|             spool_task.run_time_date = djangotime.now() + djangotime.timedelta( | ||||
|                 minutes=10 | ||||
|             ) | ||||
|             spool_task.expire_date = djangotime.now() + djangotime.timedelta(days=753) | ||||
|             spool_task.task_type = TaskType.DAILY | ||||
|             spool_task.run_time_date = django_now + djangotime.timedelta(minutes=10) | ||||
|             spool_task.expire_date = django_now + djangotime.timedelta(days=753) | ||||
|             spool_task.daily_interval = 1 | ||||
|             spool_task.weekly_interval = 1 | ||||
|             spool_task.task_repetition_duration = "2h" | ||||
|             spool_task.task_repetition_interval = "25m" | ||||
|             spool_task.random_task_delay = "3m" | ||||
|             spool_task.win_task_name = "demospool123" | ||||
|             spool_task.last_run = djangotime.now() | ||||
|             spool_task.retcode = 0 | ||||
|             spool_task.stdout = spooler_stdout | ||||
|             spool_task.sync_status = "synced" | ||||
|             spool_task.save() | ||||
|  | ||||
|             spool_task_result = TaskResult() | ||||
|             spool_task_result.task = spool_task | ||||
|             spool_task_result.agent = agent | ||||
|             spool_task_result.last_run = django_now | ||||
|             spool_task_result.retcode = 0 | ||||
|             spool_task_result.stdout = spooler_stdout | ||||
|             spool_task_result.sync_status = TaskSyncStatus.SYNCED | ||||
|             spool_task_result.save() | ||||
|  | ||||
|             tmp_dir_task = AutomatedTask() | ||||
|             tmp_dir_task.agent = agent | ||||
|             tmp_dir_task.name = "show temp dir files" | ||||
| @@ -567,130 +671,148 @@ class Command(BaseCommand): | ||||
|                 } | ||||
|             ] | ||||
|             tmp_dir_task.actions = actions | ||||
|             tmp_dir_task.task_type = "manual" | ||||
|             tmp_dir_task.win_task_name = "demotemp" | ||||
|             tmp_dir_task.last_run = djangotime.now() | ||||
|             tmp_dir_task.stdout = temp_dir_stdout | ||||
|             tmp_dir_task.retcode = 0 | ||||
|             tmp_dir_task.sync_status = "synced" | ||||
|             tmp_dir_task.task_type = TaskType.MANUAL | ||||
|             tmp_dir_task.save() | ||||
|  | ||||
|             tmp_dir_task_result = TaskResult() | ||||
|             tmp_dir_task_result.task = tmp_dir_task | ||||
|             tmp_dir_task_result.agent = agent | ||||
|             tmp_dir_task_result.last_run = django_now | ||||
|             tmp_dir_task_result.stdout = temp_dir_stdout | ||||
|             tmp_dir_task_result.retcode = 0 | ||||
|             tmp_dir_task_result.sync_status = TaskSyncStatus.SYNCED | ||||
|             tmp_dir_task_result.save() | ||||
|  | ||||
|             check7 = Check() | ||||
|  | ||||
|             check7.agent = agent | ||||
|             check7.check_type = "script" | ||||
|             check7.status = "passing" | ||||
|             check7.last_run = djangotime.now() | ||||
|             check7.check_type = CheckType.SCRIPT | ||||
|  | ||||
|             check7.email_alert = random.choice([True, False]) | ||||
|             check7.text_alert = random.choice([True, False]) | ||||
|             check7.timeout = 120 | ||||
|             check7.retcode = 0 | ||||
|             check7.execution_time = "3.1337" | ||||
|  | ||||
|             check7.script = clear_spool | ||||
|             check7.stdout = spooler_stdout | ||||
|  | ||||
|             check7.save() | ||||
|  | ||||
|             check_result7 = CheckResult() | ||||
|             check_result7.assigned_check = check7 | ||||
|             check_result7.agent = agent | ||||
|             check_result7.status = CheckStatus.PASSING | ||||
|             check_result7.last_run = django_now | ||||
|             check_result7.retcode = 0 | ||||
|             check_result7.execution_time = "3.1337" | ||||
|             check_result7.stdout = spooler_stdout | ||||
|             check_result7.save() | ||||
|  | ||||
|             for i in range(30): | ||||
|                 check7_history = CheckHistory() | ||||
|                 check7_history.check_id = check7.id | ||||
|                 check7_history.x = djangotime.now() - djangotime.timedelta( | ||||
|                     minutes=i * 2 | ||||
|                 ) | ||||
|                 check7_history.check_id = check7.pk | ||||
|                 check7_history.agent_id = agent.agent_id | ||||
|                 check7_history.x = django_now - djangotime.timedelta(minutes=i * 2) | ||||
|                 check7_history.y = 0 | ||||
|                 check7_history.save() | ||||
|  | ||||
|             check8 = Check() | ||||
|             check8.agent = agent | ||||
|             check8.check_type = "winsvc" | ||||
|             check8.status = "passing" | ||||
|             check8.last_run = djangotime.now() | ||||
|             check8.email_alert = random.choice([True, False]) | ||||
|             check8.text_alert = random.choice([True, False]) | ||||
|             check8.more_info = "Status RUNNING" | ||||
|             check8.fails_b4_alert = 4 | ||||
|             check8.svc_name = "Spooler" | ||||
|             check8.svc_display_name = "Print Spooler" | ||||
|             check8.pass_if_start_pending = False | ||||
|             check8.restart_if_stopped = True | ||||
|             check8.save() | ||||
|             if agent.plat == AgentPlat.WINDOWS: | ||||
|                 check8 = Check() | ||||
|                 check8.agent = agent | ||||
|                 check8.check_type = CheckType.WINSVC | ||||
|                 check8.email_alert = random.choice([True, False]) | ||||
|                 check8.text_alert = random.choice([True, False]) | ||||
|                 check8.fails_b4_alert = 4 | ||||
|                 check8.svc_name = "Spooler" | ||||
|                 check8.svc_display_name = "Print Spooler" | ||||
|                 check8.pass_if_start_pending = False | ||||
|                 check8.restart_if_stopped = True | ||||
|                 check8.save() | ||||
|  | ||||
|                 check_result8 = CheckResult() | ||||
|                 check_result8.assigned_check = check8 | ||||
|                 check_result8.agent = agent | ||||
|                 check_result8.status = CheckStatus.PASSING | ||||
|                 check_result8.last_run = django_now | ||||
|                 check_result8.more_info = "Status RUNNING" | ||||
|                 check_result8.save() | ||||
|  | ||||
|                 for i in range(30): | ||||
|                     check8_history = CheckHistory() | ||||
|                     check8_history.check_id = check8.pk | ||||
|                     check8_history.agent_id = agent.agent_id | ||||
|                     check8_history.x = django_now - djangotime.timedelta(minutes=i * 2) | ||||
|                     if i == 10 or i == 18: | ||||
|                         check8_history.y = 1 | ||||
|                         check8_history.results = "Status STOPPED" | ||||
|                     else: | ||||
|                         check8_history.y = 0 | ||||
|                         check8_history.results = "Status RUNNING" | ||||
|                     check8_history.save() | ||||
|  | ||||
|                 check9 = Check() | ||||
|                 check9.agent = agent | ||||
|                 check9.check_type = CheckType.EVENT_LOG | ||||
|                 check9.name = "unexpected shutdown" | ||||
|                 check9.email_alert = random.choice([True, False]) | ||||
|                 check9.text_alert = random.choice([True, False]) | ||||
|                 check9.fails_b4_alert = 2 | ||||
|                 check9.log_name = EvtLogNames.APPLICATION | ||||
|                 check9.event_id = 1001 | ||||
|                 check9.event_type = EvtLogTypes.INFO | ||||
|                 check9.fail_when = EvtLogFailWhen.CONTAINS | ||||
|                 check9.search_last_days = 30 | ||||
|  | ||||
|                 check_result9 = CheckResult() | ||||
|                 check_result9.agent = agent | ||||
|                 check_result9.assigned_check = check9 | ||||
|                 check_result9.last_run = django_now | ||||
|                 if site in sites5: | ||||
|                     check_result9.extra_details = eventlog_check_fail_data | ||||
|                     check_result9.status = CheckStatus.FAILING | ||||
|                 else: | ||||
|                     check_result9.extra_details = {"log": []} | ||||
|                     check_result9.status = CheckStatus.PASSING | ||||
|  | ||||
|                 check9.save() | ||||
|                 check_result9.save() | ||||
|  | ||||
|                 for i in range(30): | ||||
|                     check9_history = CheckHistory() | ||||
|                     check9_history.check_id = check9.pk | ||||
|                     check9_history.agent_id = agent.agent_id | ||||
|                     check9_history.x = django_now - djangotime.timedelta(minutes=i * 2) | ||||
|                     if i == 10 or i == 18: | ||||
|                         check9_history.y = 1 | ||||
|                         check9_history.results = "Events Found: 16" | ||||
|                     else: | ||||
|                         check9_history.y = 0 | ||||
|                         check9_history.results = "Events Found: 0" | ||||
|                     check9_history.save() | ||||
|  | ||||
|                 pick = random.randint(1, 10) | ||||
|  | ||||
|                 if pick == 5 or pick == 3: | ||||
|                     reboot_time = django_now + djangotime.timedelta( | ||||
|                         minutes=random.randint(1000, 500000) | ||||
|                     ) | ||||
|                     date_obj = dt.datetime.strftime(reboot_time, "%Y-%m-%d %H:%M") | ||||
|  | ||||
|                     obj = dt.datetime.strptime(date_obj, "%Y-%m-%d %H:%M") | ||||
|  | ||||
|                     task_name = "TacticalRMM_SchedReboot_" + "".join( | ||||
|                         random.choice(string.ascii_letters) for _ in range(10) | ||||
|                     ) | ||||
|  | ||||
|                     sched_reboot = PendingAction() | ||||
|                     sched_reboot.agent = agent | ||||
|                     sched_reboot.action_type = PAAction.SCHED_REBOOT | ||||
|                     sched_reboot.details = { | ||||
|                         "time": str(obj), | ||||
|                         "taskname": task_name, | ||||
|                     } | ||||
|                     sched_reboot.save() | ||||
|  | ||||
|             self.stdout.write(self.style.SUCCESS(f"Added agent # {count_agents + 1}")) | ||||
|  | ||||
|         call_command("load_demo_scripts") | ||||
|         self.stdout.write("done") | ||||
|   | ||||
							
								
								
									
api/tacticalrmm/agents/management/commands/find_services.py (new file, 30 lines)
| @@ -0,0 +1,30 @@ | ||||
| from django.core.management.base import BaseCommand | ||||
|  | ||||
| from agents.models import Agent | ||||
| from tacticalrmm.constants import AGENT_DEFER | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = "Find all agents that have a certain service installed" | ||||
|  | ||||
|     def add_arguments(self, parser): | ||||
|         parser.add_argument("name", type=str) | ||||
|  | ||||
|     def handle(self, *args, **kwargs): | ||||
|         search = kwargs["name"].lower() | ||||
|  | ||||
|         agents = Agent.objects.defer(*AGENT_DEFER) | ||||
|         for agent in agents: | ||||
|             try: | ||||
|                 for svc in agent.services: | ||||
|                     if ( | ||||
|                         search in svc["name"].lower() | ||||
|                         or search in svc["display_name"].lower() | ||||
|                     ): | ||||
|                         self.stdout.write( | ||||
|                             self.style.SUCCESS( | ||||
|                                 f"{agent.hostname} - {svc['name']} ({svc['display_name']}) - {svc['status']}" | ||||
|                             ) | ||||
|                         ) | ||||
|             except: | ||||
|                 continue | ||||
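The new command takes a single positional argument and does a case-insensitive substring match against each agent's service name and display name. A minimal usage sketch; the service name "mesh" is only an illustrative value:

    from django.core.management import call_command

    # equivalent to: python manage.py find_services mesh
    # prints one line per matching agent, e.g. "HOSTNAME - mesh agent (Mesh Agent) - running"
    call_command("find_services", "mesh")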
| @@ -1,16 +1,17 @@ | ||||
| from agents.models import Agent | ||||
| from django.conf import settings | ||||
| from django.core.management.base import BaseCommand | ||||
|  | ||||
| from agents.models import Agent | ||||
| from tacticalrmm.constants import AGENT_STATUS_ONLINE, ONLINE_AGENTS | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = "Shows online agents that are not on the latest version" | ||||
|  | ||||
|     def handle(self, *args, **kwargs): | ||||
|         q = Agent.objects.exclude(version=settings.LATEST_AGENT_VER).only( | ||||
|             "pk", "version", "last_seen", "overdue_time", "offline_time" | ||||
|         ) | ||||
|         agents = [i for i in q if i.status == "online"] | ||||
|         only = ONLINE_AGENTS + ("hostname",) | ||||
|         q = Agent.objects.exclude(version=settings.LATEST_AGENT_VER).only(*only) | ||||
|         agents = [i for i in q if i.status == AGENT_STATUS_ONLINE] | ||||
|         for agent in agents: | ||||
|             self.stdout.write( | ||||
|                 self.style.SUCCESS(f"{agent.hostname} - v{agent.version}") | ||||
|   | ||||
| @@ -1,10 +1,10 @@ | ||||
| from agents.models import Agent | ||||
| from agents.tasks import send_agent_update_task | ||||
| from core.models import CoreSettings | ||||
| from django.conf import settings | ||||
| from django.core.management.base import BaseCommand | ||||
| from packaging import version as pyver | ||||
|  | ||||
| from agents.models import Agent | ||||
| from agents.tasks import send_agent_update_task | ||||
| from core.utils import get_core_settings, token_is_valid | ||||
| from tacticalrmm.constants import AGENT_DEFER | ||||
|  | ||||
|  | ||||
| @@ -12,8 +12,8 @@ class Command(BaseCommand): | ||||
|     help = "Triggers an agent update task to run" | ||||
|  | ||||
|     def handle(self, *args, **kwargs): | ||||
|         core = CoreSettings.objects.first() | ||||
|         if not core.agent_auto_update:  # type: ignore | ||||
|         core = get_core_settings() | ||||
|         if not core.agent_auto_update: | ||||
|             return | ||||
|  | ||||
|         q = Agent.objects.defer(*AGENT_DEFER).exclude(version=settings.LATEST_AGENT_VER) | ||||
| @@ -22,4 +22,5 @@ class Command(BaseCommand): | ||||
|             for i in q | ||||
|             if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER) | ||||
|         ] | ||||
|         send_agent_update_task.delay(agent_ids=agent_ids) | ||||
|         token, _ = token_is_valid() | ||||
|         send_agent_update_task.delay(agent_ids=agent_ids, token=token, force=False) | ||||
|   | ||||
| @@ -0,0 +1,26 @@ | ||||
| # Generated by Django 4.0.3 on 2022-04-07 17:28 | ||||
|  | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('clients', '0020_auto_20211226_0547'), | ||||
|         ('agents', '0046_alter_agenthistory_command'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name='agent', | ||||
|             name='plat', | ||||
|             field=models.CharField(default='windows', max_length=255), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name='agent', | ||||
|             name='site', | ||||
|             field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.RESTRICT, related_name='agents', to='clients.site'), | ||||
|             preserve_default=False, | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,21 @@ | ||||
| # Generated by Django 4.0.3 on 2022-04-16 17:39 | ||||
|  | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('agents', '0047_alter_agent_plat_alter_agent_site'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RemoveField( | ||||
|             model_name='agent', | ||||
|             name='has_patches_pending', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='agent', | ||||
|             name='pending_actions_count', | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,17 @@ | ||||
| # Generated by Django 4.0.3 on 2022-04-18 14:29 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('agents', '0048_remove_agent_has_patches_pending_and_more'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddIndex( | ||||
|             model_name='agent', | ||||
|             index=models.Index(fields=['monitoring_type'], name='agents_agen_monitor_df8816_idx'), | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,17 @@ | ||||
| # Generated by Django 4.0.4 on 2022-04-25 06:51 | ||||
|  | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('agents', '0049_agent_agents_agen_monitor_df8816_idx'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RemoveField( | ||||
|             model_name='agent', | ||||
|             name='plat_release', | ||||
|         ), | ||||
|     ] | ||||
							
								
								
									
api/tacticalrmm/agents/migrations/0051_alter_agent_plat.py (new file, 18 lines)
| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 4.0.4 on 2022-05-18 03:50 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('agents', '0050_remove_agent_plat_release'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name='agent', | ||||
|             name='plat', | ||||
|             field=models.CharField(choices=[('windows', 'Windows'), ('linux', 'Linux'), ('darwin', 'macOS')], default='windows', max_length=255), | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 4.0.4 on 2022-05-18 05:28 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('agents', '0051_alter_agent_plat'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name='agent', | ||||
|             name='monitoring_type', | ||||
|             field=models.CharField(choices=[('server', 'Server'), ('workstation', 'Workstation')], default='server', max_length=30), | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,17 @@ | ||||
| # Generated by Django 4.0.4 on 2022-05-18 06:10 | ||||
|  | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('agents', '0052_alter_agent_monitoring_type'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RemoveField( | ||||
|             model_name='agenthistory', | ||||
|             name='status', | ||||
|         ), | ||||
|     ] | ||||
							
								
								
									
api/tacticalrmm/agents/migrations/0054_alter_agent_goarch.py (new file, 18 lines)
| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 4.0.4 on 2022-06-06 04:03 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('agents', '0053_remove_agenthistory_status'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name='agent', | ||||
|             name='goarch', | ||||
|             field=models.CharField(blank=True, choices=[('amd64', 'amd64'), ('386', '386'), ('arm64', 'arm64'), ('arm', 'arm')], max_length=255, null=True), | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,38 +1,73 @@ | ||||
| import asyncio | ||||
| import base64 | ||||
| import re | ||||
| import time | ||||
| from collections import Counter | ||||
| from distutils.version import LooseVersion | ||||
| from typing import Any | ||||
| from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Union, cast | ||||
|  | ||||
| import msgpack | ||||
| import nats | ||||
| import validators | ||||
| from asgiref.sync import sync_to_async | ||||
| from core.models import TZ_CHOICES, CoreSettings | ||||
| from Crypto.Cipher import AES | ||||
| from Crypto.Hash import SHA3_384 | ||||
| from Crypto.Random import get_random_bytes | ||||
| from Crypto.Util.Padding import pad | ||||
| from django.conf import settings | ||||
| from django.contrib.postgres.fields import ArrayField | ||||
| from django.core.cache import cache | ||||
| from django.db import models | ||||
| from django.utils import timezone as djangotime | ||||
| from logs.models import BaseAuditModel, DebugLog | ||||
| from nats.errors import TimeoutError | ||||
| from packaging import version as pyver | ||||
|  | ||||
| from agents.utils import get_agent_url | ||||
| from core.models import TZ_CHOICES | ||||
| from core.utils import get_core_settings, send_command_with_mesh | ||||
| from logs.models import BaseAuditModel, DebugLog, PendingAction | ||||
| from tacticalrmm.constants import ( | ||||
|     AGENT_STATUS_OFFLINE, | ||||
|     AGENT_STATUS_ONLINE, | ||||
|     AGENT_STATUS_OVERDUE, | ||||
|     ONLINE_AGENTS, | ||||
|     AgentHistoryType, | ||||
|     AgentMonType, | ||||
|     AgentPlat, | ||||
|     AlertSeverity, | ||||
|     CheckStatus, | ||||
|     CheckType, | ||||
|     CustomFieldType, | ||||
|     DebugLogType, | ||||
|     GoArch, | ||||
|     PAAction, | ||||
|     PAStatus, | ||||
| ) | ||||
| from tacticalrmm.helpers import get_nats_ports | ||||
| from tacticalrmm.models import PermissionQuerySet | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from alerts.models import Alert, AlertTemplate | ||||
|     from automation.models import Policy | ||||
|     from autotasks.models import AutomatedTask | ||||
|     from checks.models import Check | ||||
|     from clients.models import Client | ||||
|     from winupdate.models import WinUpdatePolicy | ||||
|  | ||||
| # type helpers | ||||
| Disk = Union[Dict[str, Any], str] | ||||
|  | ||||
|  | ||||
| class Agent(BaseAuditModel): | ||||
|     class Meta: | ||||
|         indexes = [ | ||||
|             models.Index(fields=["monitoring_type"]), | ||||
|         ] | ||||
|  | ||||
|     objects = PermissionQuerySet.as_manager() | ||||
|  | ||||
|     version = models.CharField(default="0.1.0", max_length=255) | ||||
|     operating_system = models.CharField(null=True, blank=True, max_length=255) | ||||
|     plat = models.CharField(max_length=255, null=True, blank=True) | ||||
|     goarch = models.CharField(max_length=255, null=True, blank=True) | ||||
|     plat_release = models.CharField(max_length=255, null=True, blank=True) | ||||
|     plat: "AgentPlat" = models.CharField(  # type: ignore | ||||
|         max_length=255, choices=AgentPlat.choices, default=AgentPlat.WINDOWS | ||||
|     ) | ||||
|     goarch: "GoArch" = models.CharField(  # type: ignore | ||||
|         max_length=255, choices=GoArch.choices, null=True, blank=True | ||||
|     ) | ||||
|     hostname = models.CharField(max_length=255) | ||||
|     agent_id = models.CharField(max_length=200, unique=True) | ||||
|     last_seen = models.DateTimeField(null=True, blank=True) | ||||
| @@ -43,7 +78,9 @@ class Agent(BaseAuditModel): | ||||
|     boot_time = models.FloatField(null=True, blank=True) | ||||
|     logged_in_username = models.CharField(null=True, blank=True, max_length=255) | ||||
|     last_logged_in_user = models.CharField(null=True, blank=True, max_length=255) | ||||
|     monitoring_type = models.CharField(max_length=30) | ||||
|     monitoring_type = models.CharField( | ||||
|         max_length=30, choices=AgentMonType.choices, default=AgentMonType.SERVER | ||||
|     ) | ||||
|     description = models.CharField(null=True, blank=True, max_length=255) | ||||
|     mesh_node_id = models.CharField(null=True, blank=True, max_length=255) | ||||
|     overdue_email_alert = models.BooleanField(default=False) | ||||
| @@ -61,8 +98,6 @@ class Agent(BaseAuditModel): | ||||
|     ) | ||||
|     maintenance_mode = models.BooleanField(default=False) | ||||
|     block_policy_inheritance = models.BooleanField(default=False) | ||||
|     pending_actions_count = models.PositiveIntegerField(default=0) | ||||
|     has_patches_pending = models.BooleanField(default=False) | ||||
|     alert_template = models.ForeignKey( | ||||
|         "alerts.AlertTemplate", | ||||
|         related_name="agents", | ||||
| @@ -73,9 +108,7 @@ class Agent(BaseAuditModel): | ||||
|     site = models.ForeignKey( | ||||
|         "clients.Site", | ||||
|         related_name="agents", | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         on_delete=models.SET_NULL, | ||||
|         on_delete=models.RESTRICT, | ||||
|     ) | ||||
|     policy = models.ForeignKey( | ||||
|         "automation.Policy", | ||||
| @@ -85,49 +118,28 @@ class Agent(BaseAuditModel): | ||||
|         on_delete=models.SET_NULL, | ||||
|     ) | ||||
|  | ||||
|     def save(self, *args, **kwargs): | ||||
|         # get old agent if exists | ||||
|         old_agent = Agent.objects.get(pk=self.pk) if self.pk else None | ||||
|         super(Agent, self).save(old_model=old_agent, *args, **kwargs) | ||||
|  | ||||
|         # check if new agent has been created | ||||
|         # or check if policy have changed on agent | ||||
|         # or if site has changed on agent and if so generate policies | ||||
|         # or if agent was changed from server or workstation | ||||
|         if ( | ||||
|             not old_agent | ||||
|             or (old_agent and old_agent.policy != self.policy) | ||||
|             or (old_agent.site != self.site) | ||||
|             or (old_agent.monitoring_type != self.monitoring_type) | ||||
|             or (old_agent.block_policy_inheritance != self.block_policy_inheritance) | ||||
|         ): | ||||
|             from automation.tasks import generate_agent_checks_task | ||||
|  | ||||
|             generate_agent_checks_task.delay(agents=[self.pk], create_tasks=True) | ||||
|  | ||||
|     def __str__(self): | ||||
|     def __str__(self) -> str: | ||||
|         return self.hostname | ||||
|  | ||||
|     @property | ||||
|     def client(self): | ||||
|     def client(self) -> "Client": | ||||
|         return self.site.client | ||||
|  | ||||
|     @property | ||||
|     def timezone(self): | ||||
|     def timezone(self) -> str: | ||||
|         # return the default timezone unless the timezone is explicitly set per agent | ||||
|         if self.time_zone is not None: | ||||
|         if self.time_zone: | ||||
|             return self.time_zone | ||||
|         else: | ||||
|             from core.models import CoreSettings | ||||
|  | ||||
|             return CoreSettings.objects.first().default_time_zone  # type: ignore | ||||
|             return get_core_settings().default_time_zone | ||||
|  | ||||
|     @property | ||||
|     def is_posix(self): | ||||
|         return self.plat == "linux" or self.plat == "darwin" | ||||
|     def is_posix(self) -> bool: | ||||
|         return self.plat in {AgentPlat.LINUX, AgentPlat.DARWIN} | ||||
|  | ||||
|     # DEPRECATED, use goarch instead | ||||
|     @property | ||||
|     def arch(self): | ||||
|     def arch(self) -> Optional[str]: | ||||
|         if self.is_posix: | ||||
|             return self.goarch | ||||
|  | ||||
| @@ -138,53 +150,102 @@ class Agent(BaseAuditModel): | ||||
|                 return "32" | ||||
|         return None | ||||
|  | ||||
|     @property | ||||
|     def winagent_dl(self): | ||||
|         if self.arch == "64": | ||||
|             return settings.DL_64 | ||||
|         elif self.arch == "32": | ||||
|             return settings.DL_32 | ||||
|         return None | ||||
|     def do_update(self, *, token: str = "", force: bool = False) -> str: | ||||
|         ver = settings.LATEST_AGENT_VER | ||||
|  | ||||
|         if not self.goarch: | ||||
|             DebugLog.warning( | ||||
|                 agent=self, | ||||
|                 log_type=DebugLogType.AGENT_ISSUES, | ||||
|                 message=f"Unable to determine arch on {self.hostname}({self.agent_id}). Skipping agent update.", | ||||
|             ) | ||||
|             return "noarch" | ||||
|  | ||||
|         if pyver.parse(self.version) <= pyver.parse("1.3.0"): | ||||
|             return "not supported" | ||||
|  | ||||
|         url = get_agent_url(goarch=self.goarch, plat=self.plat, token=token) | ||||
|         bin = f"tacticalagent-v{ver}-{self.plat}-{self.goarch}.exe" | ||||
|  | ||||
|         if not force: | ||||
|             if self.pendingactions.filter(  # type: ignore | ||||
|                 action_type=PAAction.AGENT_UPDATE, status=PAStatus.PENDING | ||||
|             ).exists(): | ||||
|                 self.pendingactions.filter(  # type: ignore | ||||
|                     action_type=PAAction.AGENT_UPDATE, status=PAStatus.PENDING | ||||
|                 ).delete() | ||||
|  | ||||
|             PendingAction.objects.create( | ||||
|                 agent=self, | ||||
|                 action_type=PAAction.AGENT_UPDATE, | ||||
|                 details={ | ||||
|                     "url": url, | ||||
|                     "version": ver, | ||||
|                     "inno": bin, | ||||
|                 }, | ||||
|             ) | ||||
|  | ||||
|         nats_data = { | ||||
|             "func": "agentupdate", | ||||
|             "payload": { | ||||
|                 "url": url, | ||||
|                 "version": ver, | ||||
|                 "inno": bin, | ||||
|             }, | ||||
|         } | ||||
|         asyncio.run(self.nats_cmd(nats_data, wait=False)) | ||||
|         return "created" | ||||
|  | ||||
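A hedged usage sketch of the new do_update helper. In practice the token would come from token_is_valid() as in the update_agents command earlier in this diff; the empty token and the bare first() lookup here are assumptions for illustration, and the NATS call requires a running NATS server:

    agent = Agent.objects.first()
    if agent is not None:
        # queues a PendingAction (unless force=True) and sends the "agentupdate" NATS command
        status = agent.do_update(token="", force=False)  # "created", "noarch" or "not supported"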
|     @property | ||||
|     def win_inno_exe(self): | ||||
|         if self.arch == "64": | ||||
|             return f"winagent-v{settings.LATEST_AGENT_VER}.exe" | ||||
|         elif self.arch == "32": | ||||
|             return f"winagent-v{settings.LATEST_AGENT_VER}-x86.exe" | ||||
|         return None | ||||
|  | ||||
|     @property | ||||
|     def status(self): | ||||
|     def status(self) -> str: | ||||
|         offline = djangotime.now() - djangotime.timedelta(minutes=self.offline_time) | ||||
|         overdue = djangotime.now() - djangotime.timedelta(minutes=self.overdue_time) | ||||
|  | ||||
|         if self.last_seen is not None: | ||||
|             if (self.last_seen < offline) and (self.last_seen > overdue): | ||||
|                 return "offline" | ||||
|                 return AGENT_STATUS_OFFLINE | ||||
|             elif (self.last_seen < offline) and (self.last_seen < overdue): | ||||
|                 return "overdue" | ||||
|                 return AGENT_STATUS_OVERDUE | ||||
|             else: | ||||
|                 return "online" | ||||
|                 return AGENT_STATUS_ONLINE | ||||
|         else: | ||||
|             return "offline" | ||||
|             return AGENT_STATUS_OFFLINE | ||||
|  | ||||
|     @property | ||||
|     def checks(self): | ||||
|     def checks(self) -> Dict[str, Any]: | ||||
|         from checks.models import CheckResult | ||||
|  | ||||
|         total, passing, failing, warning, info = 0, 0, 0, 0, 0 | ||||
|  | ||||
|         if self.agentchecks.exists():  # type: ignore | ||||
|             for i in self.agentchecks.all():  # type: ignore | ||||
|                 total += 1 | ||||
|                 if i.status == "passing": | ||||
|                     passing += 1 | ||||
|                 elif i.status == "failing": | ||||
|                     if i.alert_severity == "error": | ||||
|                         failing += 1 | ||||
|                     elif i.alert_severity == "warning": | ||||
|                         warning += 1 | ||||
|                     elif i.alert_severity == "info": | ||||
|                         info += 1 | ||||
|         for check in self.get_checks_with_policies(exclude_overridden=True): | ||||
|             total += 1 | ||||
|             if ( | ||||
|                 not hasattr(check.check_result, "status") | ||||
|                 or isinstance(check.check_result, CheckResult) | ||||
|                 and check.check_result.status == CheckStatus.PASSING | ||||
|             ): | ||||
|                 passing += 1 | ||||
|             elif ( | ||||
|                 isinstance(check.check_result, CheckResult) | ||||
|                 and check.check_result.status == CheckStatus.FAILING | ||||
|             ): | ||||
|                 alert_severity = ( | ||||
|                     check.check_result.alert_severity | ||||
|                     if check.check_type | ||||
|                     in [ | ||||
|                         CheckType.MEMORY, | ||||
|                         CheckType.CPU_LOAD, | ||||
|                         CheckType.DISK_SPACE, | ||||
|                         CheckType.SCRIPT, | ||||
|                     ] | ||||
|                     else check.alert_severity | ||||
|                 ) | ||||
|                 if alert_severity == AlertSeverity.ERROR: | ||||
|                     failing += 1 | ||||
|                 elif alert_severity == AlertSeverity.WARNING: | ||||
|                     warning += 1 | ||||
|                 elif alert_severity == AlertSeverity.INFO: | ||||
|                     info += 1 | ||||
|  | ||||
|         ret = { | ||||
|             "total": total, | ||||
| @@ -197,10 +258,10 @@ class Agent(BaseAuditModel): | ||||
|         return ret | ||||
|  | ||||
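A short sketch of consuming the refactored checks property; only the "total" key is visible in this hunk, so the other keys are assumed to mirror the counters computed above:

    summary = agent.checks
    print(f'{agent.hostname}: {summary["total"]} checks, {summary.get("passing", 0)} passing')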
|     @property | ||||
|     def cpu_model(self): | ||||
|     def cpu_model(self) -> List[str]: | ||||
|         if self.is_posix: | ||||
|             try: | ||||
|                 return self.wmi_detail["cpus"] | ||||
|                 return cast(List[str], self.wmi_detail["cpus"]) | ||||
|             except: | ||||
|                 return ["unknown cpu model"] | ||||
|  | ||||
| @@ -214,12 +275,13 @@ class Agent(BaseAuditModel): | ||||
|             return ["unknown cpu model"] | ||||
|  | ||||
|     @property | ||||
|     def graphics(self): | ||||
|     def graphics(self) -> str: | ||||
|         if self.is_posix: | ||||
|             try: | ||||
|                 if not self.wmi_detail["gpus"]: | ||||
|                     return "No graphics cards" | ||||
|                 return self.wmi_detail["gpus"] | ||||
|  | ||||
|                 return ", ".join(self.wmi_detail["gpus"]) | ||||
|             except: | ||||
|                 return "Error getting graphics cards" | ||||
|  | ||||
| @@ -243,7 +305,7 @@ class Agent(BaseAuditModel): | ||||
|             return "Graphics info requires agent v1.4.14" | ||||
|  | ||||
|     @property | ||||
|     def local_ips(self): | ||||
|     def local_ips(self) -> str: | ||||
|         if self.is_posix: | ||||
|             try: | ||||
|                 return ", ".join(self.wmi_detail["local_ips"]) | ||||
| @@ -270,15 +332,15 @@ class Agent(BaseAuditModel): | ||||
|                     ret.append(ip) | ||||
|  | ||||
|         if len(ret) == 1: | ||||
|             return ret[0] | ||||
|             return cast(str, ret[0]) | ||||
|         else: | ||||
|             return ", ".join(ret) if ret else "error getting local ips" | ||||
|  | ||||
|     @property | ||||
|     def make_model(self): | ||||
|     def make_model(self) -> str: | ||||
|         if self.is_posix: | ||||
|             try: | ||||
|                 return self.wmi_detail["make_model"] | ||||
|                 return cast(str, self.wmi_detail["make_model"]) | ||||
|             except: | ||||
|                 return "error getting make/model" | ||||
|  | ||||
| @@ -304,17 +366,17 @@ class Agent(BaseAuditModel): | ||||
|  | ||||
|         try: | ||||
|             comp_sys_prod = self.wmi_detail["comp_sys_prod"][0] | ||||
|             return [x["Version"] for x in comp_sys_prod if "Version" in x][0] | ||||
|             return cast(str, [x["Version"] for x in comp_sys_prod if "Version" in x][0]) | ||||
|         except: | ||||
|             pass | ||||
|  | ||||
|         return "unknown make/model" | ||||
|  | ||||
|     @property | ||||
|     def physical_disks(self): | ||||
|     def physical_disks(self) -> Sequence[Disk]: | ||||
|         if self.is_posix: | ||||
|             try: | ||||
|                 return self.wmi_detail["disks"] | ||||
|                 return cast(List[Disk], self.wmi_detail["disks"]) | ||||
|             except: | ||||
|                 return ["unknown disk"] | ||||
|  | ||||
| @@ -339,14 +401,94 @@ class Agent(BaseAuditModel): | ||||
|         except: | ||||
|             return ["unknown disk"] | ||||
|  | ||||
|     def is_supported_script(self, platforms: list) -> bool: | ||||
|     @classmethod | ||||
|     def online_agents(cls, min_version: str = "") -> "List[Agent]": | ||||
|         if min_version: | ||||
|             return [ | ||||
|                 i | ||||
|                 for i in cls.objects.only(*ONLINE_AGENTS) | ||||
|                 if pyver.parse(i.version) >= pyver.parse(min_version) | ||||
|                 and i.status == AGENT_STATUS_ONLINE | ||||
|             ] | ||||
|  | ||||
|         return [ | ||||
|             i | ||||
|             for i in cls.objects.only(*ONLINE_AGENTS) | ||||
|             if i.status == AGENT_STATUS_ONLINE | ||||
|         ] | ||||
|  | ||||
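Usage sketch for the new online_agents classmethod (the version string is illustrative):

    all_online = Agent.online_agents()
    modern_online = Agent.online_agents(min_version="2.0.0")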
|     def is_supported_script(self, platforms: List[str]) -> bool: | ||||
|         return self.plat.lower() in platforms if platforms else True | ||||
|  | ||||
|     def get_agent_policies(self): | ||||
|     def get_checks_with_policies( | ||||
|         self, exclude_overridden: bool = False | ||||
|     ) -> "List[Check]": | ||||
|  | ||||
|         if exclude_overridden: | ||||
|             checks = ( | ||||
|                 list( | ||||
|                     check | ||||
|                     for check in self.agentchecks.all() | ||||
|                     if not check.overridden_by_policy | ||||
|                 ) | ||||
|                 + self.get_checks_from_policies() | ||||
|             ) | ||||
|         else: | ||||
|             checks = list(self.agentchecks.all()) + self.get_checks_from_policies() | ||||
|         return self.add_check_results(checks) | ||||
|  | ||||
|     def get_tasks_with_policies(self) -> "List[AutomatedTask]": | ||||
|  | ||||
|         tasks = list(self.autotasks.all()) + self.get_tasks_from_policies() | ||||
|         return self.add_task_results(tasks) | ||||
|  | ||||
|     def add_task_results(self, tasks: "List[AutomatedTask]") -> "List[AutomatedTask]": | ||||
|  | ||||
|         results = self.taskresults.all()  # type: ignore | ||||
|  | ||||
|         for task in tasks: | ||||
|             for result in results: | ||||
|                 if result.task.id == task.pk: | ||||
|                     task.task_result = result | ||||
|                     break | ||||
|  | ||||
|         return tasks | ||||
|  | ||||
|     def add_check_results(self, checks: "List[Check]") -> "List[Check]": | ||||
|  | ||||
|         results = self.checkresults.all()  # type: ignore | ||||
|  | ||||
|         for check in checks: | ||||
|             for result in results: | ||||
|                 if result.assigned_check.id == check.pk: | ||||
|                     check.check_result = result | ||||
|                     break | ||||
|  | ||||
|         return checks | ||||
|  | ||||
|     def get_agent_policies(self) -> "Dict[str, Optional[Policy]]": | ||||
|         from checks.models import Check | ||||
|  | ||||
|         site_policy = getattr(self.site, f"{self.monitoring_type}_policy", None) | ||||
|         client_policy = getattr(self.client, f"{self.monitoring_type}_policy", None) | ||||
|         default_policy = getattr( | ||||
|             CoreSettings.objects.first(), f"{self.monitoring_type}_policy", None | ||||
|             get_core_settings(), f"{self.monitoring_type}_policy", None | ||||
|         ) | ||||
|  | ||||
|         # prefetch excluded objects on policies only if the policy is not None | ||||
|         models.prefetch_related_objects( | ||||
|             [ | ||||
|                 policy | ||||
|                 for policy in [self.policy, site_policy, client_policy, default_policy] | ||||
|                 if policy | ||||
|             ], | ||||
|             "excluded_agents", | ||||
|             "excluded_sites", | ||||
|             "excluded_clients", | ||||
|             models.Prefetch( | ||||
|                 "policychecks", queryset=Check.objects.select_related("script") | ||||
|             ), | ||||
|             "autotasks", | ||||
|         ) | ||||
|  | ||||
|         return { | ||||
| @@ -373,32 +515,34 @@ class Agent(BaseAuditModel): | ||||
|     def check_run_interval(self) -> int: | ||||
|         interval = self.check_interval | ||||
|         # determine if any agent checks have a custom interval and set the lowest interval | ||||
|         for check in self.agentchecks.filter(overriden_by_policy=False):  # type: ignore | ||||
|         for check in self.get_checks_with_policies(): | ||||
|             if check.run_interval and check.run_interval < interval: | ||||
|  | ||||
|                 # don't allow check runs less than 15s | ||||
|                 if check.run_interval < 15: | ||||
|                     interval = 15 | ||||
|                 else: | ||||
|                     interval = check.run_interval | ||||
|                 interval = 15 if check.run_interval < 15 else check.run_interval | ||||
|  | ||||
|         return interval | ||||
|  | ||||
|     def run_script( | ||||
|         self, | ||||
|         scriptpk: int, | ||||
|         args: list[str] = [], | ||||
|         args: List[str] = [], | ||||
|         timeout: int = 120, | ||||
|         full: bool = False, | ||||
|         wait: bool = False, | ||||
|         run_on_any: bool = False, | ||||
|         history_pk: int = 0, | ||||
|         run_as_user: bool = False, | ||||
|     ) -> Any: | ||||
|  | ||||
|         from scripts.models import Script | ||||
|  | ||||
|         script = Script.objects.get(pk=scriptpk) | ||||
|  | ||||
|         # always override if set on script model | ||||
|         if script.run_as_user: | ||||
|             run_as_user = True | ||||
|  | ||||
|         parsed_args = script.parse_script_args(self, script.shell, args) | ||||
|  | ||||
|         data = { | ||||
| @@ -409,6 +553,7 @@ class Agent(BaseAuditModel): | ||||
|                 "code": script.code, | ||||
|                 "shell": script.shell, | ||||
|             }, | ||||
|             "run_as_user": run_as_user, | ||||
|         } | ||||
|  | ||||
|         if history_pk != 0: | ||||
| @@ -424,15 +569,7 @@ class Agent(BaseAuditModel): | ||||
|             if r == "pong": | ||||
|                 running_agent = self | ||||
|             else: | ||||
|                 online = [ | ||||
|                     agent | ||||
|                     for agent in Agent.objects.only( | ||||
|                         "pk", "agent_id", "last_seen", "overdue_time", "offline_time" | ||||
|                     ) | ||||
|                     if agent.status == "online" | ||||
|                 ] | ||||
|  | ||||
|                 for agent in online: | ||||
|                 for agent in Agent.online_agents(): | ||||
|                     r = asyncio.run(agent.nats_cmd(nats_ping, timeout=1)) | ||||
|                     if r == "pong": | ||||
|                         running_agent = agent | ||||
| @@ -449,49 +586,44 @@ class Agent(BaseAuditModel): | ||||
|         return "ok" | ||||
|  | ||||
|     # auto approves updates | ||||
|     def approve_updates(self): | ||||
|     def approve_updates(self) -> None: | ||||
|         patch_policy = self.get_patch_policy() | ||||
|  | ||||
|         updates = list() | ||||
|         severity_list = list() | ||||
|         if patch_policy.critical == "approve": | ||||
|             updates += self.winupdates.filter(  # type: ignore | ||||
|                 severity="Critical", installed=False | ||||
|             ).exclude(action="approve") | ||||
|             severity_list.append("Critical") | ||||
|  | ||||
|         if patch_policy.important == "approve": | ||||
|             updates += self.winupdates.filter(  # type: ignore | ||||
|                 severity="Important", installed=False | ||||
|             ).exclude(action="approve") | ||||
|             severity_list.append("Important") | ||||
|  | ||||
|         if patch_policy.moderate == "approve": | ||||
|             updates += self.winupdates.filter(  # type: ignore | ||||
|                 severity="Moderate", installed=False | ||||
|             ).exclude(action="approve") | ||||
|             severity_list.append("Moderate") | ||||
|  | ||||
|         if patch_policy.low == "approve": | ||||
|             updates += self.winupdates.filter(severity="Low", installed=False).exclude(  # type: ignore | ||||
|                 action="approve" | ||||
|             ) | ||||
|             severity_list.append("Low") | ||||
|  | ||||
|         if patch_policy.other == "approve": | ||||
|             updates += self.winupdates.filter(severity="", installed=False).exclude(  # type: ignore | ||||
|                 action="approve" | ||||
|             ) | ||||
|             severity_list.append("") | ||||
|  | ||||
|         for update in updates: | ||||
|             update.action = "approve" | ||||
|             update.save(update_fields=["action"]) | ||||
|         self.winupdates.filter(severity__in=severity_list, installed=False).exclude( | ||||
|             action="approve" | ||||
|         ).update(action="approve") | ||||
|  | ||||
|     # returns agent policy merged with a client or site specific policy | ||||
|     def get_patch_policy(self): | ||||
|     def get_patch_policy(self) -> "WinUpdatePolicy": | ||||
|         from winupdate.models import WinUpdatePolicy | ||||
|  | ||||
|         # check if site has a patch policy and if so use it | ||||
|         patch_policy = None | ||||
|         agent_policy = self.winupdatepolicy.first()  # type: ignore | ||||
|  | ||||
|         agent_policy = self.winupdatepolicy.first() | ||||
|  | ||||
|         if not agent_policy: | ||||
|             agent_policy = WinUpdatePolicy.objects.create(agent=self) | ||||
|  | ||||
|         policies = self.get_agent_policies() | ||||
|  | ||||
|         processed_policies = list() | ||||
|         processed_policies: List[int] = list() | ||||
|         for _, policy in policies.items(): | ||||
|             if ( | ||||
|                 policy | ||||
| @@ -545,12 +677,13 @@ class Agent(BaseAuditModel): | ||||
|  | ||||
|     # sets alert template assigned in the following order: policy, site, client, global | ||||
|     # sets None if nothing is found | ||||
|     def set_alert_template(self): | ||||
|         core = CoreSettings.objects.first() | ||||
|     def set_alert_template(self) -> "Optional[AlertTemplate]": | ||||
|         core = get_core_settings() | ||||
|  | ||||
|         policies = self.get_agent_policies() | ||||
|  | ||||
|         # loop through all policies applied to agent and return an alert_template if found | ||||
|         processed_policies = list() | ||||
|         processed_policies: List[int] = list() | ||||
|         for key, policy in policies.items(): | ||||
|             # default alert_template will override a default policy with alert template applied | ||||
|             if ( | ||||
| @@ -598,51 +731,71 @@ class Agent(BaseAuditModel): | ||||
|  | ||||
|         return None | ||||
|  | ||||
|     def generate_checks_from_policies(self): | ||||
|     def get_or_create_alert_if_needed( | ||||
|         self, alert_template: "Optional[AlertTemplate]" | ||||
|     ) -> "Optional[Alert]": | ||||
|         from alerts.models import Alert | ||||
|  | ||||
|         return Alert.create_or_return_availability_alert( | ||||
|             self, skip_create=not self.should_create_alert(alert_template) | ||||
|         ) | ||||
|  | ||||
|     def get_checks_from_policies(self) -> "List[Check]": | ||||
|         from automation.models import Policy | ||||
|  | ||||
|         # Clear agent checks that have overriden_by_policy set | ||||
|         self.agentchecks.update(overriden_by_policy=False)  # type: ignore | ||||
|         # check if agent is blocking inheritance | ||||
|         if self.block_policy_inheritance or self.agentchecks.exists(): | ||||
|             cache_key = f"agent_{self.agent_id}_checks" | ||||
|  | ||||
|         # Generate checks based on policies | ||||
|         Policy.generate_policy_checks(self) | ||||
|         elif self.policy: | ||||
|             cache_key = f"site_{self.monitoring_type}_{self.site_id}_policy_{self.policy_id}_checks" | ||||
|  | ||||
|     def generate_tasks_from_policies(self): | ||||
|         else: | ||||
|             cache_key = f"site_{self.monitoring_type}_{self.site_id}_checks" | ||||
|  | ||||
|         cached_checks = cache.get(cache_key) | ||||
|         if isinstance(cached_checks, list): | ||||
|             return cached_checks | ||||
|         else: | ||||
|             # clear agent checks that have overridden_by_policy set | ||||
|             self.agentchecks.update(overridden_by_policy=False)  # type: ignore | ||||
|  | ||||
|             # get agent checks based on policies | ||||
|             checks = Policy.get_policy_checks(self) | ||||
|             cache.set(cache_key, checks, 600) | ||||
|             return checks | ||||
|  | ||||
|     def get_tasks_from_policies(self) -> "List[AutomatedTask]": | ||||
|         from automation.models import Policy | ||||
|  | ||||
|         # Generate tasks based on policies | ||||
|         Policy.generate_policy_tasks(self) | ||||
|         # check if agent is blocking inheritance | ||||
|         if self.block_policy_inheritance: | ||||
|             cache_key = f"agent_{self.agent_id}_tasks" | ||||
|  | ||||
|     # https://github.com/Ylianst/MeshCentral/issues/59#issuecomment-521965347 | ||||
|     def get_login_token(self, key, user, action=3): | ||||
|         try: | ||||
|             key = bytes.fromhex(key) | ||||
|             key1 = key[0:48] | ||||
|             key2 = key[48:] | ||||
|             msg = '{{"a":{}, "u":"{}","time":{}}}'.format( | ||||
|                 action, user.lower(), int(time.time()) | ||||
|             ) | ||||
|             iv = get_random_bytes(16) | ||||
|         elif self.policy: | ||||
|             cache_key = f"site_{self.monitoring_type}_{self.site_id}_policy_{self.policy_id}_tasks" | ||||
|  | ||||
|             # sha | ||||
|             h = SHA3_384.new() | ||||
|             h.update(key1) | ||||
|             hashed_msg = h.digest() + msg.encode() | ||||
|         else: | ||||
|             cache_key = f"site_{self.monitoring_type}_{self.site_id}_tasks" | ||||
|  | ||||
|             # aes | ||||
|             cipher = AES.new(key2, AES.MODE_CBC, iv) | ||||
|             msg = cipher.encrypt(pad(hashed_msg, 16)) | ||||
|         cached_tasks = cache.get(cache_key) | ||||
|         if isinstance(cached_tasks, list): | ||||
|             return cached_tasks | ||||
|         else: | ||||
|             # get agent tasks based on policies | ||||
|             tasks = Policy.get_policy_tasks(self) | ||||
|             cache.set(f"site_{self.site_id}_tasks", tasks, 600) | ||||
|             return tasks | ||||
|  | ||||
|             return base64.b64encode(iv + msg, altchars=b"@$").decode("utf-8") | ||||
|         except Exception: | ||||
|             return "err" | ||||
|     def _do_nats_debug(self, agent: "Agent", message: str) -> None: | ||||
|         DebugLog.error(agent=agent, log_type=DebugLogType.AGENT_ISSUES, message=message) | ||||
|  | ||||
|     def _do_nats_debug(self, agent, message): | ||||
|         DebugLog.error(agent=agent, log_type="agent_issues", message=message) | ||||
|  | ||||
|     async def nats_cmd(self, data: dict, timeout: int = 30, wait: bool = True): | ||||
|     async def nats_cmd( | ||||
|         self, data: Dict[Any, Any], timeout: int = 30, wait: bool = True | ||||
|     ) -> Any: | ||||
|         nats_std_port, _ = get_nats_ports() | ||||
|         options = { | ||||
|             "servers": f"tls://{settings.ALLOWED_HOSTS[0]}:4222", | ||||
|             "servers": f"tls://{settings.ALLOWED_HOSTS[0]}:{nats_std_port}", | ||||
|             "user": "tacticalrmm", | ||||
|             "password": settings.SECRET_KEY, | ||||
|             "connect_timeout": 3, | ||||
| @@ -663,7 +816,7 @@ class Agent(BaseAuditModel): | ||||
|                 ret = "timeout" | ||||
|             else: | ||||
|                 try: | ||||
|                     ret = msgpack.loads(msg.data)  # type: ignore | ||||
|                     ret = msgpack.loads(msg.data) | ||||
|                 except Exception as e: | ||||
|                     ret = str(e) | ||||
|                     await sync_to_async(self._do_nats_debug, thread_sensitive=False)( | ||||
| @@ -677,27 +830,59 @@ class Agent(BaseAuditModel): | ||||
|             await nc.flush() | ||||
|             await nc.close() | ||||
|  | ||||
|     def recover(self, mode: str, mesh_uri: str, wait: bool = True) -> tuple[str, bool]: | ||||
|         """ | ||||
|         Return type: tuple(message: str, error: bool) | ||||
|         """ | ||||
|         if mode == "tacagent": | ||||
|             if self.is_posix: | ||||
|                 cmd = "systemctl restart tacticalagent.service" | ||||
|                 shell = 3 | ||||
|             else: | ||||
|                 cmd = "net stop tacticalrmm & taskkill /F /IM tacticalrmm.exe & net start tacticalrmm" | ||||
|                 shell = 1 | ||||
|  | ||||
|             asyncio.run( | ||||
|                 send_command_with_mesh(cmd, mesh_uri, self.mesh_node_id, shell, 0) | ||||
|             ) | ||||
|             return ("ok", False) | ||||
|  | ||||
|         elif mode == "mesh": | ||||
|             data = {"func": "recover", "payload": {"mode": mode}} | ||||
|             if wait: | ||||
|                 r = asyncio.run(self.nats_cmd(data, timeout=20)) | ||||
|                 if r == "ok": | ||||
|                     return ("ok", False) | ||||
|                 else: | ||||
|                     return (str(r), True) | ||||
|             else: | ||||
|                 asyncio.run(self.nats_cmd(data, timeout=20, wait=False)) | ||||
|  | ||||
|             return ("ok", False) | ||||
|  | ||||
|         return ("invalid", True) | ||||
|  | ||||
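A hedged usage sketch of the new recover helper; the mesh_uri value is a placeholder and would normally come from the Mesh Central configuration:

    msg, err = agent.recover("tacagent", mesh_uri="https://mesh.example.com", wait=True)
    if err:
        print(f"recovery failed: {msg}")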
|     @staticmethod | ||||
|     def serialize(agent): | ||||
|     def serialize(agent: "Agent") -> Dict[str, Any]: | ||||
|         # serializes the agent and returns json | ||||
|         from .serializers import AgentAuditSerializer | ||||
|  | ||||
|         return AgentAuditSerializer(agent).data | ||||
|  | ||||
|     def delete_superseded_updates(self): | ||||
|     def delete_superseded_updates(self) -> None: | ||||
|         try: | ||||
|             pks = []  # list of pks to delete | ||||
|             kbs = list(self.winupdates.values_list("kb", flat=True))  # type: ignore | ||||
|             kbs = list(self.winupdates.values_list("kb", flat=True)) | ||||
|             d = Counter(kbs) | ||||
|             dupes = [k for k, v in d.items() if v > 1] | ||||
|  | ||||
|             for dupe in dupes: | ||||
|                 titles = self.winupdates.filter(kb=dupe).values_list("title", flat=True)  # type: ignore | ||||
|                 titles = self.winupdates.filter(kb=dupe).values_list("title", flat=True) | ||||
|                 # extract the version from the title and sort from oldest to newest | ||||
|                 # skip if no version info is available therefore nothing to parse | ||||
|                 try: | ||||
|                     vers = [ | ||||
|                         re.search(r"\(Version(.*?)\)", i).group(1).strip()  # type: ignore | ||||
|                         re.search(r"\(Version(.*?)\)", i).group(1).strip() | ||||
|                         for i in titles | ||||
|                     ] | ||||
|                     sorted_vers = sorted(vers, key=LooseVersion) | ||||
| @@ -705,16 +890,18 @@ class Agent(BaseAuditModel): | ||||
|                     continue | ||||
|                 # append all but the latest version to our list of pks to delete | ||||
|                 for ver in sorted_vers[:-1]: | ||||
|                     q = self.winupdates.filter(kb=dupe).filter(title__contains=ver)  # type: ignore | ||||
|                     q = self.winupdates.filter(kb=dupe).filter(title__contains=ver) | ||||
|                     pks.append(q.first().pk) | ||||
|  | ||||
|             pks = list(set(pks)) | ||||
|             self.winupdates.filter(pk__in=pks).delete()  # type: ignore | ||||
|             self.winupdates.filter(pk__in=pks).delete() | ||||
|         except: | ||||
|             pass | ||||
|  | ||||
|     def should_create_alert(self, alert_template=None): | ||||
|         return ( | ||||
|     def should_create_alert( | ||||
|         self, alert_template: "Optional[AlertTemplate]" = None | ||||
|     ) -> bool: | ||||
|         return bool( | ||||
|             self.overdue_dashboard_alert | ||||
|             or self.overdue_email_alert | ||||
|             or self.overdue_text_alert | ||||
| @@ -728,11 +915,10 @@ class Agent(BaseAuditModel): | ||||
|             ) | ||||
|         ) | ||||
|  | ||||
|     def send_outage_email(self): | ||||
|         from core.models import CoreSettings | ||||
|     def send_outage_email(self) -> None: | ||||
|         CORE = get_core_settings() | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         CORE.send_mail(  # type: ignore | ||||
|         CORE.send_mail( | ||||
|             f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue", | ||||
|             ( | ||||
|                 f"Data has not been received from client {self.client.name}, " | ||||
| @@ -743,11 +929,10 @@ class Agent(BaseAuditModel): | ||||
|             alert_template=self.alert_template, | ||||
|         ) | ||||
|  | ||||
|     def send_recovery_email(self): | ||||
|         from core.models import CoreSettings | ||||
|     def send_recovery_email(self) -> None: | ||||
|         CORE = get_core_settings() | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         CORE.send_mail(  # type: ignore | ||||
|         CORE.send_mail( | ||||
|             f"{self.client.name}, {self.site.name}, {self.hostname} - data received", | ||||
|             ( | ||||
|                 f"Data has been received from client {self.client.name}, " | ||||
| @@ -758,20 +943,18 @@ class Agent(BaseAuditModel): | ||||
|             alert_template=self.alert_template, | ||||
|         ) | ||||
|  | ||||
|     def send_outage_sms(self): | ||||
|         from core.models import CoreSettings | ||||
|     def send_outage_sms(self) -> None: | ||||
|         CORE = get_core_settings() | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         CORE.send_sms(  # type: ignore | ||||
|         CORE.send_sms( | ||||
|             f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue", | ||||
|             alert_template=self.alert_template, | ||||
|         ) | ||||
|  | ||||
|     def send_recovery_sms(self): | ||||
|         from core.models import CoreSettings | ||||
|     def send_recovery_sms(self) -> None: | ||||
|         CORE = get_core_settings() | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         CORE.send_sms(  # type: ignore | ||||
|         CORE.send_sms( | ||||
|             f"{self.client.name}, {self.site.name}, {self.hostname} - data received", | ||||
|             alert_template=self.alert_template, | ||||
|         ) | ||||
| @@ -795,7 +978,7 @@ class Note(models.Model): | ||||
|     note = models.TextField(null=True, blank=True) | ||||
|     entry_time = models.DateTimeField(auto_now_add=True) | ||||
|  | ||||
|     def __str__(self): | ||||
|     def __str__(self) -> str: | ||||
|         return self.agent.hostname | ||||
|  | ||||
|  | ||||
| @@ -823,44 +1006,35 @@ class AgentCustomField(models.Model): | ||||
|         default=list, | ||||
|     ) | ||||
|  | ||||
|     def __str__(self): | ||||
|     def __str__(self) -> str: | ||||
|         return self.field.name | ||||
|  | ||||
|     @property | ||||
|     def value(self): | ||||
|         if self.field.type == "multiple": | ||||
|             return self.multiple_value | ||||
|         elif self.field.type == "checkbox": | ||||
|     def value(self) -> Union[List[Any], bool, str]: | ||||
|         if self.field.type == CustomFieldType.MULTIPLE: | ||||
|             return cast(List[str], self.multiple_value) | ||||
|         elif self.field.type == CustomFieldType.CHECKBOX: | ||||
|             return self.bool_value | ||||
|         else: | ||||
|             return self.string_value | ||||
|             return cast(str, self.string_value) | ||||
|  | ||||
|     def save_to_field(self, value): | ||||
|     def save_to_field(self, value: Union[List[Any], bool, str]) -> None: | ||||
|         if self.field.type in [ | ||||
|             "text", | ||||
|             "number", | ||||
|             "single", | ||||
|             "datetime", | ||||
|             CustomFieldType.TEXT, | ||||
|             CustomFieldType.NUMBER, | ||||
|             CustomFieldType.SINGLE, | ||||
|             CustomFieldType.DATETIME, | ||||
|         ]: | ||||
|             self.string_value = value | ||||
|             self.string_value = cast(str, value) | ||||
|             self.save() | ||||
|         elif self.field.type == "multiple": | ||||
|         elif self.field.type == CustomFieldType.MULTIPLE: | ||||
|             self.multiple_value = value.split(",") | ||||
|             self.save() | ||||
|         elif self.field.type == "checkbox": | ||||
|         elif self.field.type == CustomFieldType.CHECKBOX: | ||||
|             self.bool_value = bool(value) | ||||
|             self.save() | ||||
|  | ||||
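These hunks replace bare string literals with members of a `CustomFieldType` choices enum (presumably defined alongside the other enums in `tacticalrmm/constants.py`; the same pattern is applied to `AgentHistoryType` further down). A rough sketch of such an enum, with the string values taken from the literals being replaced and the display labels assumed:

    from django.db import models

    class CustomFieldType(models.TextChoices):
        TEXT = "text", "Text"
        NUMBER = "number", "Number"
        SINGLE = "single", "Single"
        MULTIPLE = "multiple", "Multiple"
        CHECKBOX = "checkbox", "Checkbox"
        DATETIME = "datetime", "DateTime"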
|  | ||||
| AGENT_HISTORY_TYPES = ( | ||||
|     ("task_run", "Task Run"), | ||||
|     ("script_run", "Script Run"), | ||||
|     ("cmd_run", "CMD Run"), | ||||
| ) | ||||
|  | ||||
| AGENT_HISTORY_STATUS = (("success", "Success"), ("failure", "Failure")) | ||||
|  | ||||
|  | ||||
| class AgentHistory(models.Model): | ||||
|     objects = PermissionQuerySet.as_manager() | ||||
|  | ||||
| @@ -870,13 +1044,12 @@ class AgentHistory(models.Model): | ||||
|         on_delete=models.CASCADE, | ||||
|     ) | ||||
|     time = models.DateTimeField(auto_now_add=True) | ||||
|     type = models.CharField( | ||||
|         max_length=50, choices=AGENT_HISTORY_TYPES, default="cmd_run" | ||||
|     type: "AgentHistoryType" = models.CharField( | ||||
|         max_length=50, | ||||
|         choices=AgentHistoryType.choices, | ||||
|         default=AgentHistoryType.CMD_RUN, | ||||
|     ) | ||||
|     command = models.TextField(null=True, blank=True, default="") | ||||
|     status = models.CharField( | ||||
|         max_length=50, choices=AGENT_HISTORY_STATUS, default="success" | ||||
|     ) | ||||
|     username = models.CharField(max_length=255, default="system") | ||||
|     results = models.TextField(null=True, blank=True) | ||||
|     script = models.ForeignKey( | ||||
| @@ -888,5 +1061,5 @@ class AgentHistory(models.Model): | ||||
|     ) | ||||
|     script_results = models.JSONField(null=True, blank=True) | ||||
|  | ||||
|     def __str__(self): | ||||
|     def __str__(self) -> str: | ||||
|         return f"{self.agent.hostname} - {self.type}" | ||||
|   | ||||
| @@ -4,7 +4,7 @@ from tacticalrmm.permissions import _has_perm, _has_perm_on_agent | ||||
|  | ||||
|  | ||||
| class AgentPerms(permissions.BasePermission): | ||||
|     def has_permission(self, r, view): | ||||
|     def has_permission(self, r, view) -> bool: | ||||
|         if r.method == "GET": | ||||
|             if "agent_id" in view.kwargs.keys(): | ||||
|                 return _has_perm(r, "can_list_agents") and _has_perm_on_agent( | ||||
| @@ -26,73 +26,76 @@ class AgentPerms(permissions.BasePermission): | ||||
|  | ||||
|  | ||||
| class RecoverAgentPerms(permissions.BasePermission): | ||||
|     def has_permission(self, r, view): | ||||
|     def has_permission(self, r, view) -> bool: | ||||
|         if "agent_id" not in view.kwargs.keys(): | ||||
|             return _has_perm(r, "can_recover_agents") | ||||
|  | ||||
|         return _has_perm(r, "can_recover_agents") and _has_perm_on_agent( | ||||
|             r.user, view.kwargs["agent_id"] | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class MeshPerms(permissions.BasePermission): | ||||
|     def has_permission(self, r, view): | ||||
|     def has_permission(self, r, view) -> bool: | ||||
|         return _has_perm(r, "can_use_mesh") and _has_perm_on_agent( | ||||
|             r.user, view.kwargs["agent_id"] | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class UpdateAgentPerms(permissions.BasePermission): | ||||
|     def has_permission(self, r, view): | ||||
|     def has_permission(self, r, view) -> bool: | ||||
|         return _has_perm(r, "can_update_agents") | ||||
|  | ||||
|  | ||||
| class PingAgentPerms(permissions.BasePermission): | ||||
|     def has_permission(self, r, view): | ||||
|     def has_permission(self, r, view) -> bool: | ||||
|         return _has_perm(r, "can_ping_agents") and _has_perm_on_agent( | ||||
|             r.user, view.kwargs["agent_id"] | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class ManageProcPerms(permissions.BasePermission): | ||||
|     def has_permission(self, r, view): | ||||
|     def has_permission(self, r, view) -> bool: | ||||
|         return _has_perm(r, "can_manage_procs") and _has_perm_on_agent( | ||||
|             r.user, view.kwargs["agent_id"] | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class EvtLogPerms(permissions.BasePermission): | ||||
|     def has_permission(self, r, view): | ||||
|     def has_permission(self, r, view) -> bool: | ||||
|         return _has_perm(r, "can_view_eventlogs") and _has_perm_on_agent( | ||||
|             r.user, view.kwargs["agent_id"] | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class SendCMDPerms(permissions.BasePermission): | ||||
|     def has_permission(self, r, view): | ||||
|     def has_permission(self, r, view) -> bool: | ||||
|         return _has_perm(r, "can_send_cmd") and _has_perm_on_agent( | ||||
|             r.user, view.kwargs["agent_id"] | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class RebootAgentPerms(permissions.BasePermission): | ||||
|     def has_permission(self, r, view): | ||||
|     def has_permission(self, r, view) -> bool: | ||||
|         return _has_perm(r, "can_reboot_agents") and _has_perm_on_agent( | ||||
|             r.user, view.kwargs["agent_id"] | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class InstallAgentPerms(permissions.BasePermission): | ||||
|     def has_permission(self, r, view): | ||||
|     def has_permission(self, r, view) -> bool: | ||||
|         return _has_perm(r, "can_install_agents") | ||||
|  | ||||
|  | ||||
| class RunScriptPerms(permissions.BasePermission): | ||||
|     def has_permission(self, r, view): | ||||
|     def has_permission(self, r, view) -> bool: | ||||
|         return _has_perm(r, "can_run_scripts") and _has_perm_on_agent( | ||||
|             r.user, view.kwargs["agent_id"] | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class AgentNotesPerms(permissions.BasePermission): | ||||
|     def has_permission(self, r, view): | ||||
|     def has_permission(self, r, view) -> bool: | ||||
|  | ||||
|         # permissions for GET /agents/notes/ endpoint | ||||
|         if r.method == "GET": | ||||
| @@ -109,12 +112,12 @@ class AgentNotesPerms(permissions.BasePermission): | ||||
|  | ||||
|  | ||||
| class RunBulkPerms(permissions.BasePermission): | ||||
|     def has_permission(self, r, view): | ||||
|     def has_permission(self, r, view) -> bool: | ||||
|         return _has_perm(r, "can_run_bulk") | ||||
|  | ||||
|  | ||||
| class AgentHistoryPerms(permissions.BasePermission): | ||||
|     def has_permission(self, r, view): | ||||
|     def has_permission(self, r, view) -> bool: | ||||
|         if "agent_id" in view.kwargs.keys(): | ||||
|             return _has_perm(r, "can_list_agent_history") and _has_perm_on_agent( | ||||
|                 r.user, view.kwargs["agent_id"] | ||||
|   | ||||
| @@ -1,5 +1,7 @@ | ||||
| import pytz | ||||
| from rest_framework import serializers | ||||
|  | ||||
| from tacticalrmm.constants import AGENT_STATUS_ONLINE | ||||
| from winupdate.serializers import WinUpdatePolicySerializer | ||||
|  | ||||
| from .models import Agent, AgentCustomField, AgentHistory, Note | ||||
| @@ -79,13 +81,15 @@ class AgentSerializer(serializers.ModelSerializer): | ||||
| class AgentTableSerializer(serializers.ModelSerializer): | ||||
|     status = serializers.ReadOnlyField() | ||||
|     checks = serializers.ReadOnlyField() | ||||
|     last_seen = serializers.SerializerMethodField() | ||||
|     client_name = serializers.ReadOnlyField(source="client.name") | ||||
|     site_name = serializers.ReadOnlyField(source="site.name") | ||||
|     logged_username = serializers.SerializerMethodField() | ||||
|     italic = serializers.SerializerMethodField() | ||||
|     policy = serializers.ReadOnlyField(source="policy.id") | ||||
|     alert_template = serializers.SerializerMethodField() | ||||
|     last_seen = serializers.ReadOnlyField() | ||||
|     pending_actions_count = serializers.ReadOnlyField() | ||||
|     has_patches_pending = serializers.ReadOnlyField() | ||||
|  | ||||
|     def get_alert_template(self, obj): | ||||
|  | ||||
| @@ -99,16 +103,8 @@ class AgentTableSerializer(serializers.ModelSerializer): | ||||
|                 "always_alert": obj.alert_template.agent_always_alert, | ||||
|             } | ||||
|  | ||||
|     def get_last_seen(self, obj) -> str: | ||||
|         if obj.time_zone is not None: | ||||
|             agent_tz = pytz.timezone(obj.time_zone) | ||||
|         else: | ||||
|             agent_tz = self.context["default_tz"] | ||||
|  | ||||
|         return obj.last_seen.astimezone(agent_tz).strftime("%m %d %Y %H:%M") | ||||
|  | ||||
|     def get_logged_username(self, obj) -> str: | ||||
|         if obj.logged_in_username == "None" and obj.status == "online": | ||||
|         if obj.logged_in_username == "None" and obj.status == AGENT_STATUS_ONLINE: | ||||
|             return obj.last_logged_in_user | ||||
|         elif obj.logged_in_username != "None": | ||||
|             return obj.logged_in_username | ||||
| @@ -116,7 +112,7 @@ class AgentTableSerializer(serializers.ModelSerializer): | ||||
|             return "-" | ||||
|  | ||||
|     def get_italic(self, obj) -> bool: | ||||
|         return obj.logged_in_username == "None" and obj.status == "online" | ||||
|         return obj.logged_in_username == "None" and obj.status == AGENT_STATUS_ONLINE | ||||
|  | ||||
|     class Meta: | ||||
|         model = Agent | ||||
| @@ -129,7 +125,6 @@ class AgentTableSerializer(serializers.ModelSerializer): | ||||
|             "monitoring_type", | ||||
|             "description", | ||||
|             "needs_reboot", | ||||
|             "has_patches_pending", | ||||
|             "pending_actions_count", | ||||
|             "status", | ||||
|             "overdue_text_alert", | ||||
| @@ -145,6 +140,7 @@ class AgentTableSerializer(serializers.ModelSerializer): | ||||
|             "block_policy_inheritance", | ||||
|             "plat", | ||||
|             "goarch", | ||||
|             "has_patches_pending", | ||||
|         ] | ||||
|         depth = 2 | ||||
|  | ||||
| @@ -181,17 +177,12 @@ class AgentNoteSerializer(serializers.ModelSerializer): | ||||
|  | ||||
|  | ||||
| class AgentHistorySerializer(serializers.ModelSerializer): | ||||
|     time = serializers.SerializerMethodField(read_only=True) | ||||
|     script_name = serializers.ReadOnlyField(source="script.name") | ||||
|  | ||||
|     class Meta: | ||||
|         model = AgentHistory | ||||
|         fields = "__all__" | ||||
|  | ||||
|     def get_time(self, history): | ||||
|         tz = self.context["default_tz"] | ||||
|         return history.time.astimezone(tz).strftime("%m %d %Y %H:%M:%S") | ||||
|  | ||||
|  | ||||
| class AgentAuditSerializer(serializers.ModelSerializer): | ||||
|     class Meta: | ||||
|   | ||||
| @@ -1,120 +1,52 @@ | ||||
| import asyncio | ||||
| import datetime as dt | ||||
| import random | ||||
| from time import sleep | ||||
| from typing import Union | ||||
| from typing import TYPE_CHECKING, Optional | ||||
|  | ||||
| from django.core.management import call_command | ||||
| from django.utils import timezone as djangotime | ||||
|  | ||||
| from agents.models import Agent | ||||
| from agents.utils import get_agent_url | ||||
| from core.models import CoreSettings | ||||
| from django.conf import settings | ||||
| from django.utils import timezone as djangotime | ||||
| from logs.models import DebugLog, PendingAction | ||||
| from packaging import version as pyver | ||||
| from core.utils import get_core_settings | ||||
| from logs.models import DebugLog | ||||
| from scripts.models import Script | ||||
|  | ||||
| from tacticalrmm.celery import app | ||||
| from tacticalrmm.constants import ( | ||||
|     AGENT_DEFER, | ||||
|     AGENT_STATUS_OVERDUE, | ||||
|     CheckStatus, | ||||
|     DebugLogType, | ||||
| ) | ||||
|  | ||||
|  | ||||
| def agent_update(agent_id: str, force: bool = False) -> str: | ||||
|  | ||||
|     agent = Agent.objects.get(agent_id=agent_id) | ||||
|  | ||||
|     if pyver.parse(agent.version) <= pyver.parse("1.3.0"): | ||||
|         return "not supported" | ||||
|  | ||||
|     # skip if we can't determine the arch | ||||
|     if agent.arch is None: | ||||
|         DebugLog.warning( | ||||
|             agent=agent, | ||||
|             log_type="agent_issues", | ||||
|             message=f"Unable to determine arch on {agent.hostname}({agent.agent_id}). Skipping agent update.", | ||||
|         ) | ||||
|         return "noarch" | ||||
|  | ||||
|     version = settings.LATEST_AGENT_VER | ||||
|     inno = agent.win_inno_exe | ||||
|     url = get_agent_url(agent.arch, agent.plat) | ||||
|  | ||||
|     if not force: | ||||
|         if agent.pendingactions.filter( | ||||
|             action_type="agentupdate", status="pending" | ||||
|         ).exists(): | ||||
|             agent.pendingactions.filter( | ||||
|                 action_type="agentupdate", status="pending" | ||||
|             ).delete() | ||||
|  | ||||
|         PendingAction.objects.create( | ||||
|             agent=agent, | ||||
|             action_type="agentupdate", | ||||
|             details={ | ||||
|                 "url": url, | ||||
|                 "version": version, | ||||
|                 "inno": inno, | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     nats_data = { | ||||
|         "func": "agentupdate", | ||||
|         "payload": { | ||||
|             "url": url, | ||||
|             "version": version, | ||||
|             "inno": inno, | ||||
|         }, | ||||
|     } | ||||
|     asyncio.run(agent.nats_cmd(nats_data, wait=False)) | ||||
|     return "created" | ||||
| if TYPE_CHECKING: | ||||
|     from django.db.models.query import QuerySet | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def force_code_sign(agent_ids: list[str]) -> None: | ||||
|     chunks = (agent_ids[i : i + 50] for i in range(0, len(agent_ids), 50)) | ||||
|     for chunk in chunks: | ||||
|         for agent_id in chunk: | ||||
|             agent_update(agent_id=agent_id, force=True) | ||||
|             sleep(0.05) | ||||
|         sleep(4) | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def send_agent_update_task(agent_ids: list[str]) -> None: | ||||
|     chunks = (agent_ids[i : i + 50] for i in range(0, len(agent_ids), 50)) | ||||
|     for chunk in chunks: | ||||
|         for agent_id in chunk: | ||||
|             agent_update(agent_id) | ||||
|             sleep(0.05) | ||||
|         sleep(4) | ||||
| def send_agent_update_task(*, agent_ids: list[str], token: str, force: bool) -> None: | ||||
|     agents: "QuerySet[Agent]" = Agent.objects.defer(*AGENT_DEFER).filter( | ||||
|         agent_id__in=agent_ids | ||||
|     ) | ||||
|     for agent in agents: | ||||
|         agent.do_update(token=token, force=force) | ||||
|  | ||||
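The rewritten task takes keyword-only arguments and defers the actual work to `Agent.do_update` per agent; callers such as the `update_agents` management command and the update view (exercised by the tests further down) queue it roughly like this:

    # illustrative call; the agent ids, token value and force flag depend on the caller
    send_agent_update_task.delay(
        agent_ids=["agent-id-1", "agent-id-2"],
        token="",      # code-sign token, empty string when none is configured
        force=False,
    )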
|  | ||||
| @app.task | ||||
| def auto_self_agent_update_task() -> None: | ||||
|     core = CoreSettings.objects.first() | ||||
|     if not core.agent_auto_update:  # type:ignore | ||||
|         return | ||||
|  | ||||
|     q = Agent.objects.only("agent_id", "version") | ||||
|     agent_ids: list[str] = [ | ||||
|         i.agent_id | ||||
|         for i in q | ||||
|         if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER) | ||||
|     ] | ||||
|  | ||||
|     chunks = (agent_ids[i : i + 30] for i in range(0, len(agent_ids), 30)) | ||||
|     for chunk in chunks: | ||||
|         for agent_id in chunk: | ||||
|             agent_update(agent_id) | ||||
|             sleep(0.05) | ||||
|         sleep(4) | ||||
|     call_command("update_agents") | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def agent_outage_email_task(pk: int, alert_interval: Union[float, None] = None) -> str: | ||||
| def agent_outage_email_task(pk: int, alert_interval: Optional[float] = None) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|     try: | ||||
|         alert = Alert.objects.get(pk=pk) | ||||
|     except Alert.DoesNotExist: | ||||
|         return "alert not found" | ||||
|  | ||||
|     if not alert.email_sent: | ||||
|         sleep(random.randint(1, 15)) | ||||
|         sleep(random.randint(1, 5)) | ||||
|         alert.agent.send_outage_email() | ||||
|         alert.email_sent = djangotime.now() | ||||
|         alert.save(update_fields=["email_sent"]) | ||||
| @@ -123,7 +55,7 @@ def agent_outage_email_task(pk: int, alert_interval: Union[float, None] = None) | ||||
|             # send an email only if the last email sent is older than alert interval | ||||
|             delta = djangotime.now() - dt.timedelta(days=alert_interval) | ||||
|             if alert.email_sent < delta: | ||||
|                 sleep(random.randint(1, 10)) | ||||
|                 sleep(random.randint(1, 5)) | ||||
|                 alert.agent.send_outage_email() | ||||
|                 alert.email_sent = djangotime.now() | ||||
|                 alert.save(update_fields=["email_sent"]) | ||||
| @@ -135,8 +67,13 @@ def agent_outage_email_task(pk: int, alert_interval: Union[float, None] = None) | ||||
| def agent_recovery_email_task(pk: int) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     sleep(random.randint(1, 15)) | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|     sleep(random.randint(1, 5)) | ||||
|  | ||||
|     try: | ||||
|         alert = Alert.objects.get(pk=pk) | ||||
|     except Alert.DoesNotExist: | ||||
|         return "alert not found" | ||||
|  | ||||
|     alert.agent.send_recovery_email() | ||||
|     alert.resolved_email_sent = djangotime.now() | ||||
|     alert.save(update_fields=["resolved_email_sent"]) | ||||
| @@ -145,13 +82,16 @@ def agent_recovery_email_task(pk: int) -> str: | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def agent_outage_sms_task(pk: int, alert_interval: Union[float, None] = None) -> str: | ||||
| def agent_outage_sms_task(pk: int, alert_interval: Optional[float] = None) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|     try: | ||||
|         alert = Alert.objects.get(pk=pk) | ||||
|     except Alert.DoesNotExist: | ||||
|         return "alert not found" | ||||
|  | ||||
|     if not alert.sms_sent: | ||||
|         sleep(random.randint(1, 15)) | ||||
|         sleep(random.randint(1, 3)) | ||||
|         alert.agent.send_outage_sms() | ||||
|         alert.sms_sent = djangotime.now() | ||||
|         alert.save(update_fields=["sms_sent"]) | ||||
| @@ -160,7 +100,7 @@ def agent_outage_sms_task(pk: int, alert_interval: Union[float, None] = None) -> | ||||
|             # send an sms only if the last sms sent is older than alert interval | ||||
|             delta = djangotime.now() - dt.timedelta(days=alert_interval) | ||||
|             if alert.sms_sent < delta: | ||||
|                 sleep(random.randint(1, 10)) | ||||
|                 sleep(random.randint(1, 3)) | ||||
|                 alert.agent.send_outage_sms() | ||||
|                 alert.sms_sent = djangotime.now() | ||||
|                 alert.save(update_fields=["sms_sent"]) | ||||
| @@ -173,7 +113,11 @@ def agent_recovery_sms_task(pk: int) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     sleep(random.randint(1, 3)) | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|     try: | ||||
|         alert = Alert.objects.get(pk=pk) | ||||
|     except Alert.DoesNotExist: | ||||
|         return "alert not found" | ||||
|  | ||||
|     alert.agent.send_recovery_sms() | ||||
|     alert.resolved_sms_sent = djangotime.now() | ||||
|     alert.save(update_fields=["resolved_sms_sent"]) | ||||
| @@ -197,7 +141,7 @@ def agent_outages_task() -> None: | ||||
|     ) | ||||
|  | ||||
|     for agent in agents: | ||||
|         if agent.status == "overdue": | ||||
|         if agent.status == AGENT_STATUS_OVERDUE: | ||||
|             Alert.handle_alert_failure(agent) | ||||
|  | ||||
|  | ||||
| @@ -209,6 +153,7 @@ def run_script_email_results_task( | ||||
|     emails: list[str], | ||||
|     args: list[str] = [], | ||||
|     history_pk: int = 0, | ||||
|     run_as_user: bool = False, | ||||
| ): | ||||
|     agent = Agent.objects.get(pk=agentpk) | ||||
|     script = Script.objects.get(pk=scriptpk) | ||||
| @@ -219,16 +164,17 @@ def run_script_email_results_task( | ||||
|         timeout=nats_timeout, | ||||
|         wait=True, | ||||
|         history_pk=history_pk, | ||||
|         run_as_user=run_as_user, | ||||
|     ) | ||||
|     if r == "timeout": | ||||
|         DebugLog.error( | ||||
|             agent=agent, | ||||
|             log_type="scripting", | ||||
|             log_type=DebugLogType.SCRIPTING, | ||||
|             message=f"{agent.hostname}({agent.pk}) timed out running script.", | ||||
|         ) | ||||
|         return | ||||
|  | ||||
|     CORE = CoreSettings.objects.first() | ||||
|     CORE = get_core_settings() | ||||
|     subject = f"{agent.hostname} {script.name} Results" | ||||
|     exec_time = "{:.4f}".format(r["execution_time"]) | ||||
|     body = ( | ||||
| @@ -241,25 +187,21 @@ def run_script_email_results_task( | ||||
|  | ||||
|     msg = EmailMessage() | ||||
|     msg["Subject"] = subject | ||||
|     msg["From"] = CORE.smtp_from_email  # type:ignore | ||||
|     msg["From"] = CORE.smtp_from_email | ||||
|  | ||||
|     if emails: | ||||
|         msg["To"] = ", ".join(emails) | ||||
|     else: | ||||
|         msg["To"] = ", ".join(CORE.email_alert_recipients)  # type:ignore | ||||
|         msg["To"] = ", ".join(CORE.email_alert_recipients) | ||||
|  | ||||
|     msg.set_content(body) | ||||
|  | ||||
|     try: | ||||
|         with smtplib.SMTP( | ||||
|             CORE.smtp_host, CORE.smtp_port, timeout=20  # type:ignore | ||||
|         ) as server:  # type:ignore | ||||
|             if CORE.smtp_requires_auth:  # type:ignore | ||||
|         with smtplib.SMTP(CORE.smtp_host, CORE.smtp_port, timeout=20) as server: | ||||
|             if CORE.smtp_requires_auth: | ||||
|                 server.ehlo() | ||||
|                 server.starttls() | ||||
|                 server.login( | ||||
|                     CORE.smtp_host_user, CORE.smtp_host_password  # type:ignore | ||||
|                 )  # type:ignore | ||||
|                 server.login(CORE.smtp_host_user, CORE.smtp_host_password) | ||||
|                 server.send_message(msg) | ||||
|                 server.quit() | ||||
|             else: | ||||
| @@ -271,18 +213,22 @@ def run_script_email_results_task( | ||||
|  | ||||
| @app.task | ||||
| def clear_faults_task(older_than_days: int) -> None: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     # https://github.com/amidaware/tacticalrmm/issues/484 | ||||
|     agents = Agent.objects.exclude(last_seen__isnull=True).filter( | ||||
|         last_seen__lt=djangotime.now() - djangotime.timedelta(days=older_than_days) | ||||
|     ) | ||||
|     for agent in agents: | ||||
|         if agent.agentchecks.exists(): | ||||
|             for check in agent.agentchecks.all(): | ||||
|                 # reset check status | ||||
|                 check.status = "passing" | ||||
|                 check.save(update_fields=["status"]) | ||||
|                 if check.alert.filter(resolved=False).exists(): | ||||
|                     check.alert.get(resolved=False).resolve() | ||||
|         for check in agent.get_checks_with_policies(): | ||||
|             # reset check status | ||||
|             if check.check_result: | ||||
|                 check.check_result.status = CheckStatus.PASSING | ||||
|                 check.check_result.save(update_fields=["status"]) | ||||
|             if check.alert.filter(agent=agent, resolved=False).exists(): | ||||
|                 alert = Alert.create_or_return_check_alert(check, agent=agent) | ||||
|                 if alert: | ||||
|                     alert.resolve() | ||||
|  | ||||
|         # reset overdue alerts | ||||
|         agent.overdue_email_alert = False | ||||
| @@ -306,3 +252,8 @@ def prune_agent_history(older_than_days: int) -> str: | ||||
|     ).delete() | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def bulk_recover_agents_task() -> None: | ||||
|     call_command("bulk_restart_agents") | ||||
|   | ||||
							
								
								
									
api/tacticalrmm/agents/tests/__init__.py (new file, 0 lines)

api/tacticalrmm/agents/tests/test_agent_installs.py (new file, 106 lines)
							| @@ -0,0 +1,106 @@ | ||||
| from unittest.mock import patch | ||||
|  | ||||
| from rest_framework.response import Response | ||||
|  | ||||
| from tacticalrmm.test import TacticalTestCase | ||||
|  | ||||
|  | ||||
| class TestAgentInstalls(TacticalTestCase): | ||||
|     def setUp(self) -> None: | ||||
|         self.authenticate() | ||||
|         self.setup_coresettings() | ||||
|         self.setup_base_instance() | ||||
|  | ||||
|     @patch("agents.utils.generate_linux_install") | ||||
|     @patch("knox.models.AuthToken.objects.create") | ||||
|     @patch("tacticalrmm.utils.generate_winagent_exe") | ||||
|     @patch("core.utils.token_is_valid") | ||||
|     @patch("agents.utils.get_agent_url") | ||||
|     def test_install_agent( | ||||
|         self, | ||||
|         mock_agent_url, | ||||
|         mock_token_valid, | ||||
|         mock_gen_win_exe, | ||||
|         mock_auth, | ||||
|         mock_linux_install, | ||||
|     ): | ||||
|         mock_agent_url.return_value = "https://example.com" | ||||
|         mock_token_valid.return_value = "", False | ||||
|         mock_gen_win_exe.return_value = Response("ok") | ||||
|         mock_auth.return_value = "", "token" | ||||
|         mock_linux_install.return_value = Response("ok") | ||||
|  | ||||
|         url = "/agents/installer/" | ||||
|  | ||||
|         # test windows dynamic exe | ||||
|         data = { | ||||
|             "installMethod": "exe", | ||||
|             "client": self.site2.client.pk, | ||||
|             "site": self.site2.pk, | ||||
|             "expires": 24, | ||||
|             "agenttype": "server", | ||||
|             "power": 0, | ||||
|             "rdp": 1, | ||||
|             "ping": 0, | ||||
|             "goarch": "amd64", | ||||
|             "api": "https://api.example.com", | ||||
|             "fileName": "rmm-client-site-server.exe", | ||||
|             "plat": "windows", | ||||
|         } | ||||
|  | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|  | ||||
|         mock_gen_win_exe.assert_called_with( | ||||
|             client=self.site2.client.pk, | ||||
|             site=self.site2.pk, | ||||
|             agent_type="server", | ||||
|             rdp=1, | ||||
|             ping=0, | ||||
|             power=0, | ||||
|             goarch="amd64", | ||||
|             token="token", | ||||
|             api="https://api.example.com", | ||||
|             file_name="rmm-client-site-server.exe", | ||||
|         ) | ||||
|  | ||||
|         # test linux no code sign | ||||
|         data["plat"] = "linux" | ||||
|         data["installMethod"] = "bash" | ||||
|         data["rdp"] = 0 | ||||
|         data["agenttype"] = "workstation" | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 400) | ||||
|  | ||||
|         # test linux | ||||
|         mock_token_valid.return_value = "token123", True | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         mock_linux_install.assert_called_with( | ||||
|             client=str(self.site2.client.pk), | ||||
|             site=str(self.site2.pk), | ||||
|             agent_type="workstation", | ||||
|             arch="amd64", | ||||
|             token="token", | ||||
|             api="https://api.example.com", | ||||
|             download_url="https://example.com", | ||||
|         ) | ||||
|  | ||||
|         # test manual | ||||
|         data["rdp"] = 1 | ||||
|         data["installMethod"] = "manual" | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertIn("rdp", r.json()["cmd"]) | ||||
|         self.assertNotIn("power", r.json()["cmd"]) | ||||
|  | ||||
|         data.update({"ping": 1, "power": 1}) | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertIn("power", r.json()["cmd"]) | ||||
|         self.assertIn("ping", r.json()["cmd"]) | ||||
|  | ||||
|         # test powershell | ||||
|         data["installMethod"] = "powershell" | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|  | ||||
|         self.check_not_authenticated("post", url) | ||||
							
								
								
									
api/tacticalrmm/agents/tests/test_agent_update.py (new file, 313 lines)
							| @@ -0,0 +1,313 @@ | ||||
| from unittest.mock import patch | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.core.management import call_command | ||||
| from model_bakery import baker | ||||
| from packaging import version as pyver | ||||
|  | ||||
| from agents.models import Agent | ||||
| from agents.tasks import auto_self_agent_update_task, send_agent_update_task | ||||
| from logs.models import PendingAction | ||||
| from tacticalrmm.constants import ( | ||||
|     AGENT_DEFER, | ||||
|     AgentMonType, | ||||
|     AgentPlat, | ||||
|     GoArch, | ||||
|     PAAction, | ||||
|     PAStatus, | ||||
| ) | ||||
| from tacticalrmm.test import TacticalTestCase | ||||
|  | ||||
|  | ||||
| class TestAgentUpdate(TacticalTestCase): | ||||
|     def setUp(self) -> None: | ||||
|         self.authenticate() | ||||
|         self.setup_coresettings() | ||||
|         self.setup_base_instance() | ||||
|  | ||||
|     @patch("agents.management.commands.update_agents.send_agent_update_task.delay") | ||||
|     @patch("agents.management.commands.update_agents.token_is_valid") | ||||
|     @patch("agents.management.commands.update_agents.get_core_settings") | ||||
|     def test_update_agents_mgmt_command(self, mock_core, mock_token, mock_update): | ||||
|         mock_token.return_value = ("token123", True) | ||||
|  | ||||
|         baker.make_recipe( | ||||
|             "agents.online_agent", | ||||
|             site=self.site1, | ||||
|             monitoring_type=AgentMonType.SERVER, | ||||
|             plat=AgentPlat.WINDOWS, | ||||
|             version="2.0.3", | ||||
|             _quantity=6, | ||||
|         ) | ||||
|  | ||||
|         baker.make_recipe( | ||||
|             "agents.online_agent", | ||||
|             site=self.site3, | ||||
|             monitoring_type=AgentMonType.WORKSTATION, | ||||
|             plat=AgentPlat.LINUX, | ||||
|             version="2.0.3", | ||||
|             _quantity=5, | ||||
|         ) | ||||
|  | ||||
|         baker.make_recipe( | ||||
|             "agents.online_agent", | ||||
|             site=self.site2, | ||||
|             monitoring_type=AgentMonType.SERVER, | ||||
|             plat=AgentPlat.WINDOWS, | ||||
|             version=settings.LATEST_AGENT_VER, | ||||
|             _quantity=8, | ||||
|         ) | ||||
|  | ||||
|         mock_core.return_value.agent_auto_update = False | ||||
|         call_command("update_agents") | ||||
|         mock_update.assert_not_called() | ||||
|  | ||||
|         mock_core.return_value.agent_auto_update = True | ||||
|         call_command("update_agents") | ||||
|  | ||||
|         ids = list( | ||||
|             Agent.objects.defer(*AGENT_DEFER) | ||||
|             .exclude(version=settings.LATEST_AGENT_VER) | ||||
|             .values_list("agent_id", flat=True) | ||||
|         ) | ||||
|  | ||||
|         mock_update.assert_called_with(agent_ids=ids, token="token123", force=False) | ||||
|  | ||||
|     @patch("agents.models.Agent.nats_cmd") | ||||
|     @patch("agents.models.get_agent_url") | ||||
|     def test_do_update(self, mock_agent_url, mock_nats_cmd): | ||||
|         mock_agent_url.return_value = "https://example.com/123" | ||||
|  | ||||
|         # test noarch | ||||
|         agent_noarch = baker.make_recipe( | ||||
|             "agents.online_agent", | ||||
|             site=self.site1, | ||||
|             monitoring_type=AgentMonType.SERVER, | ||||
|             plat=AgentPlat.WINDOWS, | ||||
|             version="2.3.0", | ||||
|         ) | ||||
|         r = agent_noarch.do_update(token="", force=True) | ||||
|         self.assertEqual(r, "noarch") | ||||
|  | ||||
|         # test too old | ||||
|         agent_old = baker.make_recipe( | ||||
|             "agents.online_agent", | ||||
|             site=self.site2, | ||||
|             monitoring_type=AgentMonType.SERVER, | ||||
|             plat=AgentPlat.WINDOWS, | ||||
|             version="1.3.0", | ||||
|             goarch=GoArch.AMD64, | ||||
|         ) | ||||
|         r = agent_old.do_update(token="", force=True) | ||||
|         self.assertEqual(r, "not supported") | ||||
|  | ||||
|         win = baker.make_recipe( | ||||
|             "agents.online_agent", | ||||
|             site=self.site1, | ||||
|             monitoring_type=AgentMonType.SERVER, | ||||
|             plat=AgentPlat.WINDOWS, | ||||
|             version="2.3.0", | ||||
|             goarch=GoArch.AMD64, | ||||
|         ) | ||||
|  | ||||
|         lin = baker.make_recipe( | ||||
|             "agents.online_agent", | ||||
|             site=self.site3, | ||||
|             monitoring_type=AgentMonType.WORKSTATION, | ||||
|             plat=AgentPlat.LINUX, | ||||
|             version="2.3.0", | ||||
|             goarch=GoArch.ARM32, | ||||
|         ) | ||||
|  | ||||
|         # test windows agent update | ||||
|         r = win.do_update(token="", force=False) | ||||
|         self.assertEqual(r, "created") | ||||
|         mock_nats_cmd.assert_called_with( | ||||
|             { | ||||
|                 "func": "agentupdate", | ||||
|                 "payload": { | ||||
|                     "url": "https://example.com/123", | ||||
|                     "version": settings.LATEST_AGENT_VER, | ||||
|                     "inno": f"tacticalagent-v{settings.LATEST_AGENT_VER}-windows-amd64.exe", | ||||
|                 }, | ||||
|             }, | ||||
|             wait=False, | ||||
|         ) | ||||
|         action1 = PendingAction.objects.get(agent__agent_id=win.agent_id) | ||||
|         self.assertEqual(action1.action_type, PAAction.AGENT_UPDATE) | ||||
|         self.assertEqual(action1.status, PAStatus.PENDING) | ||||
|         self.assertEqual(action1.details["url"], "https://example.com/123") | ||||
|         self.assertEqual( | ||||
|             action1.details["inno"], | ||||
|             f"tacticalagent-v{settings.LATEST_AGENT_VER}-windows-amd64.exe", | ||||
|         ) | ||||
|         self.assertEqual(action1.details["version"], settings.LATEST_AGENT_VER) | ||||
|  | ||||
|         mock_nats_cmd.reset_mock() | ||||
|  | ||||
|         # test linux agent update | ||||
|         r = lin.do_update(token="", force=False) | ||||
|         mock_nats_cmd.assert_called_with( | ||||
|             { | ||||
|                 "func": "agentupdate", | ||||
|                 "payload": { | ||||
|                     "url": "https://example.com/123", | ||||
|                     "version": settings.LATEST_AGENT_VER, | ||||
|                     "inno": f"tacticalagent-v{settings.LATEST_AGENT_VER}-linux-arm.exe", | ||||
|                 }, | ||||
|             }, | ||||
|             wait=False, | ||||
|         ) | ||||
|         action2 = PendingAction.objects.get(agent__agent_id=lin.agent_id) | ||||
|         self.assertEqual(action2.action_type, PAAction.AGENT_UPDATE) | ||||
|         self.assertEqual(action2.status, PAStatus.PENDING) | ||||
|         self.assertEqual(action2.details["url"], "https://example.com/123") | ||||
|         self.assertEqual( | ||||
|             action2.details["inno"], | ||||
|             f"tacticalagent-v{settings.LATEST_AGENT_VER}-linux-arm.exe", | ||||
|         ) | ||||
|         self.assertEqual(action2.details["version"], settings.LATEST_AGENT_VER) | ||||
|  | ||||
|         # check if old agent update pending actions are being deleted | ||||
|         # should only be 1 pending action at all times | ||||
|         pa_count = win.pendingactions.filter( | ||||
|             action_type=PAAction.AGENT_UPDATE, status=PAStatus.PENDING | ||||
|         ).count() | ||||
|         self.assertEqual(pa_count, 1) | ||||
|  | ||||
|         for _ in range(4): | ||||
|             win.do_update(token="", force=False) | ||||
|  | ||||
|         pa_count = win.pendingactions.filter( | ||||
|             action_type=PAAction.AGENT_UPDATE, status=PAStatus.PENDING | ||||
|         ).count() | ||||
|         self.assertEqual(pa_count, 1) | ||||
|  | ||||
|     def test_auto_self_agent_update_task(self): | ||||
|         auto_self_agent_update_task() | ||||
|  | ||||
|     @patch("agents.models.Agent.do_update") | ||||
|     def test_send_agent_update_task(self, mock_update): | ||||
|         baker.make_recipe( | ||||
|             "agents.online_agent", | ||||
|             site=self.site2, | ||||
|             monitoring_type=AgentMonType.SERVER, | ||||
|             plat=AgentPlat.WINDOWS, | ||||
|             version="2.3.0", | ||||
|             goarch=GoArch.AMD64, | ||||
|             _quantity=6, | ||||
|         ) | ||||
|         ids = list( | ||||
|             Agent.objects.defer(*AGENT_DEFER) | ||||
|             .exclude(version=settings.LATEST_AGENT_VER) | ||||
|             .values_list("agent_id", flat=True) | ||||
|         ) | ||||
|         send_agent_update_task(agent_ids=ids, token="", force=False) | ||||
|         self.assertEqual(mock_update.call_count, 6) | ||||
|  | ||||
|     @patch("agents.views.token_is_valid") | ||||
|     @patch("agents.tasks.send_agent_update_task.delay") | ||||
|     def test_update_agents(self, mock_update, mock_token): | ||||
|         mock_token.return_value = ("", False) | ||||
|         url = "/agents/update/" | ||||
|         baker.make_recipe( | ||||
|             "agents.online_agent", | ||||
|             site=self.site2, | ||||
|             monitoring_type=AgentMonType.SERVER, | ||||
|             plat=AgentPlat.WINDOWS, | ||||
|             version="2.3.0", | ||||
|             goarch=GoArch.AMD64, | ||||
|             _quantity=7, | ||||
|         ) | ||||
|         baker.make_recipe( | ||||
|             "agents.online_agent", | ||||
|             site=self.site2, | ||||
|             monitoring_type=AgentMonType.SERVER, | ||||
|             plat=AgentPlat.WINDOWS, | ||||
|             version=settings.LATEST_AGENT_VER, | ||||
|             goarch=GoArch.AMD64, | ||||
|             _quantity=3, | ||||
|         ) | ||||
|         baker.make_recipe( | ||||
|             "agents.online_agent", | ||||
|             site=self.site2, | ||||
|             monitoring_type=AgentMonType.WORKSTATION, | ||||
|             plat=AgentPlat.LINUX, | ||||
|             version="2.0.1", | ||||
|             goarch=GoArch.ARM32, | ||||
|             _quantity=9, | ||||
|         ) | ||||
|  | ||||
|         agent_ids: list[str] = list( | ||||
|             Agent.objects.only("agent_id").values_list("agent_id", flat=True) | ||||
|         ) | ||||
|  | ||||
|         data = {"agent_ids": agent_ids} | ||||
|         expected: list[str] = [ | ||||
|             i.agent_id | ||||
|             for i in Agent.objects.only("agent_id", "version") | ||||
|             if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER) | ||||
|         ] | ||||
|  | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|  | ||||
|         mock_update.assert_called_with(agent_ids=expected, token="", force=False) | ||||
|  | ||||
|         self.check_not_authenticated("post", url) | ||||
|  | ||||
|     @patch("agents.views.token_is_valid") | ||||
|     @patch("agents.tasks.send_agent_update_task.delay") | ||||
|     def test_agent_update_permissions(self, update_task, mock_token): | ||||
|         mock_token.return_value = ("", False) | ||||
|  | ||||
|         agents = baker.make_recipe("agents.agent", _quantity=5) | ||||
|         other_agents = baker.make_recipe("agents.agent", _quantity=7) | ||||
|  | ||||
|         url = f"/agents/update/" | ||||
|  | ||||
|         data = { | ||||
|             "agent_ids": [agent.agent_id for agent in agents] | ||||
|             + [agent.agent_id for agent in other_agents] | ||||
|         } | ||||
|  | ||||
|         # test superuser access | ||||
|         self.check_authorized_superuser("post", url, data) | ||||
|         update_task.assert_called_with( | ||||
|             agent_ids=data["agent_ids"], token="", force=False | ||||
|         ) | ||||
|         update_task.reset_mock() | ||||
|  | ||||
|         user = self.create_user_with_roles([]) | ||||
|         self.client.force_authenticate(user=user) | ||||
|  | ||||
|         self.check_not_authorized("post", url, data) | ||||
|         update_task.assert_not_called() | ||||
|  | ||||
|         user.role.can_update_agents = True | ||||
|         user.role.save() | ||||
|  | ||||
|         self.check_authorized("post", url, data) | ||||
|         update_task.assert_called_with( | ||||
|             agent_ids=data["agent_ids"], token="", force=False | ||||
|         ) | ||||
|         update_task.reset_mock() | ||||
|  | ||||
|         # limit to client | ||||
|         # user.role.can_view_clients.set([agents[0].client]) | ||||
|         # self.check_authorized("post", url, data) | ||||
|         # update_task.assert_called_with(agent_ids=[agent.agent_id for agent in agents]) | ||||
|         # update_task.reset_mock() | ||||
|  | ||||
|         # add site | ||||
|         # user.role.can_view_sites.set([other_agents[0].site]) | ||||
|         # self.check_authorized("post", url, data) | ||||
|         # update_task.assert_called_with(agent_ids=data["agent_ids"]) | ||||
|         # update_task.reset_mock() | ||||
|  | ||||
|         # remove client permissions | ||||
|         # user.role.can_view_clients.clear() | ||||
|         # self.check_authorized("post", url, data) | ||||
|         # update_task.assert_called_with( | ||||
|         #     agent_ids=[agent.agent_id for agent in other_agents] | ||||
|         # ) | ||||
							
								
								
									
api/tacticalrmm/agents/tests/test_agent_utils.py (new file, 60 lines)
| @@ -0,0 +1,60 @@ | ||||
| from unittest.mock import patch, AsyncMock | ||||
|  | ||||
| from django.conf import settings | ||||
| from rest_framework.response import Response | ||||
|  | ||||
| from agents.utils import generate_linux_install, get_agent_url | ||||
| from tacticalrmm.test import TacticalTestCase | ||||
|  | ||||
|  | ||||
| class TestAgentUtils(TacticalTestCase): | ||||
|     def setUp(self) -> None: | ||||
|         self.authenticate() | ||||
|         self.setup_coresettings() | ||||
|         self.setup_base_instance() | ||||
|  | ||||
|     def test_get_agent_url(self): | ||||
|         ver = settings.LATEST_AGENT_VER | ||||
|  | ||||
|         # test without token | ||||
|         r = get_agent_url(goarch="amd64", plat="windows", token="") | ||||
|         expected = f"https://github.com/amidaware/rmmagent/releases/download/v{ver}/tacticalagent-v{ver}-windows-amd64.exe" | ||||
|         self.assertEqual(r, expected) | ||||
|  | ||||
|         # test with token | ||||
|         r = get_agent_url(goarch="386", plat="linux", token="token123") | ||||
|         expected = f"https://{settings.AGENTS_URL}version={ver}&arch=386&token=token123&plat=linux&api=api.example.com" | ||||
|  | ||||
|     @patch("agents.utils.get_mesh_device_id") | ||||
|     @patch("agents.utils.asyncio.run") | ||||
|     @patch("agents.utils.get_mesh_ws_url") | ||||
|     @patch("agents.utils.get_core_settings") | ||||
|     def test_generate_linux_install( | ||||
|         self, mock_core, mock_mesh, mock_async_run, mock_mesh_device_id | ||||
|     ): | ||||
|         mock_mesh_device_id.return_value = "meshdeviceid" | ||||
|         mock_core.return_value.mesh_site = "meshsite" | ||||
|         mock_async_run.return_value = "meshid" | ||||
|         mock_mesh.return_value = "meshws" | ||||
|         r = generate_linux_install( | ||||
|             client="1", | ||||
|             site="1", | ||||
|             agent_type="server", | ||||
|             arch="amd64", | ||||
|             token="token123", | ||||
|             api="api.example.com", | ||||
|             download_url="asdasd3423", | ||||
|         ) | ||||
|  | ||||
|         ret = r.getvalue().decode("utf-8") | ||||
|  | ||||
|         self.assertIn(r"agentDL='asdasd3423'", ret) | ||||
|         self.assertIn( | ||||
|             r"meshDL='meshsite/meshagents?id=meshid&installflags=0&meshinstall=6'", ret | ||||
|         ) | ||||
|         self.assertIn(r"apiURL='api.example.com'", ret) | ||||
|         self.assertIn(r"agentDL='asdasd3423'", ret) | ||||
|         self.assertIn(r"token='token123'", ret) | ||||
|         self.assertIn(r"clientID='1'", ret) | ||||
|         self.assertIn(r"siteID='1'", ret) | ||||
|         self.assertIn(r"agentType='server'", ret) | ||||
										
											
(File diff suppressed because it is too large)

api/tacticalrmm/agents/tests/test_mgmt_commands.py (new file, 46 lines)
							| @@ -0,0 +1,46 @@ | ||||
| from unittest.mock import call, patch | ||||
|  | ||||
| from django.core.management import call_command | ||||
| from model_bakery import baker | ||||
|  | ||||
| from tacticalrmm.constants import AgentMonType, AgentPlat | ||||
| from tacticalrmm.test import TacticalTestCase | ||||
|  | ||||
|  | ||||
| class TestBulkRestartAgents(TacticalTestCase): | ||||
|     def setUp(self) -> None: | ||||
|         self.authenticate() | ||||
|         self.setup_coresettings() | ||||
|         self.setup_base_instance() | ||||
|  | ||||
|     @patch("core.management.commands.bulk_restart_agents.sleep") | ||||
|     @patch("agents.models.Agent.recover") | ||||
|     @patch("core.management.commands.bulk_restart_agents.get_mesh_ws_url") | ||||
|     def test_bulk_restart_agents_mgmt_cmd( | ||||
|         self, get_mesh_ws_url, recover, mock_sleep | ||||
|     ) -> None: | ||||
|         get_mesh_ws_url.return_value = "https://mesh.example.com/test" | ||||
|  | ||||
|         baker.make_recipe( | ||||
|             "agents.online_agent", | ||||
|             site=self.site1, | ||||
|             monitoring_type=AgentMonType.SERVER, | ||||
|             plat=AgentPlat.WINDOWS, | ||||
|         ) | ||||
|  | ||||
|         baker.make_recipe( | ||||
|             "agents.online_agent", | ||||
|             site=self.site3, | ||||
|             monitoring_type=AgentMonType.SERVER, | ||||
|             plat=AgentPlat.LINUX, | ||||
|         ) | ||||
|  | ||||
|         calls = [ | ||||
|             call("tacagent", "https://mesh.example.com/test", wait=False), | ||||
|             call("mesh", "", wait=False), | ||||
|         ] | ||||
|  | ||||
|         call_command("bulk_restart_agents") | ||||
|  | ||||
|         recover.assert_has_calls(calls) | ||||
|         mock_sleep.assert_called_with(10) | ||||
							
								
								
									
api/tacticalrmm/agents/tests/test_recovery.py (new file, 63 lines)
| @@ -0,0 +1,63 @@ | ||||
| from typing import TYPE_CHECKING | ||||
| from unittest.mock import patch | ||||
|  | ||||
| from model_bakery import baker | ||||
|  | ||||
| from tacticalrmm.constants import AgentMonType, AgentPlat | ||||
| from tacticalrmm.test import TacticalTestCase | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from clients.models import Client, Site | ||||
|  | ||||
|  | ||||
| class TestRecovery(TacticalTestCase): | ||||
|     def setUp(self) -> None: | ||||
|         self.authenticate() | ||||
|         self.setup_coresettings() | ||||
|         self.client1: "Client" = baker.make("clients.Client") | ||||
|         self.site1: "Site" = baker.make("clients.Site", client=self.client1) | ||||
|  | ||||
|     @patch("agents.models.Agent.recover") | ||||
|     @patch("agents.views.get_mesh_ws_url") | ||||
|     def test_recover(self, get_mesh_ws_url, recover) -> None: | ||||
|         get_mesh_ws_url.return_value = "https://mesh.example.com" | ||||
|  | ||||
|         agent = baker.make_recipe( | ||||
|             "agents.online_agent", | ||||
|             site=self.site1, | ||||
|             monitoring_type=AgentMonType.SERVER, | ||||
|             plat=AgentPlat.WINDOWS, | ||||
|         ) | ||||
|  | ||||
|         url = f"/agents/{agent.agent_id}/recover/" | ||||
|  | ||||
|         # test successful tacticalagent recovery | ||||
|         data = {"mode": "tacagent"} | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         recover.assert_called_with("tacagent", "https://mesh.example.com", wait=False) | ||||
|         get_mesh_ws_url.assert_called_once() | ||||
|  | ||||
|         # reset mocks | ||||
|         recover.reset_mock() | ||||
|         get_mesh_ws_url.reset_mock() | ||||
|  | ||||
|         # test successful mesh agent recovery | ||||
|         data = {"mode": "mesh"} | ||||
|         recover.return_value = ("ok", False) | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         get_mesh_ws_url.assert_not_called() | ||||
|         recover.assert_called_with("mesh", "") | ||||
|  | ||||
|         # reset mocks | ||||
|         recover.reset_mock() | ||||
|         get_mesh_ws_url.reset_mock() | ||||
|  | ||||
|         # test failed mesh agent recovery | ||||
|         data = {"mode": "mesh"} | ||||
|         recover.return_value = ("Unable to contact the agent", True) | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 400) | ||||
|  | ||||
|         self.check_not_authenticated("post", url) | ||||
| @@ -1,6 +1,7 @@ | ||||
| from django.urls import path | ||||
|  | ||||
| from autotasks.views import GetAddAutoTasks | ||||
| from checks.views import GetAddChecks | ||||
| from django.urls import path | ||||
| from logs.views import PendingActions | ||||
|  | ||||
| from . import views | ||||
| @@ -40,4 +41,5 @@ urlpatterns = [ | ||||
|     path("versions/", views.get_agent_versions), | ||||
|     path("update/", views.update_agents), | ||||
|     path("installer/", views.install_agent), | ||||
|     path("bulkrecovery/", views.bulk_agent_recovery), | ||||
| ] | ||||
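The new `bulkrecovery/` route points at `views.bulk_agent_recovery`, whose body is not included in this diff; judging from the imports added to `agents/views.py` below, it most likely just queues `bulk_recover_agents_task`. A speculative sketch only — the decorators and permission handling here are assumptions, not the actual view:

    from rest_framework.decorators import api_view, permission_classes
    from rest_framework.permissions import IsAuthenticated
    from rest_framework.response import Response

    from agents.tasks import bulk_recover_agents_task

    @api_view(["POST"])
    @permission_classes([IsAuthenticated])
    def bulk_agent_recovery(request):
        # hands the work off to celery, which runs the bulk_restart_agents command
        bulk_recover_agents_task.delay()
        return Response("ok")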
|   | ||||
| @@ -2,37 +2,26 @@ import asyncio | ||||
| import tempfile | ||||
| import urllib.parse | ||||
|  | ||||
| from core.models import CodeSignToken, CoreSettings | ||||
| from core.utils import get_mesh_device_id, get_mesh_ws_url | ||||
| from django.conf import settings | ||||
| from django.http import FileResponse | ||||
|  | ||||
| from core.utils import get_core_settings, get_mesh_device_id, get_mesh_ws_url | ||||
| from tacticalrmm.constants import MeshAgentIdent | ||||
|  | ||||
|  | ||||
| def get_agent_url(arch: str, plat: str) -> str: | ||||
| def get_agent_url(*, goarch: str, plat: str, token: str = "") -> str: | ||||
|     ver = settings.LATEST_AGENT_VER | ||||
|     if token: | ||||
|         params = { | ||||
|             "version": ver, | ||||
|             "arch": goarch, | ||||
|             "token": token, | ||||
|             "plat": plat, | ||||
|             "api": settings.ALLOWED_HOSTS[0], | ||||
|         } | ||||
|         return settings.AGENTS_URL + urllib.parse.urlencode(params) | ||||
|  | ||||
|     if plat == "windows": | ||||
|         endpoint = "winagents" | ||||
|         dl_url = settings.DL_32 if arch == "32" else settings.DL_64 | ||||
|     else: | ||||
|         endpoint = "linuxagents" | ||||
|         dl_url = "" | ||||
|  | ||||
|     try: | ||||
|         t: CodeSignToken = CodeSignToken.objects.first()  # type: ignore | ||||
|         if t.is_valid: | ||||
|             base_url = settings.EXE_GEN_URL + f"/api/v1/{endpoint}/?" | ||||
|             params = { | ||||
|                 "version": settings.LATEST_AGENT_VER, | ||||
|                 "arch": arch, | ||||
|                 "token": t.token, | ||||
|             } | ||||
|             dl_url = base_url + urllib.parse.urlencode(params) | ||||
|     except: | ||||
|         pass | ||||
|  | ||||
|     return dl_url | ||||
|     return f"https://github.com/amidaware/rmmagent/releases/download/v{ver}/tacticalagent-v{ver}-{plat}-{goarch}.exe" | ||||
|  | ||||
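With this rewrite, agent binaries are pulled straight from GitHub releases unless a code-sign token is supplied, in which case the parameters are urlencoded onto `settings.AGENTS_URL` instead. For example (mirrored by `test_get_agent_url` earlier in the diff):

    # no token: direct GitHub release download
    url = get_agent_url(goarch="amd64", plat="windows", token="")
    # -> https://github.com/amidaware/rmmagent/releases/download/v<ver>/tacticalagent-v<ver>-windows-amd64.exe

    # with a token: version, arch, token, plat and api host are appended to settings.AGENTS_URL
    url = get_agent_url(goarch="386", plat="linux", token="token123")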
|  | ||||
| def generate_linux_install( | ||||
| @@ -54,12 +43,16 @@ def generate_linux_install( | ||||
|             arch_id = MeshAgentIdent.LINUX_ARM_64 | ||||
|         case "arm": | ||||
|             arch_id = MeshAgentIdent.LINUX_ARM_HF | ||||
|         case _: | ||||
|             arch_id = "not_found" | ||||
|  | ||||
|     core: CoreSettings = CoreSettings.objects.first()  # type: ignore | ||||
|     core = get_core_settings() | ||||
|  | ||||
|     uri = get_mesh_ws_url() | ||||
|     mesh_id = asyncio.run(get_mesh_device_id(uri, core.mesh_device_group)) | ||||
|     mesh_dl = f"{core.mesh_site}/meshagents?id={mesh_id}&installflags=0&meshinstall={arch_id}"  # type: ignore | ||||
|     mesh_dl = ( | ||||
|         f"{core.mesh_site}/meshagents?id={mesh_id}&installflags=0&meshinstall={arch_id}" | ||||
|     ) | ||||
|  | ||||
|     sh = settings.LINUX_AGENT_SCRIPT | ||||
|     with open(sh, "r") as f: | ||||
|   | ||||
| @@ -5,32 +5,51 @@ import random | ||||
| import string | ||||
| import time | ||||
|  | ||||
| from core.models import CodeSignToken, CoreSettings | ||||
| from core.utils import get_mesh_ws_url, remove_mesh_agent, send_command_with_mesh | ||||
| from django.conf import settings | ||||
| from django.db.models import Q | ||||
| from django.db.models import Count, Exists, OuterRef, Prefetch, Q | ||||
| from django.http import HttpResponse | ||||
| from django.shortcuts import get_object_or_404 | ||||
| from django.utils import timezone as djangotime | ||||
| from logs.models import AuditLog, DebugLog, PendingAction | ||||
| from meshctrl.utils import get_login_token | ||||
| from packaging import version as pyver | ||||
| from rest_framework.decorators import api_view, permission_classes | ||||
| from rest_framework.exceptions import PermissionDenied | ||||
| from rest_framework.permissions import IsAuthenticated | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.views import APIView | ||||
|  | ||||
| from core.utils import ( | ||||
|     get_core_settings, | ||||
|     get_mesh_ws_url, | ||||
|     remove_mesh_agent, | ||||
|     token_is_valid, | ||||
| ) | ||||
| from logs.models import AuditLog, DebugLog, PendingAction | ||||
| from scripts.models import Script | ||||
| from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task | ||||
| from winupdate.serializers import WinUpdatePolicySerializer | ||||
| from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task | ||||
|  | ||||
| from tacticalrmm.constants import AGENT_DEFER | ||||
| from tacticalrmm.constants import ( | ||||
|     AGENT_DEFER, | ||||
|     AGENT_STATUS_OFFLINE, | ||||
|     AGENT_STATUS_ONLINE, | ||||
|     AgentHistoryType, | ||||
|     AgentMonType, | ||||
|     AgentPlat, | ||||
|     CustomFieldModel, | ||||
|     DebugLogType, | ||||
|     EvtLogNames, | ||||
|     PAAction, | ||||
|     PAStatus, | ||||
| ) | ||||
| from tacticalrmm.helpers import date_is_in_past, notify_error | ||||
| from tacticalrmm.permissions import ( | ||||
|     _has_perm_on_agent, | ||||
|     _has_perm_on_client, | ||||
|     _has_perm_on_site, | ||||
| ) | ||||
| from tacticalrmm.utils import get_default_timezone, notify_error, reload_nats | ||||
| from tacticalrmm.utils import get_default_timezone, reload_nats | ||||
| from winupdate.models import WinUpdate | ||||
| from winupdate.serializers import WinUpdatePolicySerializer | ||||
| from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task | ||||
|  | ||||
| from .models import Agent, AgentCustomField, AgentHistory, Note | ||||
| from .permissions import ( | ||||
| @@ -57,19 +76,33 @@ from .serializers import ( | ||||
|     AgentSerializer, | ||||
|     AgentTableSerializer, | ||||
| ) | ||||
| from .tasks import run_script_email_results_task, send_agent_update_task | ||||
| from .tasks import ( | ||||
|     bulk_recover_agents_task, | ||||
|     run_script_email_results_task, | ||||
|     send_agent_update_task, | ||||
| ) | ||||
|  | ||||
|  | ||||
| class GetAgents(APIView): | ||||
|     permission_classes = [IsAuthenticated, AgentPerms] | ||||
|  | ||||
|     def get(self, request): | ||||
|         from checks.models import Check, CheckResult | ||||
|  | ||||
|         monitoring_type_filter = Q() | ||||
|         client_site_filter = Q() | ||||
|  | ||||
|         monitoring_type = request.query_params.get("monitoring_type", None) | ||||
|         if monitoring_type: | ||||
|             if monitoring_type in AgentMonType.values: | ||||
|                 monitoring_type_filter = Q(monitoring_type=monitoring_type) | ||||
|             else: | ||||
|                 return notify_error("monitoring type does not exist") | ||||
|  | ||||
|         if "site" in request.query_params.keys(): | ||||
|             filter = Q(site_id=request.query_params["site"]) | ||||
|             client_site_filter = Q(site_id=request.query_params["site"]) | ||||
|         elif "client" in request.query_params.keys(): | ||||
|             filter = Q(site__client_id=request.query_params["client"]) | ||||
|         else: | ||||
|             filter = Q() | ||||
|             client_site_filter = Q(site__client_id=request.query_params["client"]) | ||||
|  | ||||
|         # by default detail=true | ||||
|         if ( | ||||
| @@ -77,24 +110,53 @@ class GetAgents(APIView): | ||||
|             or "detail" in request.query_params.keys() | ||||
|             and request.query_params["detail"] == "true" | ||||
|         ): | ||||
|  | ||||
|             agents = ( | ||||
|                 Agent.objects.filter_by_role(request.user)  # type: ignore | ||||
|                 .select_related("site", "policy", "alert_template") | ||||
|                 .prefetch_related("agentchecks") | ||||
|                 .filter(filter) | ||||
|                 .filter(monitoring_type_filter) | ||||
|                 .filter(client_site_filter) | ||||
|                 .defer(*AGENT_DEFER) | ||||
|                 .select_related( | ||||
|                     "site__server_policy", | ||||
|                     "site__workstation_policy", | ||||
|                     "site__client__server_policy", | ||||
|                     "site__client__workstation_policy", | ||||
|                     "policy", | ||||
|                     "alert_template", | ||||
|                 ) | ||||
|                 .prefetch_related( | ||||
|                     Prefetch( | ||||
|                         "agentchecks", | ||||
|                         queryset=Check.objects.select_related("script"), | ||||
|                     ), | ||||
|                     Prefetch( | ||||
|                         "checkresults", | ||||
|                         queryset=CheckResult.objects.select_related("assigned_check"), | ||||
|                     ), | ||||
|                 ) | ||||
|                 .annotate( | ||||
|                     pending_actions_count=Count( | ||||
|                         "pendingactions", | ||||
|                         filter=Q(pendingactions__status=PAStatus.PENDING), | ||||
|                     ) | ||||
|                 ) | ||||
|                 .annotate( | ||||
|                     has_patches_pending=Exists( | ||||
|                         WinUpdate.objects.filter( | ||||
|                             agent_id=OuterRef("pk"), action="approve", installed=False | ||||
|                         ) | ||||
|                     ) | ||||
|                 ) | ||||
|             ) | ||||
|             ctx = {"default_tz": get_default_timezone()} | ||||
|             serializer = AgentTableSerializer(agents, many=True, context=ctx) | ||||
|             serializer = AgentTableSerializer(agents, many=True) | ||||
|  | ||||
|         # if detail=false | ||||
|         else: | ||||
|             agents = ( | ||||
|                 Agent.objects.filter_by_role(request.user)  # type: ignore | ||||
|                 .select_related("site") | ||||
|                 .filter(filter) | ||||
|                 .only("agent_id", "hostname", "site") | ||||
|                 .defer(*AGENT_DEFER) | ||||
|                 .select_related("site__client") | ||||
|                 .filter(monitoring_type_filter) | ||||
|                 .filter(client_site_filter) | ||||
|             ) | ||||
|             serializer = AgentHostnameSerializer(agents, many=True) | ||||
|  | ||||
| @@ -130,13 +192,13 @@ class GetUpdateDeleteAgent(APIView): | ||||
|             for field in request.data["custom_fields"]: | ||||
|  | ||||
|                 custom_field = field | ||||
|                 custom_field["agent"] = agent.id  # type: ignore | ||||
|                 custom_field["agent"] = agent.pk | ||||
|  | ||||
|                 if AgentCustomField.objects.filter( | ||||
|                     field=field["field"], agent=agent.id  # type: ignore | ||||
|                     field=field["field"], agent=agent.pk | ||||
|                 ): | ||||
|                     value = AgentCustomField.objects.get( | ||||
|                         field=field["field"], agent=agent.id  # type: ignore | ||||
|                         field=field["field"], agent=agent.pk | ||||
|                     ) | ||||
|                     serializer = AgentCustomFieldSerializer( | ||||
|                         instance=value, data=custom_field | ||||
| @@ -155,7 +217,7 @@ class GetUpdateDeleteAgent(APIView): | ||||
|         agent = get_object_or_404(Agent, agent_id=agent_id) | ||||
|  | ||||
|         code = "foo" | ||||
|         if agent.plat == "linux": | ||||
|         if agent.plat == AgentPlat.LINUX: | ||||
|             with open(settings.LINUX_AGENT_SCRIPT, "r") as f: | ||||
|                 code = f.read() | ||||
|  | ||||
| @@ -164,8 +226,14 @@ class GetUpdateDeleteAgent(APIView): | ||||
|         mesh_id = agent.mesh_node_id | ||||
|         agent.delete() | ||||
|         reload_nats() | ||||
|         uri = get_mesh_ws_url() | ||||
|         asyncio.run(remove_mesh_agent(uri, mesh_id)) | ||||
|         try: | ||||
|             uri = get_mesh_ws_url() | ||||
|             asyncio.run(remove_mesh_agent(uri, mesh_id)) | ||||
|         except Exception as e: | ||||
|             DebugLog.error( | ||||
|                 message=f"Unable to remove agent {name} from meshcentral database: {str(e)}", | ||||
|                 log_type=DebugLogType.AGENT_ISSUES, | ||||
|             ) | ||||
|         return Response(f"{name} will now be uninstalled.") | ||||
|  | ||||
|  | ||||
| @@ -206,19 +274,19 @@ class AgentMeshCentral(APIView): | ||||
|     # get mesh urls | ||||
|     def get(self, request, agent_id): | ||||
|         agent = get_object_or_404(Agent, agent_id=agent_id) | ||||
|         core = CoreSettings.objects.first() | ||||
|         core = get_core_settings() | ||||
|  | ||||
|         token = agent.get_login_token( | ||||
|             key=core.mesh_token, | ||||
|             user=f"user//{core.mesh_username.lower()}",  # type:ignore | ||||
|         ) | ||||
|         if not core.mesh_disable_auto_login: | ||||
|             token = get_login_token( | ||||
|                 key=core.mesh_token, user=f"user//{core.mesh_username}" | ||||
|             ) | ||||
|             token_param = f"login={token}&" | ||||
|         else: | ||||
|             token_param = "" | ||||
|  | ||||
|         if token == "err": | ||||
|             return notify_error("Invalid mesh token") | ||||
|  | ||||
|         control = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=11&hide=31"  # type:ignore | ||||
|         terminal = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=12&hide=31"  # type:ignore | ||||
|         file = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=13&hide=31"  # type:ignore | ||||
|         control = f"{core.mesh_site}/?{token_param}gotonode={agent.mesh_node_id}&viewmode=11&hide=31" | ||||
|         terminal = f"{core.mesh_site}/?{token_param}gotonode={agent.mesh_node_id}&viewmode=12&hide=31" | ||||
|         file = f"{core.mesh_site}/?{token_param}gotonode={agent.mesh_node_id}&viewmode=13&hide=31" | ||||
|  | ||||
|         AuditLog.audit_mesh_session( | ||||
|             username=request.user.username, | ||||
| @@ -252,9 +320,9 @@ class AgentMeshCentral(APIView): | ||||
| @permission_classes([IsAuthenticated, AgentPerms]) | ||||
| def get_agent_versions(request): | ||||
|     agents = ( | ||||
|         Agent.objects.filter_by_role(request.user) | ||||
|         .prefetch_related("site") | ||||
|         .only("pk", "hostname") | ||||
|         Agent.objects.defer(*AGENT_DEFER) | ||||
|         .filter_by_role(request.user)  # type: ignore | ||||
|         .select_related("site__client") | ||||
|     ) | ||||
|     return Response( | ||||
|         { | ||||
| @@ -268,7 +336,7 @@ def get_agent_versions(request): | ||||
| @permission_classes([IsAuthenticated, UpdateAgentPerms]) | ||||
| def update_agents(request): | ||||
|     q = ( | ||||
|         Agent.objects.filter_by_role(request.user) | ||||
|         Agent.objects.filter_by_role(request.user)  # type: ignore | ||||
|         .filter(agent_id__in=request.data["agent_ids"]) | ||||
|         .only("agent_id", "version") | ||||
|     ) | ||||
| @@ -277,7 +345,9 @@ def update_agents(request): | ||||
|         for i in q | ||||
|         if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER) | ||||
|     ] | ||||
|     send_agent_update_task.delay(agent_ids=agent_ids) | ||||
|  | ||||
|     token, _ = token_is_valid() | ||||
|     send_agent_update_task.delay(agent_ids=agent_ids, token=token, force=False) | ||||
|     return Response("ok") | ||||
|  | ||||
|  | ||||
| @@ -285,18 +355,18 @@ def update_agents(request): | ||||
| @permission_classes([IsAuthenticated, PingAgentPerms]) | ||||
| def ping(request, agent_id): | ||||
|     agent = get_object_or_404(Agent, agent_id=agent_id) | ||||
|     status = "offline" | ||||
|     status = AGENT_STATUS_OFFLINE | ||||
|     attempts = 0 | ||||
|     while 1: | ||||
|         r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=2)) | ||||
|         if r == "pong": | ||||
|             status = "online" | ||||
|             status = AGENT_STATUS_ONLINE | ||||
|             break | ||||
|         else: | ||||
|             attempts += 1 | ||||
|             time.sleep(1) | ||||
|             time.sleep(0.5) | ||||
|  | ||||
|         if attempts >= 5: | ||||
|         if attempts >= 3: | ||||
|             break | ||||
|  | ||||
|     return Response({"name": agent.hostname, "status": status}) | ||||
| @@ -311,7 +381,7 @@ def get_event_log(request, agent_id, logtype, days): | ||||
|         return demo_get_eventlog() | ||||
|  | ||||
|     agent = get_object_or_404(Agent, agent_id=agent_id) | ||||
|     timeout = 180 if logtype == "Security" else 30 | ||||
|     timeout = 180 if logtype == EvtLogNames.SECURITY else 30 | ||||
|  | ||||
|     data = { | ||||
|         "func": "eventlog", | ||||
| @@ -345,11 +415,12 @@ def send_raw_cmd(request, agent_id): | ||||
|             "command": request.data["cmd"], | ||||
|             "shell": shell, | ||||
|         }, | ||||
|         "run_as_user": request.data["run_as_user"], | ||||
|     } | ||||
|  | ||||
|     hist = AgentHistory.objects.create( | ||||
|         agent=agent, | ||||
|         type="cmd_run", | ||||
|         type=AgentHistoryType.CMD_RUN, | ||||
|         command=request.data["cmd"], | ||||
|         username=request.user.username[:50], | ||||
|     ) | ||||
| @@ -385,12 +456,17 @@ class Reboot(APIView): | ||||
|     # reboot later | ||||
|     def patch(self, request, agent_id): | ||||
|         agent = get_object_or_404(Agent, agent_id=agent_id) | ||||
|         if agent.is_posix: | ||||
|             return notify_error(f"Not currently implemented for {agent.plat}") | ||||
|  | ||||
|         try: | ||||
|             obj = dt.datetime.strptime(request.data["datetime"], "%Y-%m-%d %H:%M") | ||||
|             obj = dt.datetime.strptime(request.data["datetime"], "%Y-%m-%dT%H:%M:%S") | ||||
|         except Exception: | ||||
|             return notify_error("Invalid date") | ||||
|  | ||||
|         if date_is_in_past(datetime_obj=obj, agent_tz=agent.timezone): | ||||
|             return notify_error("Date cannot be set in the past") | ||||
|  | ||||
|         task_name = "TacticalRMM_SchedReboot_" + "".join( | ||||
|             random.choice(string.ascii_letters) for _ in range(10) | ||||
|         ) | ||||
| @@ -426,7 +502,7 @@ class Reboot(APIView): | ||||
|  | ||||
|         details = {"taskname": task_name, "time": str(obj)} | ||||
|         PendingAction.objects.create( | ||||
|             agent=agent, action_type="schedreboot", details=details | ||||
|             agent=agent, action_type=PAAction.SCHED_REBOOT, details=details | ||||
|         ) | ||||
|         nice_time = dt.datetime.strftime(obj, "%B %d, %Y at %I:%M %p") | ||||
|         return Response( | ||||
| @@ -437,27 +513,25 @@ class Reboot(APIView): | ||||
| @api_view(["POST"]) | ||||
| @permission_classes([IsAuthenticated, InstallAgentPerms]) | ||||
| def install_agent(request): | ||||
|     from knox.models import AuthToken | ||||
|  | ||||
|     from accounts.models import User | ||||
|     from agents.utils import get_agent_url | ||||
|     from knox.models import AuthToken | ||||
|     from core.utils import token_is_valid | ||||
|  | ||||
|     client_id = request.data["client"] | ||||
|     site_id = request.data["site"] | ||||
|     version = settings.LATEST_AGENT_VER | ||||
|     arch = request.data["arch"] | ||||
|     goarch = request.data["goarch"] | ||||
|     plat = request.data["plat"] | ||||
|  | ||||
|     if not _has_perm_on_site(request.user, site_id): | ||||
|         raise PermissionDenied() | ||||
|  | ||||
|     inno = ( | ||||
|         f"winagent-v{version}.exe" if arch == "64" else f"winagent-v{version}-x86.exe" | ||||
|     ) | ||||
|     if request.data["installMethod"] == "linux": | ||||
|         plat = "linux" | ||||
|     else: | ||||
|         plat = "windows" | ||||
|     codesign_token, is_valid = token_is_valid() | ||||
|  | ||||
|     download_url = get_agent_url(arch, plat) | ||||
|     inno = f"tacticalagent-v{version}-{plat}-{goarch}.exe" | ||||
|     download_url = get_agent_url(goarch=goarch, plat=plat, token=codesign_token) | ||||
|  | ||||
|     installer_user = User.objects.filter(is_installer_user=True).first() | ||||
|  | ||||
| @@ -475,26 +549,21 @@ def install_agent(request): | ||||
|             rdp=request.data["rdp"], | ||||
|             ping=request.data["ping"], | ||||
|             power=request.data["power"], | ||||
|             arch=arch, | ||||
|             goarch=goarch, | ||||
|             token=token, | ||||
|             api=request.data["api"], | ||||
|             file_name=request.data["fileName"], | ||||
|         ) | ||||
|  | ||||
|     elif request.data["installMethod"] == "linux": | ||||
|     elif request.data["installMethod"] == "bash": | ||||
|         # TODO | ||||
|         # linux agents are in beta for now, only available for sponsors for testing | ||||
|         # remove this after it's out of beta | ||||
|  | ||||
|         try: | ||||
|             t: CodeSignToken = CodeSignToken.objects.first()  # type: ignore | ||||
|         except: | ||||
|             return notify_error("Something went wrong") | ||||
|  | ||||
|         if t is None: | ||||
|             return notify_error("Missing code signing token") | ||||
|         if not t.is_valid: | ||||
|             return notify_error("Code signing token is not valid") | ||||
|         if not is_valid: | ||||
|             return notify_error( | ||||
|                 "Missing code signing token, or token is no longer valid. Please read the docs for more info." | ||||
|             ) | ||||
|  | ||||
|         from agents.utils import generate_linux_install | ||||
|  | ||||
| @@ -502,7 +571,7 @@ def install_agent(request): | ||||
|             client=str(client_id), | ||||
|             site=str(site_id), | ||||
|             agent_type=request.data["agenttype"], | ||||
|             arch=arch, | ||||
|             arch=goarch, | ||||
|             token=token, | ||||
|             api=request.data["api"], | ||||
|             download_url=download_url, | ||||
| @@ -597,28 +666,23 @@ def install_agent(request): | ||||
|  | ||||
| @api_view(["POST"]) | ||||
| @permission_classes([IsAuthenticated, RecoverAgentPerms]) | ||||
| def recover(request, agent_id): | ||||
|     agent = get_object_or_404(Agent, agent_id=agent_id) | ||||
| def recover(request, agent_id: str) -> Response: | ||||
|     agent: Agent = get_object_or_404( | ||||
|         Agent.objects.defer(*AGENT_DEFER), agent_id=agent_id | ||||
|     ) | ||||
|     mode = request.data["mode"] | ||||
|  | ||||
|     if mode == "tacagent": | ||||
|         if agent.is_posix: | ||||
|             cmd = "systemctl restart tacticalagent.service" | ||||
|             shell = 3 | ||||
|         else: | ||||
|             cmd = "net stop tacticalrmm & taskkill /F /IM tacticalrmm.exe & net start tacticalrmm" | ||||
|             shell = 1 | ||||
|         uri = get_mesh_ws_url() | ||||
|         asyncio.run(send_command_with_mesh(cmd, uri, agent.mesh_node_id, shell, 0)) | ||||
|         agent.recover(mode, uri, wait=False) | ||||
|         return Response("Recovery will be attempted shortly") | ||||
|  | ||||
|     elif mode == "mesh": | ||||
|         data = {"func": "recover", "payload": {"mode": mode}} | ||||
|         r = asyncio.run(agent.nats_cmd(data, timeout=20)) | ||||
|         if r == "ok": | ||||
|             return Response("Successfully completed recovery") | ||||
|         r, err = agent.recover(mode, "") | ||||
|         if err: | ||||
|             return notify_error(f"Unable to complete recovery: {r}") | ||||
|  | ||||
|     return notify_error("Something went wrong") | ||||
|     return Response("Successfully completed recovery") | ||||
|  | ||||
|  | ||||
| @api_view(["POST"]) | ||||
| @@ -628,6 +692,7 @@ def run_script(request, agent_id): | ||||
|     script = get_object_or_404(Script, pk=request.data["script"]) | ||||
|     output = request.data["output"] | ||||
|     args = request.data["args"] | ||||
|     run_as_user: bool = request.data["run_as_user"] | ||||
|     req_timeout = int(request.data["timeout"]) + 3 | ||||
|  | ||||
|     AuditLog.audit_script_run( | ||||
| @@ -639,7 +704,7 @@ def run_script(request, agent_id): | ||||
|  | ||||
|     hist = AgentHistory.objects.create( | ||||
|         agent=agent, | ||||
|         type="script_run", | ||||
|         type=AgentHistoryType.SCRIPT_RUN, | ||||
|         script=script, | ||||
|         username=request.user.username[:50], | ||||
|     ) | ||||
| @@ -652,6 +717,7 @@ def run_script(request, agent_id): | ||||
|             timeout=req_timeout, | ||||
|             wait=True, | ||||
|             history_pk=history_pk, | ||||
|             run_as_user=run_as_user, | ||||
|         ) | ||||
|         return Response(r) | ||||
|  | ||||
| @@ -665,6 +731,7 @@ def run_script(request, agent_id): | ||||
|             nats_timeout=req_timeout, | ||||
|             emails=emails, | ||||
|             args=args, | ||||
|             run_as_user=run_as_user, | ||||
|         ) | ||||
|     elif output == "collector": | ||||
|         from core.models import CustomField | ||||
| @@ -675,15 +742,16 @@ def run_script(request, agent_id): | ||||
|             timeout=req_timeout, | ||||
|             wait=True, | ||||
|             history_pk=history_pk, | ||||
|             run_as_user=run_as_user, | ||||
|         ) | ||||
|  | ||||
|         custom_field = CustomField.objects.get(pk=request.data["custom_field"]) | ||||
|  | ||||
|         if custom_field.model == "agent": | ||||
|         if custom_field.model == CustomFieldModel.AGENT: | ||||
|             field = custom_field.get_or_create_field_value(agent) | ||||
|         elif custom_field.model == "client": | ||||
|         elif custom_field.model == CustomFieldModel.CLIENT: | ||||
|             field = custom_field.get_or_create_field_value(agent.client) | ||||
|         elif custom_field.model == "site": | ||||
|         elif custom_field.model == CustomFieldModel.SITE: | ||||
|             field = custom_field.get_or_create_field_value(agent.site) | ||||
|         else: | ||||
|             return notify_error("Custom Field was invalid") | ||||
| @@ -703,13 +771,18 @@ def run_script(request, agent_id): | ||||
|             timeout=req_timeout, | ||||
|             wait=True, | ||||
|             history_pk=history_pk, | ||||
|             run_as_user=run_as_user, | ||||
|         ) | ||||
|  | ||||
|         Note.objects.create(agent=agent, user=request.user, note=r) | ||||
|         return Response(r) | ||||
|     else: | ||||
|         agent.run_script( | ||||
|             scriptpk=script.pk, args=args, timeout=req_timeout, history_pk=history_pk | ||||
|             scriptpk=script.pk, | ||||
|             args=args, | ||||
|             timeout=req_timeout, | ||||
|             history_pk=history_pk, | ||||
|             run_as_user=run_as_user, | ||||
|         ) | ||||
|  | ||||
|     return Response(f"{script.name} will now be run on {agent.hostname}") | ||||
| @@ -723,7 +796,7 @@ class GetAddNotes(APIView): | ||||
|             agent = get_object_or_404(Agent, agent_id=agent_id) | ||||
|             notes = Note.objects.filter(agent=agent) | ||||
|         else: | ||||
|             notes = Note.objects.filter_by_role(request.user) | ||||
|             notes = Note.objects.filter_by_role(request.user)  # type: ignore | ||||
|  | ||||
|         return Response(AgentNoteSerializer(notes, many=True).data) | ||||
|  | ||||
| @@ -788,37 +861,37 @@ def bulk(request): | ||||
|     if request.data["target"] == "client": | ||||
|         if not _has_perm_on_client(request.user, request.data["client"]): | ||||
|             raise PermissionDenied() | ||||
|         q = Agent.objects.filter_by_role(request.user).filter( | ||||
|         q = Agent.objects.filter_by_role(request.user).filter(  # type: ignore | ||||
|             site__client_id=request.data["client"] | ||||
|         ) | ||||
|  | ||||
|     elif request.data["target"] == "site": | ||||
|         if not _has_perm_on_site(request.user, request.data["site"]): | ||||
|             raise PermissionDenied() | ||||
|         q = Agent.objects.filter_by_role(request.user).filter( | ||||
|         q = Agent.objects.filter_by_role(request.user).filter(  # type: ignore | ||||
|             site_id=request.data["site"] | ||||
|         ) | ||||
|  | ||||
|     elif request.data["target"] == "agents": | ||||
|         q = Agent.objects.filter_by_role(request.user).filter( | ||||
|         q = Agent.objects.filter_by_role(request.user).filter(  # type: ignore | ||||
|             agent_id__in=request.data["agents"] | ||||
|         ) | ||||
|  | ||||
|     elif request.data["target"] == "all": | ||||
|         q = Agent.objects.filter_by_role(request.user).only("pk", "monitoring_type") | ||||
|         q = Agent.objects.filter_by_role(request.user).only("pk", "monitoring_type")  # type: ignore | ||||
|  | ||||
|     else: | ||||
|         return notify_error("Something went wrong") | ||||
|  | ||||
|     if request.data["monType"] == "servers": | ||||
|         q = q.filter(monitoring_type="server") | ||||
|         q = q.filter(monitoring_type=AgentMonType.SERVER) | ||||
|     elif request.data["monType"] == "workstations": | ||||
|         q = q.filter(monitoring_type="workstation") | ||||
|         q = q.filter(monitoring_type=AgentMonType.WORKSTATION) | ||||
|  | ||||
|     if request.data["osType"] == "windows": | ||||
|         q = q.filter(plat="windows") | ||||
|     elif request.data["osType"] == "linux": | ||||
|         q = q.filter(plat="linux") | ||||
|     if request.data["osType"] == AgentPlat.WINDOWS: | ||||
|         q = q.filter(plat=AgentPlat.WINDOWS) | ||||
|     elif request.data["osType"] == AgentPlat.LINUX: | ||||
|         q = q.filter(plat=AgentPlat.LINUX) | ||||
|  | ||||
|     agents: list[int] = [agent.pk for agent in q] | ||||
|  | ||||
| @@ -844,7 +917,7 @@ def bulk(request): | ||||
|             shell, | ||||
|             request.data["timeout"], | ||||
|             request.user.username[:50], | ||||
|             run_on_offline=request.data["offlineAgents"], | ||||
|             request.data["run_as_user"], | ||||
|         ) | ||||
|         return Response(f"Command will now be run on {len(agents)} agents") | ||||
|  | ||||
| @@ -856,6 +929,7 @@ def bulk(request): | ||||
|             request.data["args"], | ||||
|             request.data["timeout"], | ||||
|             request.user.username[:50], | ||||
|             request.data["run_as_user"], | ||||
|         ) | ||||
|         return Response(f"{script.name} will now be run on {len(agents)} agents") | ||||
|  | ||||
| @@ -882,7 +956,7 @@ def agent_maintenance(request): | ||||
|             raise PermissionDenied() | ||||
|  | ||||
|         count = ( | ||||
|             Agent.objects.filter_by_role(request.user) | ||||
|             Agent.objects.filter_by_role(request.user)  # type: ignore | ||||
|             .filter(site__client_id=request.data["id"]) | ||||
|             .update(maintenance_mode=request.data["action"]) | ||||
|         ) | ||||
| @@ -892,7 +966,7 @@ def agent_maintenance(request): | ||||
|             raise PermissionDenied() | ||||
|  | ||||
|         count = ( | ||||
|             Agent.objects.filter_by_role(request.user) | ||||
|             Agent.objects.filter_by_role(request.user)  # type: ignore | ||||
|             .filter(site_id=request.data["id"]) | ||||
|             .update(maintenance_mode=request.data["action"]) | ||||
|         ) | ||||
| @@ -909,6 +983,13 @@ def agent_maintenance(request): | ||||
|         ) | ||||
|  | ||||
|  | ||||
| @api_view(["GET"]) | ||||
| @permission_classes([IsAuthenticated, RecoverAgentPerms]) | ||||
| def bulk_agent_recovery(request): | ||||
|     bulk_recover_agents_task.delay() | ||||
|     return Response("Agents will now be recovered") | ||||
|  | ||||
|  | ||||
| class WMI(APIView): | ||||
|     permission_classes = [IsAuthenticated, AgentPerms] | ||||
|  | ||||
| @@ -928,6 +1009,6 @@ class AgentHistoryView(APIView): | ||||
|             agent = get_object_or_404(Agent, agent_id=agent_id) | ||||
|             history = AgentHistory.objects.filter(agent=agent) | ||||
|         else: | ||||
|             history = AgentHistory.objects.filter_by_role(request.user) | ||||
|             history = AgentHistory.objects.filter_by_role(request.user)  # type: ignore | ||||
|         ctx = {"default_tz": get_default_timezone()} | ||||
|         return Response(AgentHistorySerializer(history, many=True, context=ctx).data) | ||||
|   | ||||
api/tacticalrmm/alerts/migrations/0011_alter_alert_agent.py (new file, 24 lines)
| @@ -0,0 +1,24 @@ | ||||
| # Generated by Django 4.0.3 on 2022-04-07 17:28 | ||||
|  | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| def delete_alerts_without_agent(apps, schema): | ||||
|     Alert = apps.get_model("alerts", "Alert") | ||||
|  | ||||
|     Alert.objects.filter(agent=None).delete() | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("agents", "0047_alter_agent_plat_alter_agent_site"), | ||||
|         ("alerts", "0010_auto_20210917_1954"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RunPython( | ||||
|             delete_alerts_without_agent, reverse_code=migrations.RunPython.noop | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,23 @@ | ||||
| # Generated by Django 4.0.5 on 2022-06-29 07:57 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('alerts', '0011_alter_alert_agent'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name='alert', | ||||
|             name='action_retcode', | ||||
|             field=models.BigIntegerField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name='alert', | ||||
|             name='resolved_action_retcode', | ||||
|             field=models.BigIntegerField(blank=True, null=True), | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,34 +1,28 @@ | ||||
| from __future__ import annotations | ||||
|  | ||||
| import re | ||||
| from typing import TYPE_CHECKING, Union | ||||
| from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, cast | ||||
|  | ||||
| from django.contrib.postgres.fields import ArrayField | ||||
| from django.db import models | ||||
| from django.db.models.fields import BooleanField, PositiveIntegerField | ||||
| from django.utils import timezone as djangotime | ||||
| from logs.models import BaseAuditModel, DebugLog | ||||
|  | ||||
| from logs.models import BaseAuditModel, DebugLog | ||||
| from tacticalrmm.constants import ( | ||||
|     AgentMonType, | ||||
|     AlertSeverity, | ||||
|     AlertType, | ||||
|     CheckType, | ||||
|     DebugLogType, | ||||
| ) | ||||
| from tacticalrmm.models import PermissionQuerySet | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from agents.models import Agent | ||||
|     from autotasks.models import AutomatedTask | ||||
|     from checks.models import Check | ||||
|  | ||||
|  | ||||
| SEVERITY_CHOICES = [ | ||||
|     ("info", "Informational"), | ||||
|     ("warning", "Warning"), | ||||
|     ("error", "Error"), | ||||
| ] | ||||
|  | ||||
| ALERT_TYPE_CHOICES = [ | ||||
|     ("availability", "Availability"), | ||||
|     ("check", "Check"), | ||||
|     ("task", "Task"), | ||||
|     ("custom", "Custom"), | ||||
| ] | ||||
|     from autotasks.models import AutomatedTask, TaskResult | ||||
|     from checks.models import Check, CheckResult | ||||
|     from clients.models import Client, Site | ||||
|  | ||||
|  | ||||
| class Alert(models.Model): | ||||
| @@ -56,7 +50,7 @@ class Alert(models.Model): | ||||
|         blank=True, | ||||
|     ) | ||||
|     alert_type = models.CharField( | ||||
|         max_length=20, choices=ALERT_TYPE_CHOICES, default="availability" | ||||
|         max_length=20, choices=AlertType.choices, default=AlertType.AVAILABILITY | ||||
|     ) | ||||
|     message = models.TextField(null=True, blank=True) | ||||
|     alert_time = models.DateTimeField(auto_now_add=True, null=True, blank=True) | ||||
| @@ -64,7 +58,9 @@ class Alert(models.Model): | ||||
|     snooze_until = models.DateTimeField(null=True, blank=True) | ||||
|     resolved = models.BooleanField(default=False) | ||||
|     resolved_on = models.DateTimeField(null=True, blank=True) | ||||
|     severity = models.CharField(max_length=30, choices=SEVERITY_CHOICES, default="info") | ||||
|     severity = models.CharField( | ||||
|         max_length=30, choices=AlertSeverity.choices, default=AlertSeverity.INFO | ||||
|     ) | ||||
|     email_sent = models.DateTimeField(null=True, blank=True) | ||||
|     resolved_email_sent = models.DateTimeField(null=True, blank=True) | ||||
|     sms_sent = models.DateTimeField(null=True, blank=True) | ||||
| @@ -73,72 +69,208 @@ class Alert(models.Model): | ||||
|     action_run = models.DateTimeField(null=True, blank=True) | ||||
|     action_stdout = models.TextField(null=True, blank=True) | ||||
|     action_stderr = models.TextField(null=True, blank=True) | ||||
|     action_retcode = models.IntegerField(null=True, blank=True) | ||||
|     action_retcode = models.BigIntegerField(null=True, blank=True) | ||||
|     action_execution_time = models.CharField(max_length=100, null=True, blank=True) | ||||
|     resolved_action_run = models.DateTimeField(null=True, blank=True) | ||||
|     resolved_action_stdout = models.TextField(null=True, blank=True) | ||||
|     resolved_action_stderr = models.TextField(null=True, blank=True) | ||||
|     resolved_action_retcode = models.IntegerField(null=True, blank=True) | ||||
|     resolved_action_retcode = models.BigIntegerField(null=True, blank=True) | ||||
|     resolved_action_execution_time = models.CharField( | ||||
|         max_length=100, null=True, blank=True | ||||
|     ) | ||||
|  | ||||
|     def __str__(self): | ||||
|         return self.message | ||||
|     def __str__(self) -> str: | ||||
|         return f"{self.alert_type} - {self.message}" | ||||
|  | ||||
|     def resolve(self): | ||||
|     @property | ||||
|     def assigned_agent(self) -> "Optional[Agent]": | ||||
|         return self.agent | ||||
|  | ||||
|     @property | ||||
|     def site(self) -> "Site": | ||||
|         return self.agent.site | ||||
|  | ||||
|     @property | ||||
|     def client(self) -> "Client": | ||||
|         return self.agent.client | ||||
|  | ||||
|     def resolve(self) -> None: | ||||
|         self.resolved = True | ||||
|         self.resolved_on = djangotime.now() | ||||
|         self.snoozed = False | ||||
|         self.snooze_until = None | ||||
|         self.save() | ||||
|         self.save(update_fields=["resolved", "resolved_on", "snoozed", "snooze_until"]) | ||||
|  | ||||
|     @classmethod | ||||
|     def create_or_return_availability_alert(cls, agent): | ||||
|         if not cls.objects.filter(agent=agent, resolved=False).exists(): | ||||
|             return cls.objects.create( | ||||
|                 agent=agent, | ||||
|                 alert_type="availability", | ||||
|                 severity="error", | ||||
|                 message=f"{agent.hostname} in {agent.client.name}\\{agent.site.name} is overdue.", | ||||
|                 hidden=True, | ||||
|     def create_or_return_availability_alert( | ||||
|         cls, agent: Agent, skip_create: bool = False | ||||
|     ) -> Optional[Alert]: | ||||
|         if not cls.objects.filter( | ||||
|             agent=agent, alert_type=AlertType.AVAILABILITY, resolved=False | ||||
|         ).exists(): | ||||
|             if skip_create: | ||||
|                 return None | ||||
|  | ||||
|             return cast( | ||||
|                 Alert, | ||||
|                 cls.objects.create( | ||||
|                     agent=agent, | ||||
|                     alert_type=AlertType.AVAILABILITY, | ||||
|                     severity=AlertSeverity.ERROR, | ||||
|                     message=f"{agent.hostname} in {agent.client.name}\\{agent.site.name} is overdue.", | ||||
|                     hidden=True, | ||||
|                 ), | ||||
|             ) | ||||
|         else: | ||||
|             return cls.objects.get(agent=agent, resolved=False) | ||||
|             try: | ||||
|                 return cast( | ||||
|                     Alert, | ||||
|                     cls.objects.get( | ||||
|                         agent=agent, alert_type=AlertType.AVAILABILITY, resolved=False | ||||
|                     ), | ||||
|                 ) | ||||
|             except cls.MultipleObjectsReturned: | ||||
|                 alerts = cls.objects.filter( | ||||
|                     agent=agent, alert_type=AlertType.AVAILABILITY, resolved=False | ||||
|                 ) | ||||
|  | ||||
|                 last_alert = cast(Alert, alerts.last()) | ||||
|  | ||||
|                 # cycle through other alerts and resolve | ||||
|                 for alert in alerts: | ||||
|                     if alert.id != last_alert.pk: | ||||
|                         alert.resolve() | ||||
|  | ||||
|                 return last_alert | ||||
|             except cls.DoesNotExist: | ||||
|                 return None | ||||
|  | ||||
|     @classmethod | ||||
|     def create_or_return_check_alert(cls, check): | ||||
|     def create_or_return_check_alert( | ||||
|         cls, | ||||
|         check: "Check", | ||||
|         agent: "Agent", | ||||
|         alert_severity: Optional[str] = None, | ||||
|         skip_create: bool = False, | ||||
|     ) -> "Optional[Alert]": | ||||
|  | ||||
|         if not cls.objects.filter(assigned_check=check, resolved=False).exists(): | ||||
|             return cls.objects.create( | ||||
|                 assigned_check=check, | ||||
|                 alert_type="check", | ||||
|                 severity=check.alert_severity, | ||||
|                 message=f"{check.agent.hostname} has a {check.check_type} check: {check.readable_desc} that failed.", | ||||
|                 hidden=True, | ||||
|         # need to pass agent if the check is a policy | ||||
|         if not cls.objects.filter( | ||||
|             assigned_check=check, | ||||
|             agent=agent, | ||||
|             resolved=False, | ||||
|         ).exists(): | ||||
|             if skip_create: | ||||
|                 return None | ||||
|  | ||||
|             return cast( | ||||
|                 Alert, | ||||
|                 cls.objects.create( | ||||
|                     assigned_check=check, | ||||
|                     agent=agent, | ||||
|                     alert_type=AlertType.CHECK, | ||||
|                     severity=check.alert_severity | ||||
|                     if check.check_type | ||||
|                     not in [ | ||||
|                         CheckType.MEMORY, | ||||
|                         CheckType.CPU_LOAD, | ||||
|                         CheckType.DISK_SPACE, | ||||
|                         CheckType.SCRIPT, | ||||
|                     ] | ||||
|                     else alert_severity, | ||||
|                     message=f"{agent.hostname} has a {check.check_type} check: {check.readable_desc} that failed.", | ||||
|                     hidden=True, | ||||
|                 ), | ||||
|             ) | ||||
|         else: | ||||
|             return cls.objects.get(assigned_check=check, resolved=False) | ||||
|             try: | ||||
|                 return cast( | ||||
|                     Alert, | ||||
|                     cls.objects.get( | ||||
|                         assigned_check=check, | ||||
|                         agent=agent, | ||||
|                         resolved=False, | ||||
|                     ), | ||||
|                 ) | ||||
|             except cls.MultipleObjectsReturned: | ||||
|                 alerts = cls.objects.filter( | ||||
|                     assigned_check=check, | ||||
|                     agent=agent, | ||||
|                     resolved=False, | ||||
|                 ) | ||||
|                 last_alert = cast(Alert, alerts.last()) | ||||
|  | ||||
|                 # cycle through other alerts and resolve | ||||
|                 for alert in alerts: | ||||
|                     if alert.id != last_alert.pk: | ||||
|                         alert.resolve() | ||||
|  | ||||
|                 return last_alert | ||||
|             except cls.DoesNotExist: | ||||
|                 return None | ||||
|  | ||||
|     @classmethod | ||||
|     def create_or_return_task_alert(cls, task): | ||||
|     def create_or_return_task_alert( | ||||
|         cls, | ||||
|         task: "AutomatedTask", | ||||
|         agent: "Agent", | ||||
|         skip_create: bool = False, | ||||
|     ) -> "Optional[Alert]": | ||||
|  | ||||
|         if not cls.objects.filter(assigned_task=task, resolved=False).exists(): | ||||
|             return cls.objects.create( | ||||
|                 assigned_task=task, | ||||
|                 alert_type="task", | ||||
|                 severity=task.alert_severity, | ||||
|                 message=f"{task.agent.hostname} has task: {task.name} that failed.", | ||||
|                 hidden=True, | ||||
|         if not cls.objects.filter( | ||||
|             assigned_task=task, | ||||
|             agent=agent, | ||||
|             resolved=False, | ||||
|         ).exists(): | ||||
|             if skip_create: | ||||
|                 return None | ||||
|  | ||||
|             return cast( | ||||
|                 Alert, | ||||
|                 cls.objects.create( | ||||
|                     assigned_task=task, | ||||
|                     agent=agent, | ||||
|                     alert_type=AlertType.TASK, | ||||
|                     severity=task.alert_severity, | ||||
|                     message=f"{agent.hostname} has task: {task.name} that failed.", | ||||
|                     hidden=True, | ||||
|                 ), | ||||
|             ) | ||||
|  | ||||
|         else: | ||||
|             return cls.objects.get(assigned_task=task, resolved=False) | ||||
|             try: | ||||
|                 return cast( | ||||
|                     Alert, | ||||
|                     cls.objects.get( | ||||
|                         assigned_task=task, | ||||
|                         agent=agent, | ||||
|                         resolved=False, | ||||
|                     ), | ||||
|                 ) | ||||
|             except cls.MultipleObjectsReturned: | ||||
|                 alerts = cls.objects.filter( | ||||
|                     assigned_task=task, | ||||
|                     agent=agent, | ||||
|                     resolved=False, | ||||
|                 ) | ||||
|                 last_alert = cast(Alert, alerts.last()) | ||||
|  | ||||
|                 # cycle through other alerts and resolve | ||||
|                 for alert in alerts: | ||||
|                     if alert.id != last_alert.pk: | ||||
|                         alert.resolve() | ||||
|  | ||||
|                 return last_alert | ||||
|             except cls.DoesNotExist: | ||||
|                 return None | ||||
|  | ||||
|     @classmethod | ||||
|     def handle_alert_failure(cls, instance: Union[Agent, AutomatedTask, Check]) -> None: | ||||
|     def handle_alert_failure( | ||||
|         cls, instance: Union[Agent, TaskResult, CheckResult] | ||||
|     ) -> None: | ||||
|         from agents.models import Agent | ||||
|         from autotasks.models import AutomatedTask | ||||
|         from checks.models import Check | ||||
|         from autotasks.models import TaskResult | ||||
|         from checks.models import CheckResult | ||||
|  | ||||
|         # set variables | ||||
|         dashboard_severities = None | ||||
| @@ -150,6 +282,7 @@ class Alert(models.Model): | ||||
|         alert_interval = None | ||||
|         email_task = None | ||||
|         text_task = None | ||||
|         run_script_action = None | ||||
|  | ||||
|         # check what the instance passed is | ||||
|         if isinstance(instance, Agent): | ||||
| @@ -163,30 +296,21 @@ class Alert(models.Model): | ||||
|             dashboard_alert = instance.overdue_dashboard_alert | ||||
|             alert_template = instance.alert_template | ||||
|             maintenance_mode = instance.maintenance_mode | ||||
|             alert_severity = "error" | ||||
|             alert_severity = AlertSeverity.ERROR | ||||
|             agent = instance | ||||
|             dashboard_severities = [AlertSeverity.ERROR] | ||||
|             email_severities = [AlertSeverity.ERROR] | ||||
|             text_severities = [AlertSeverity.ERROR] | ||||
|  | ||||
|             # set alert_template settings | ||||
|             if alert_template: | ||||
|                 dashboard_severities = ["error"] | ||||
|                 email_severities = ["error"] | ||||
|                 text_severities = ["error"] | ||||
|                 always_dashboard = alert_template.agent_always_alert | ||||
|                 always_email = alert_template.agent_always_email | ||||
|                 always_text = alert_template.agent_always_text | ||||
|                 alert_interval = alert_template.agent_periodic_alert_days | ||||
|                 run_script_action = alert_template.agent_script_actions | ||||
|  | ||||
|             if instance.should_create_alert(alert_template): | ||||
|                 alert = cls.create_or_return_availability_alert(instance) | ||||
|             else: | ||||
|                 # check if there is an alert that exists | ||||
|                 if cls.objects.filter(agent=instance, resolved=False).exists(): | ||||
|                     alert = cls.objects.get(agent=instance, resolved=False) | ||||
|                 else: | ||||
|                     alert = None | ||||
|  | ||||
|         elif isinstance(instance, Check): | ||||
|         elif isinstance(instance, CheckResult): | ||||
|             from checks.tasks import ( | ||||
|                 handle_check_email_alert_task, | ||||
|                 handle_check_sms_alert_task, | ||||
| @@ -195,75 +319,98 @@ class Alert(models.Model): | ||||
|             email_task = handle_check_email_alert_task | ||||
|             text_task = handle_check_sms_alert_task | ||||
|  | ||||
|             email_alert = instance.email_alert | ||||
|             text_alert = instance.text_alert | ||||
|             dashboard_alert = instance.dashboard_alert | ||||
|             email_alert = instance.assigned_check.email_alert | ||||
|             text_alert = instance.assigned_check.text_alert | ||||
|             dashboard_alert = instance.assigned_check.dashboard_alert | ||||
|             alert_template = instance.agent.alert_template | ||||
|             maintenance_mode = instance.agent.maintenance_mode | ||||
|             alert_severity = instance.alert_severity | ||||
|             alert_severity = ( | ||||
|                 instance.assigned_check.alert_severity | ||||
|                 if instance.assigned_check.check_type | ||||
|                 not in [ | ||||
|                     CheckType.MEMORY, | ||||
|                     CheckType.CPU_LOAD, | ||||
|                     CheckType.DISK_SPACE, | ||||
|                     CheckType.SCRIPT, | ||||
|                 ] | ||||
|                 else instance.alert_severity | ||||
|             ) | ||||
|             agent = instance.agent | ||||
|  | ||||
|             # set alert_template settings | ||||
|             if alert_template: | ||||
|                 dashboard_severities = alert_template.check_dashboard_alert_severity | ||||
|                 email_severities = alert_template.check_email_alert_severity | ||||
|                 text_severities = alert_template.check_text_alert_severity | ||||
|                 dashboard_severities = ( | ||||
|                     alert_template.check_dashboard_alert_severity | ||||
|                     if alert_template.check_dashboard_alert_severity | ||||
|                     else [ | ||||
|                         AlertSeverity.ERROR, | ||||
|                         AlertSeverity.WARNING, | ||||
|                         AlertSeverity.INFO, | ||||
|                     ] | ||||
|                 ) | ||||
|                 email_severities = ( | ||||
|                     alert_template.check_email_alert_severity | ||||
|                     if alert_template.check_email_alert_severity | ||||
|                     else [AlertSeverity.ERROR, AlertSeverity.WARNING] | ||||
|                 ) | ||||
|                 text_severities = ( | ||||
|                     alert_template.check_text_alert_severity | ||||
|                     if alert_template.check_text_alert_severity | ||||
|                     else [AlertSeverity.ERROR, AlertSeverity.WARNING] | ||||
|                 ) | ||||
|                 always_dashboard = alert_template.check_always_alert | ||||
|                 always_email = alert_template.check_always_email | ||||
|                 always_text = alert_template.check_always_text | ||||
|                 alert_interval = alert_template.check_periodic_alert_days | ||||
|                 run_script_action = alert_template.check_script_actions | ||||
|  | ||||
|             if instance.should_create_alert(alert_template): | ||||
|                 alert = cls.create_or_return_check_alert(instance) | ||||
|             else: | ||||
|                 # check if there is an alert that exists | ||||
|                 if cls.objects.filter(assigned_check=instance, resolved=False).exists(): | ||||
|                     alert = cls.objects.get(assigned_check=instance, resolved=False) | ||||
|                 else: | ||||
|                     alert = None | ||||
|  | ||||
|         elif isinstance(instance, AutomatedTask): | ||||
|         elif isinstance(instance, TaskResult): | ||||
|             from autotasks.tasks import handle_task_email_alert, handle_task_sms_alert | ||||
|  | ||||
|             email_task = handle_task_email_alert | ||||
|             text_task = handle_task_sms_alert | ||||
|  | ||||
|             email_alert = instance.email_alert | ||||
|             text_alert = instance.text_alert | ||||
|             dashboard_alert = instance.dashboard_alert | ||||
|             email_alert = instance.task.email_alert | ||||
|             text_alert = instance.task.text_alert | ||||
|             dashboard_alert = instance.task.dashboard_alert | ||||
|             alert_template = instance.agent.alert_template | ||||
|             maintenance_mode = instance.agent.maintenance_mode | ||||
|             alert_severity = instance.alert_severity | ||||
|             alert_severity = instance.task.alert_severity | ||||
|             agent = instance.agent | ||||
|  | ||||
|             # set alert_template settings | ||||
|             if alert_template: | ||||
|                 dashboard_severities = alert_template.task_dashboard_alert_severity | ||||
|                 email_severities = alert_template.task_email_alert_severity | ||||
|                 text_severities = alert_template.task_text_alert_severity | ||||
|                 dashboard_severities = ( | ||||
|                     alert_template.task_dashboard_alert_severity | ||||
|                     if alert_template.task_dashboard_alert_severity | ||||
|                     else [AlertSeverity.ERROR, AlertSeverity.WARNING] | ||||
|                 ) | ||||
|                 email_severities = ( | ||||
|                     alert_template.task_email_alert_severity | ||||
|                     if alert_template.task_email_alert_severity | ||||
|                     else [AlertSeverity.ERROR, AlertSeverity.WARNING] | ||||
|                 ) | ||||
|                 text_severities = ( | ||||
|                     alert_template.task_text_alert_severity | ||||
|                     if alert_template.task_text_alert_severity | ||||
|                     else [AlertSeverity.ERROR, AlertSeverity.WARNING] | ||||
|                 ) | ||||
|                 always_dashboard = alert_template.task_always_alert | ||||
|                 always_email = alert_template.task_always_email | ||||
|                 always_text = alert_template.task_always_text | ||||
|                 alert_interval = alert_template.task_periodic_alert_days | ||||
|                 run_script_action = alert_template.task_script_actions | ||||
|  | ||||
|             if instance.should_create_alert(alert_template): | ||||
|                 alert = cls.create_or_return_task_alert(instance) | ||||
|             else: | ||||
|                 # check if there is an alert that exists | ||||
|                 if cls.objects.filter(assigned_task=instance, resolved=False).exists(): | ||||
|                     alert = cls.objects.get(assigned_task=instance, resolved=False) | ||||
|                 else: | ||||
|                     alert = None | ||||
|         else: | ||||
|             return | ||||
|  | ||||
|         alert = instance.get_or_create_alert_if_needed(alert_template) | ||||
|  | ||||
|         # return if agent is in maintenance mode | ||||
|         if maintenance_mode or not alert: | ||||
|         if not alert or maintenance_mode: | ||||
|             return | ||||
|  | ||||
|         # check if alert severity changed on check and update the alert | ||||
|         # check if alert severity changed and update the alert | ||||
|         if alert_severity != alert.severity: | ||||
|             alert.severity = alert_severity | ||||
|             alert.save(update_fields=["severity"]) | ||||
| @@ -272,19 +419,25 @@ class Alert(models.Model): | ||||
|         if dashboard_alert or always_dashboard: | ||||
|  | ||||
|             # check if alert template is set and specific severities are configured | ||||
|             if alert_template and alert.severity not in dashboard_severities:  # type: ignore | ||||
|                 pass | ||||
|             else: | ||||
|             if ( | ||||
|                 not alert_template | ||||
|                 or alert_template | ||||
|                 and dashboard_severities | ||||
|                 and alert.severity in dashboard_severities | ||||
|             ): | ||||
|                 alert.hidden = False | ||||
|                 alert.save() | ||||
|                 alert.save(update_fields=["hidden"]) | ||||
|  | ||||
|         # send email if enabled | ||||
|         if email_alert or always_email: | ||||
|  | ||||
|             # check if alert template is set and specific severities are configured | ||||
|             if alert_template and alert.severity not in email_severities:  # type: ignore | ||||
|                 pass | ||||
|             else: | ||||
|             if ( | ||||
|                 not alert_template | ||||
|                 or alert_template | ||||
|                 and email_severities | ||||
|                 and alert.severity in email_severities | ||||
|             ): | ||||
|                 email_task.delay( | ||||
|                     pk=alert.pk, | ||||
|                     alert_interval=alert_interval, | ||||
| @@ -294,13 +447,21 @@ class Alert(models.Model): | ||||
|         if text_alert or always_text: | ||||
|  | ||||
|             # check if alert template is set and specific severities are configured | ||||
|             if alert_template and alert.severity not in text_severities:  # type: ignore | ||||
|                 pass | ||||
|             else: | ||||
|             if ( | ||||
|                 not alert_template | ||||
|                 or alert_template | ||||
|                 and text_severities | ||||
|                 and alert.severity in text_severities | ||||
|             ): | ||||
|                 text_task.delay(pk=alert.pk, alert_interval=alert_interval) | ||||
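A note on the rewritten severity gates above (the dashboard, email and text branches all share the same shape): they rely on `and` binding tighter than `or`, so the condition groups as "no template at all" or "template set, severities configured, and this alert's severity is in the list". A minimal, self-contained sketch of that grouping (names are illustrative, not the model's):

```python
# Sketch only: `not a or a and b and c` parses as `(not a) or (a and b and c)`.
def should_notify(alert_template, severities, severity) -> bool:
    return not alert_template or (
        alert_template and bool(severities) and severity in severities
    )

assert should_notify(None, [], "error")                # no template: always notify
assert should_notify(object(), ["error"], "error")     # configured and matching
assert not should_notify(object(), ["error"], "info")  # configured but not matching
```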
|  | ||||
|         # check if any scripts should be run | ||||
|         if alert_template and alert_template.action and run_script_action and not alert.action_run:  # type: ignore | ||||
|         if ( | ||||
|             alert_template | ||||
|             and alert_template.action | ||||
|             and run_script_action | ||||
|             and not alert.action_run | ||||
|         ): | ||||
|             r = agent.run_script( | ||||
|                 scriptpk=alert_template.action.pk, | ||||
|                 args=alert.parse_script_args(alert_template.action_args), | ||||
| @@ -308,10 +469,11 @@ class Alert(models.Model): | ||||
|                 wait=True, | ||||
|                 full=True, | ||||
|                 run_on_any=True, | ||||
|                 run_as_user=False, | ||||
|             ) | ||||
|  | ||||
|             # command was successful | ||||
|             if type(r) == dict: | ||||
|             if isinstance(r, dict): | ||||
|                 alert.action_retcode = r["retcode"] | ||||
|                 alert.action_stdout = r["stdout"] | ||||
|                 alert.action_stderr = r["stderr"] | ||||
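The `type(r) == dict` comparisons are swapped for `isinstance(r, dict)` here and again in the resolved-action branch below; a quick illustration of why the isinstance form is the safer idiom when dict subclasses can show up:

```python
from collections import OrderedDict

r = OrderedDict(retcode=0, stdout="", stderr="")
assert isinstance(r, dict)    # True: OrderedDict subclasses dict
assert type(r) is not dict    # an exact type check would reject it
```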
| @@ -321,21 +483,24 @@ class Alert(models.Model): | ||||
|             else: | ||||
|                 DebugLog.error( | ||||
|                     agent=agent, | ||||
|                     log_type="scripting", | ||||
|                     log_type=DebugLogType.SCRIPTING, | ||||
|                     message=f"Failure action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) failure alert", | ||||
|                 ) | ||||
|  | ||||
|     @classmethod | ||||
|     def handle_alert_resolve(cls, instance: Union[Agent, AutomatedTask, Check]) -> None: | ||||
|     def handle_alert_resolve( | ||||
|         cls, instance: Union[Agent, TaskResult, CheckResult] | ||||
|     ) -> None: | ||||
|         from agents.models import Agent | ||||
|         from autotasks.models import AutomatedTask | ||||
|         from checks.models import Check | ||||
|         from autotasks.models import TaskResult | ||||
|         from checks.models import CheckResult | ||||
|  | ||||
|         # set variables | ||||
|         email_on_resolved = False | ||||
|         text_on_resolved = False | ||||
|         resolved_email_task = None | ||||
|         resolved_text_task = None | ||||
|         run_script_action = None | ||||
|  | ||||
|         # check what the instance passed is | ||||
|         if isinstance(instance, Agent): | ||||
| @@ -345,7 +510,6 @@ class Alert(models.Model): | ||||
|             resolved_text_task = agent_recovery_sms_task | ||||
|  | ||||
|             alert_template = instance.alert_template | ||||
|             alert = cls.objects.get(agent=instance, resolved=False) | ||||
|             maintenance_mode = instance.maintenance_mode | ||||
|             agent = instance | ||||
|  | ||||
| @@ -354,7 +518,12 @@ class Alert(models.Model): | ||||
|                 text_on_resolved = alert_template.agent_text_on_resolved | ||||
|                 run_script_action = alert_template.agent_script_actions | ||||
|  | ||||
|         elif isinstance(instance, Check): | ||||
|             if agent.overdue_email_alert: | ||||
|                 email_on_resolved = True | ||||
|             if agent.overdue_text_alert: | ||||
|                 text_on_resolved = True | ||||
|  | ||||
|         elif isinstance(instance, CheckResult): | ||||
|             from checks.tasks import ( | ||||
|                 handle_resolved_check_email_alert_task, | ||||
|                 handle_resolved_check_sms_alert_task, | ||||
| @@ -364,7 +533,6 @@ class Alert(models.Model): | ||||
|             resolved_text_task = handle_resolved_check_sms_alert_task | ||||
|  | ||||
|             alert_template = instance.agent.alert_template | ||||
|             alert = cls.objects.get(assigned_check=instance, resolved=False) | ||||
|             maintenance_mode = instance.agent.maintenance_mode | ||||
|             agent = instance.agent | ||||
|  | ||||
| @@ -373,7 +541,7 @@ class Alert(models.Model): | ||||
|                 text_on_resolved = alert_template.check_text_on_resolved | ||||
|                 run_script_action = alert_template.check_script_actions | ||||
|  | ||||
|         elif isinstance(instance, AutomatedTask): | ||||
|         elif isinstance(instance, TaskResult): | ||||
|             from autotasks.tasks import ( | ||||
|                 handle_resolved_task_email_alert, | ||||
|                 handle_resolved_task_sms_alert, | ||||
| @@ -383,7 +551,6 @@ class Alert(models.Model): | ||||
|             resolved_text_task = handle_resolved_task_sms_alert | ||||
|  | ||||
|             alert_template = instance.agent.alert_template | ||||
|             alert = cls.objects.get(assigned_task=instance, resolved=False) | ||||
|             maintenance_mode = instance.agent.maintenance_mode | ||||
|             agent = instance.agent | ||||
|  | ||||
| @@ -395,8 +562,10 @@ class Alert(models.Model): | ||||
|         else: | ||||
|             return | ||||
|  | ||||
|         alert = instance.get_or_create_alert_if_needed(alert_template) | ||||
|  | ||||
|         # return if agent is in maintenance mode | ||||
|         if maintenance_mode: | ||||
|         if not alert or maintenance_mode: | ||||
|             return | ||||
|  | ||||
|         alert.resolve() | ||||
| @@ -413,7 +582,7 @@ class Alert(models.Model): | ||||
|         if ( | ||||
|             alert_template | ||||
|             and alert_template.resolved_action | ||||
|             and run_script_action  # type: ignore | ||||
|             and run_script_action | ||||
|             and not alert.resolved_action_run | ||||
|         ): | ||||
|             r = agent.run_script( | ||||
| @@ -423,10 +592,11 @@ class Alert(models.Model): | ||||
|                 wait=True, | ||||
|                 full=True, | ||||
|                 run_on_any=True, | ||||
|                 run_as_user=False, | ||||
|             ) | ||||
|  | ||||
|             # command was successful | ||||
|             if type(r) == dict: | ||||
|             if isinstance(r, dict): | ||||
|                 alert.resolved_action_retcode = r["retcode"] | ||||
|                 alert.resolved_action_stdout = r["stdout"] | ||||
|                 alert.resolved_action_stderr = r["stderr"] | ||||
| @@ -438,11 +608,11 @@ class Alert(models.Model): | ||||
|             else: | ||||
|                 DebugLog.error( | ||||
|                     agent=agent, | ||||
|                     log_type="scripting", | ||||
|                     log_type=DebugLogType.SCRIPTING, | ||||
|                     message=f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) resolved alert", | ||||
|                 ) | ||||
|  | ||||
|     def parse_script_args(self, args: list[str]): | ||||
|     def parse_script_args(self, args: List[str]) -> List[str]: | ||||
|  | ||||
|         if not args: | ||||
|             return [] | ||||
| @@ -463,9 +633,9 @@ class Alert(models.Model): | ||||
|                     continue | ||||
|  | ||||
|                 try: | ||||
|                     temp_args.append(re.sub("\\{\\{.*\\}\\}", value, arg))  # type: ignore | ||||
|                     temp_args.append(re.sub("\\{\\{.*\\}\\}", value, arg)) | ||||
|                 except Exception as e: | ||||
|                     DebugLog.error(log_type="scripting", message=str(e)) | ||||
|                     DebugLog.error(log_type=DebugLogType.SCRIPTING, message=str(e)) | ||||
|                     continue | ||||
|  | ||||
|             else: | ||||
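For context on the substitution kept above: `re.sub` replaces a `{{...}}` placeholder inside a single script argument with the looked-up value. A standalone sketch (the argument and value are made up); note the greedy `.*`, which appears to assume one placeholder per argument:

```python
import re

def substitute_arg(arg: str, value: str) -> str:
    # Replace the {{...}} placeholder in one script argument with `value`.
    return re.sub(r"\{\{.*\}\}", value, arg)

print(substitute_arg("--client={{client.name}}", "Acme"))  # --client=Acme
```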
| @@ -535,17 +705,17 @@ class AlertTemplate(BaseAuditModel): | ||||
|  | ||||
|     # check alert settings | ||||
|     check_email_alert_severity = ArrayField( | ||||
|         models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES), | ||||
|         models.CharField(max_length=25, blank=True, choices=AlertSeverity.choices), | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     check_text_alert_severity = ArrayField( | ||||
|         models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES), | ||||
|         models.CharField(max_length=25, blank=True, choices=AlertSeverity.choices), | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     check_dashboard_alert_severity = ArrayField( | ||||
|         models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES), | ||||
|         models.CharField(max_length=25, blank=True, choices=AlertSeverity.choices), | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
| @@ -559,17 +729,17 @@ class AlertTemplate(BaseAuditModel): | ||||
|  | ||||
|     # task alert settings | ||||
|     task_email_alert_severity = ArrayField( | ||||
|         models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES), | ||||
|         models.CharField(max_length=25, blank=True, choices=AlertSeverity.choices), | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     task_text_alert_severity = ArrayField( | ||||
|         models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES), | ||||
|         models.CharField(max_length=25, blank=True, choices=AlertSeverity.choices), | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     task_dashboard_alert_severity = ArrayField( | ||||
|         models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES), | ||||
|         models.CharField(max_length=25, blank=True, choices=AlertSeverity.choices), | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
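The ArrayFields now take their choices from `AlertSeverity.choices`, which points at a Django `TextChoices` enum rather than a bare tuple of pairs. A hedged sketch of what such an enum typically looks like; the real definition lives in `tacticalrmm/constants.py` and may use different members or labels:

```python
from django.db import models

class AlertSeverity(models.TextChoices):
    # Hypothetical members; shown only to illustrate the .choices API.
    INFO = "info", "Info"
    WARNING = "warning", "Warning"
    ERROR = "error", "Error"

# AlertSeverity.choices == [("info", "Info"), ("warning", "Warning"), ("error", "Error")],
# which is exactly the shape CharField(choices=...) expects.
```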
| @@ -595,22 +765,22 @@ class AlertTemplate(BaseAuditModel): | ||||
|         "agents.Agent", related_name="alert_exclusions", blank=True | ||||
|     ) | ||||
|  | ||||
|     def __str__(self): | ||||
|     def __str__(self) -> str: | ||||
|         return self.name | ||||
|  | ||||
|     def is_agent_excluded(self, agent): | ||||
|     def is_agent_excluded(self, agent: "Agent") -> bool: | ||||
|         return ( | ||||
|             agent in self.excluded_agents.all() | ||||
|             or agent.site in self.excluded_sites.all() | ||||
|             or agent.client in self.excluded_clients.all() | ||||
|             or agent.monitoring_type == "workstation" | ||||
|             or agent.monitoring_type == AgentMonType.WORKSTATION | ||||
|             and self.exclude_workstations | ||||
|             or agent.monitoring_type == "server" | ||||
|             or agent.monitoring_type == AgentMonType.SERVER | ||||
|             and self.exclude_servers | ||||
|         ) | ||||
|  | ||||
|     @staticmethod | ||||
|     def serialize(alert_template): | ||||
|     def serialize(alert_template: AlertTemplate) -> Dict[str, Any]: | ||||
|         # serializes the agent and returns json | ||||
|         from .serializers import AlertTemplateAuditSerializer | ||||
|  | ||||
|   | ||||
| @@ -1,10 +1,15 @@ | ||||
| from typing import TYPE_CHECKING | ||||
|  | ||||
| from django.shortcuts import get_object_or_404 | ||||
| from rest_framework import permissions | ||||
|  | ||||
| from tacticalrmm.permissions import _has_perm, _has_perm_on_agent | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from accounts.models import User | ||||
|  | ||||
| def _has_perm_on_alert(user, id: int): | ||||
|  | ||||
| def _has_perm_on_alert(user: "User", id: int) -> bool: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     role = user.role | ||||
| @@ -19,10 +24,6 @@ def _has_perm_on_alert(user, id: int): | ||||
|  | ||||
|     if alert.agent: | ||||
|         agent_id = alert.agent.agent_id | ||||
|     elif alert.assigned_check: | ||||
|         agent_id = alert.assigned_check.agent.agent_id | ||||
|     elif alert.assigned_task: | ||||
|         agent_id = alert.assigned_task.agent.agent_id | ||||
|     else: | ||||
|         return True | ||||
|  | ||||
| @@ -30,7 +31,7 @@ def _has_perm_on_alert(user, id: int): | ||||
|  | ||||
|  | ||||
| class AlertPerms(permissions.BasePermission): | ||||
|     def has_permission(self, r, view): | ||||
|     def has_permission(self, r, view) -> bool: | ||||
|         if r.method == "GET" or r.method == "PATCH": | ||||
|             if "pk" in view.kwargs.keys(): | ||||
|                 return _has_perm(r, "can_list_alerts") and _has_perm_on_alert( | ||||
| @@ -48,7 +49,7 @@ class AlertPerms(permissions.BasePermission): | ||||
|  | ||||
|  | ||||
| class AlertTemplatePerms(permissions.BasePermission): | ||||
|     def has_permission(self, r, view): | ||||
|     def has_permission(self, r, view) -> bool: | ||||
|         if r.method == "GET": | ||||
|             return _has_perm(r, "can_list_alerttemplates") | ||||
|         else: | ||||
|   | ||||
| @@ -1,105 +1,19 @@ | ||||
| from automation.serializers import PolicySerializer | ||||
| from clients.serializers import ClientMinimumSerializer, SiteMinimumSerializer | ||||
| from rest_framework.fields import SerializerMethodField | ||||
| from rest_framework.serializers import ModelSerializer, ReadOnlyField | ||||
|  | ||||
| from tacticalrmm.utils import get_default_timezone | ||||
| from automation.serializers import PolicySerializer | ||||
| from clients.serializers import ClientMinimumSerializer, SiteMinimumSerializer | ||||
|  | ||||
| from .models import Alert, AlertTemplate | ||||
|  | ||||
|  | ||||
| class AlertSerializer(ModelSerializer): | ||||
|  | ||||
|     hostname = SerializerMethodField() | ||||
|     agent_id = SerializerMethodField() | ||||
|     client = SerializerMethodField() | ||||
|     site = SerializerMethodField() | ||||
|     alert_time = SerializerMethodField() | ||||
|     resolve_on = SerializerMethodField() | ||||
|     snoozed_until = SerializerMethodField() | ||||
|  | ||||
|     def get_agent_id(self, instance): | ||||
|         if instance.alert_type == "availability": | ||||
|             return instance.agent.agent_id if instance.agent else "" | ||||
|         elif instance.alert_type == "check": | ||||
|             return ( | ||||
|                 instance.assigned_check.agent.agent_id | ||||
|                 if instance.assigned_check | ||||
|                 else "" | ||||
|             ) | ||||
|         elif instance.alert_type == "task": | ||||
|             return ( | ||||
|                 instance.assigned_task.agent.agent_id if instance.assigned_task else "" | ||||
|             ) | ||||
|         else: | ||||
|             return "" | ||||
|  | ||||
|     def get_hostname(self, instance): | ||||
|         if instance.alert_type == "availability": | ||||
|             return instance.agent.hostname if instance.agent else "" | ||||
|         elif instance.alert_type == "check": | ||||
|             return ( | ||||
|                 instance.assigned_check.agent.hostname | ||||
|                 if instance.assigned_check | ||||
|                 else "" | ||||
|             ) | ||||
|         elif instance.alert_type == "task": | ||||
|             return ( | ||||
|                 instance.assigned_task.agent.hostname if instance.assigned_task else "" | ||||
|             ) | ||||
|         else: | ||||
|             return "" | ||||
|  | ||||
|     def get_client(self, instance): | ||||
|         if instance.alert_type == "availability": | ||||
|             return instance.agent.client.name if instance.agent else "" | ||||
|         elif instance.alert_type == "check": | ||||
|             return ( | ||||
|                 instance.assigned_check.agent.client.name | ||||
|                 if instance.assigned_check | ||||
|                 else "" | ||||
|             ) | ||||
|         elif instance.alert_type == "task": | ||||
|             return ( | ||||
|                 instance.assigned_task.agent.client.name | ||||
|                 if instance.assigned_task | ||||
|                 else "" | ||||
|             ) | ||||
|         else: | ||||
|             return "" | ||||
|  | ||||
|     def get_site(self, instance): | ||||
|         if instance.alert_type == "availability": | ||||
|             return instance.agent.site.name if instance.agent else "" | ||||
|         elif instance.alert_type == "check": | ||||
|             return ( | ||||
|                 instance.assigned_check.agent.site.name | ||||
|                 if instance.assigned_check | ||||
|                 else "" | ||||
|             ) | ||||
|         elif instance.alert_type == "task": | ||||
|             return ( | ||||
|                 instance.assigned_task.agent.site.name if instance.assigned_task else "" | ||||
|             ) | ||||
|         else: | ||||
|             return "" | ||||
|  | ||||
|     def get_alert_time(self, instance): | ||||
|         if instance.alert_time: | ||||
|             return instance.alert_time.astimezone(get_default_timezone()).timestamp() | ||||
|         else: | ||||
|             return None | ||||
|  | ||||
|     def get_resolve_on(self, instance): | ||||
|         if instance.resolved_on: | ||||
|             return instance.resolved_on.astimezone(get_default_timezone()).timestamp() | ||||
|         else: | ||||
|             return None | ||||
|  | ||||
|     def get_snoozed_until(self, instance): | ||||
|         if instance.snooze_until: | ||||
|             return instance.snooze_until.astimezone(get_default_timezone()).timestamp() | ||||
|         return None | ||||
|     hostname = ReadOnlyField(source="assigned_agent.hostname") | ||||
|     agent_id = ReadOnlyField(source="assigned_agent.agent_id") | ||||
|     client = ReadOnlyField(source="client.name") | ||||
|     site = ReadOnlyField(source="site.name") | ||||
|     alert_time = ReadOnlyField() | ||||
|  | ||||
|     class Meta: | ||||
|         model = Alert | ||||
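The serializer rewrite above swaps the per-field `SerializerMethodField` lookups for `ReadOnlyField(source=...)`, letting DRF walk the relation itself. A generic sketch of the pattern with placeholder models (Book/author are not part of this project):

```python
from rest_framework.serializers import ModelSerializer, ReadOnlyField

class BookSerializer(ModelSerializer):
    # Dotted `source` traverses the foreign key at serialization time,
    # so no get_<field>() method is needed.
    author_name = ReadOnlyField(source="author.name")

    class Meta:
        model = Book  # placeholder: assumes a Book model with an `author` FK
        fields = ("id", "title", "author_name")
```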
| @@ -121,11 +35,11 @@ class AlertTemplateSerializer(ModelSerializer): | ||||
|         fields = "__all__" | ||||
|  | ||||
|     def get_applied_count(self, instance): | ||||
|         count = 0 | ||||
|         count += instance.policies.count() | ||||
|         count += instance.clients.count() | ||||
|         count += instance.sites.count() | ||||
|         return count | ||||
|         return ( | ||||
|             instance.policies.count() | ||||
|             + instance.clients.count() | ||||
|             + instance.sites.count() | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class AlertTemplateRelationSerializer(ModelSerializer): | ||||
|   | ||||
| @@ -1,12 +1,13 @@ | ||||
| from django.utils import timezone as djangotime | ||||
|  | ||||
| from agents.models import Agent | ||||
| from tacticalrmm.celery import app | ||||
|  | ||||
| from .models import Alert | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def unsnooze_alerts() -> str: | ||||
|     from .models import Alert | ||||
|  | ||||
|     Alert.objects.filter(snoozed=True, snooze_until__lte=djangotime.now()).update( | ||||
|         snoozed=False, snooze_until=None | ||||
|     ) | ||||
| @@ -15,10 +16,10 @@ def unsnooze_alerts() -> str: | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def cache_agents_alert_template(): | ||||
|     from agents.models import Agent | ||||
|  | ||||
|     for agent in Agent.objects.only("pk"): | ||||
| def cache_agents_alert_template() -> str: | ||||
|     for agent in Agent.objects.only( | ||||
|         "pk", "site", "policy", "alert_template" | ||||
|     ).select_related("site", "policy", "alert_template"): | ||||
|         agent.set_alert_template() | ||||
|  | ||||
|     return "ok" | ||||
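The cached-template task now narrows what it pulls per agent. The point of pairing `only()` with `select_related()` is avoiding an N+1 query pattern; roughly:

```python
# Illustrative only. Without select_related, touching a related field on a
# narrowed queryset costs extra queries per agent:
for agent in Agent.objects.only("pk"):
    _ = agent.site          # one or more extra queries per row

# With the related rows joined into the original query, the loop body is free:
for agent in Agent.objects.only("pk", "site").select_related("site"):
    _ = agent.site          # already loaded
```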
| @@ -26,8 +27,6 @@ def cache_agents_alert_template(): | ||||
|  | ||||
| @app.task | ||||
| def prune_resolved_alerts(older_than_days: int) -> str: | ||||
|     from .models import Alert | ||||
|  | ||||
|     Alert.objects.filter(resolved=True).filter( | ||||
|         alert_time__lt=djangotime.now() - djangotime.timedelta(days=older_than_days) | ||||
|     ).delete() | ||||
|   | ||||
										
											
												File diff suppressed because it is too large
											
										
									
								
							| @@ -7,7 +7,7 @@ from rest_framework.permissions import IsAuthenticated | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.views import APIView | ||||
|  | ||||
| from tacticalrmm.utils import notify_error | ||||
| from tacticalrmm.helpers import notify_error | ||||
|  | ||||
| from .models import Alert, AlertTemplate | ||||
| from .permissions import AlertPerms, AlertTemplatePerms | ||||
| @@ -92,7 +92,7 @@ class GetAddAlerts(APIView): | ||||
|                 ) | ||||
|  | ||||
|             alerts = ( | ||||
|                 Alert.objects.filter_by_role(request.user) | ||||
|                 Alert.objects.filter_by_role(request.user)  # type: ignore | ||||
|                 .filter(clientFilter) | ||||
|                 .filter(severityFilter) | ||||
|                 .filter(resolvedFilter) | ||||
| @@ -102,7 +102,7 @@ class GetAddAlerts(APIView): | ||||
|             return Response(AlertSerializer(alerts, many=True).data) | ||||
|  | ||||
|         else: | ||||
|             alerts = Alert.objects.filter_by_role(request.user) | ||||
|             alerts = Alert.objects.filter_by_role(request.user)  # type: ignore | ||||
|             return Response(AlertSerializer(alerts, many=True).data) | ||||
|  | ||||
|     def post(self, request): | ||||
|   | ||||
							
								
								
									
0  api/tacticalrmm/apiv3/tests/__init__.py  Normal file
							| @@ -1,11 +1,8 @@ | ||||
| import json | ||||
| import os | ||||
| 
 | ||||
| from autotasks.models import AutomatedTask | ||||
| from django.conf import settings | ||||
| from django.utils import timezone as djangotime | ||||
| from model_bakery import baker | ||||
| 
 | ||||
| from autotasks.models import TaskResult | ||||
| from tacticalrmm.constants import CustomFieldModel, CustomFieldType, TaskStatus | ||||
| from tacticalrmm.test import TacticalTestCase | ||||
| 
 | ||||
| 
 | ||||
| @@ -16,46 +13,53 @@ class TestAPIv3(TacticalTestCase): | ||||
|         self.agent = baker.make_recipe("agents.agent") | ||||
| 
 | ||||
|     def test_get_checks(self): | ||||
|         url = f"/api/v3/{self.agent.agent_id}/checkrunner/" | ||||
|         agent = baker.make_recipe("agents.agent") | ||||
|         url = f"/api/v3/{agent.agent_id}/checkrunner/" | ||||
| 
 | ||||
|         # add a check | ||||
|         check1 = baker.make_recipe("checks.ping_check", agent=self.agent) | ||||
|         check1 = baker.make_recipe("checks.ping_check", agent=agent) | ||||
|         check_result1 = baker.make( | ||||
|             "checks.CheckResult", agent=agent, assigned_check=check1 | ||||
|         ) | ||||
|         r = self.client.get(url) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertEqual(r.data["check_interval"], self.agent.check_interval)  # type: ignore | ||||
|         self.assertEqual(len(r.data["checks"]), 1)  # type: ignore | ||||
|         self.assertEqual(r.data["check_interval"], self.agent.check_interval) | ||||
|         self.assertEqual(len(r.data["checks"]), 1) | ||||
| 
 | ||||
|         # override check run interval | ||||
|         check2 = baker.make_recipe( | ||||
|             "checks.ping_check", agent=self.agent, run_interval=20 | ||||
|             "checks.diskspace_check", agent=agent, run_interval=20 | ||||
|         ) | ||||
|         check_result2 = baker.make( | ||||
|             "checks.CheckResult", agent=agent, assigned_check=check2 | ||||
|         ) | ||||
| 
 | ||||
|         r = self.client.get(url) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertEqual(r.data["check_interval"], 20)  # type: ignore | ||||
|         self.assertEqual(len(r.data["checks"]), 2)  # type: ignore | ||||
|         self.assertEqual(len(r.data["checks"]), 2) | ||||
|         self.assertEqual(r.data["check_interval"], 20) | ||||
| 
 | ||||
|         # Set last_run on both checks and should return an empty list | ||||
|         check1.last_run = djangotime.now() | ||||
|         check1.save() | ||||
|         check2.last_run = djangotime.now() | ||||
|         check2.save() | ||||
|         check_result1.last_run = djangotime.now() | ||||
|         check_result1.save() | ||||
|         check_result2.last_run = djangotime.now() | ||||
|         check_result2.save() | ||||
| 
 | ||||
|         r = self.client.get(url) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertEqual(r.data["check_interval"], 20)  # type: ignore | ||||
|         self.assertFalse(r.data["checks"])  # type: ignore | ||||
|         self.assertEqual(r.data["check_interval"], 20) | ||||
|         self.assertFalse(r.data["checks"]) | ||||
| 
 | ||||
|         # set last_run greater than interval | ||||
|         check1.last_run = djangotime.now() - djangotime.timedelta(seconds=200) | ||||
|         check1.save() | ||||
|         check2.last_run = djangotime.now() - djangotime.timedelta(seconds=200) | ||||
|         check2.save() | ||||
|         check_result1.last_run = djangotime.now() - djangotime.timedelta(seconds=200) | ||||
|         check_result1.save() | ||||
|         check_result2.last_run = djangotime.now() - djangotime.timedelta(seconds=200) | ||||
|         check_result2.save() | ||||
| 
 | ||||
|         r = self.client.get(url) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertEqual(r.data["check_interval"], 20)  # type: ignore | ||||
|         self.assertEquals(len(r.data["checks"]), 2)  # type: ignore | ||||
|         self.assertEqual(r.data["check_interval"], 20) | ||||
|         self.assertEqual(len(r.data["checks"]), 2) | ||||
| 
 | ||||
|         url = "/api/v3/Maj34ACb324j234asdj2n34kASDjh34-DESKTOPTEST123/checkrunner/" | ||||
|         r = self.client.get(url) | ||||
| @@ -63,24 +67,6 @@ class TestAPIv3(TacticalTestCase): | ||||
| 
 | ||||
|         self.check_not_authenticated("get", url) | ||||
| 
 | ||||
|     def test_sysinfo(self): | ||||
|         # TODO replace this with golang wmi sample data | ||||
| 
 | ||||
|         url = "/api/v3/sysinfo/" | ||||
|         with open( | ||||
|             os.path.join( | ||||
|                 settings.BASE_DIR, "tacticalrmm/test_data/wmi_python_agent.json" | ||||
|             ) | ||||
|         ) as f: | ||||
|             wmi_py = json.load(f) | ||||
| 
 | ||||
|         payload = {"agent_id": self.agent.agent_id, "sysinfo": wmi_py} | ||||
| 
 | ||||
|         r = self.client.patch(url, payload, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
| 
 | ||||
|         self.check_not_authenticated("patch", url) | ||||
| 
 | ||||
|     def test_checkrunner_interval(self): | ||||
|         url = f"/api/v3/{self.agent.agent_id}/checkinterval/" | ||||
|         r = self.client.get(url, format="json") | ||||
| @@ -130,21 +116,30 @@ class TestAPIv3(TacticalTestCase): | ||||
|         self.assertEqual(len(r.json()["checks"]), 15) | ||||
| 
 | ||||
|     def test_task_runner_get(self): | ||||
|         from autotasks.serializers import TaskGOGetSerializer | ||||
| 
 | ||||
|         r = self.client.get("/api/v3/500/asdf9df9dfdf/taskrunner/") | ||||
|         self.assertEqual(r.status_code, 404) | ||||
| 
 | ||||
|         # setup data | ||||
|         agent = baker.make_recipe("agents.agent") | ||||
|         script = baker.make_recipe("scripts.script") | ||||
|         task = baker.make("autotasks.AutomatedTask", agent=agent, script=script) | ||||
|         script = baker.make("scripts.script") | ||||
| 
 | ||||
|         url = f"/api/v3/{task.pk}/{agent.agent_id}/taskrunner/"  # type: ignore | ||||
|         # setup data | ||||
|         task_actions = [ | ||||
|             {"type": "cmd", "command": "whoami", "timeout": 10, "shell": "cmd"}, | ||||
|             { | ||||
|                 "type": "script", | ||||
|                 "script": script.id, | ||||
|                 "script_args": ["test"], | ||||
|                 "timeout": 30, | ||||
|             }, | ||||
|             {"type": "script", "script": 3, "script_args": [], "timeout": 30}, | ||||
|         ] | ||||
| 
 | ||||
|         agent = baker.make_recipe("agents.agent") | ||||
|         task = baker.make("autotasks.AutomatedTask", agent=agent, actions=task_actions) | ||||
| 
 | ||||
|         url = f"/api/v3/{task.pk}/{agent.agent_id}/taskrunner/" | ||||
| 
 | ||||
|         r = self.client.get(url) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertEqual(TaskGOGetSerializer(task).data, r.data)  # type: ignore | ||||
| 
 | ||||
|     def test_task_runner_results(self): | ||||
|         from agents.models import AgentCustomField | ||||
| @@ -155,8 +150,9 @@ class TestAPIv3(TacticalTestCase): | ||||
|         # setup data | ||||
|         agent = baker.make_recipe("agents.agent") | ||||
|         task = baker.make("autotasks.AutomatedTask", agent=agent) | ||||
|         task_result = baker.make("autotasks.TaskResult", agent=agent, task=task) | ||||
| 
 | ||||
|         url = f"/api/v3/{task.pk}/{agent.agent_id}/taskrunner/"  # type: ignore | ||||
|         url = f"/api/v3/{task.pk}/{agent.agent_id}/taskrunner/" | ||||
| 
 | ||||
|         # test passing task | ||||
|         data = { | ||||
| @@ -168,7 +164,9 @@ class TestAPIv3(TacticalTestCase): | ||||
| 
 | ||||
|         r = self.client.patch(url, data) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertTrue(AutomatedTask.objects.get(pk=task.pk).status == "passing")  # type: ignore | ||||
|         self.assertTrue( | ||||
|             TaskResult.objects.get(pk=task_result.pk).status == TaskStatus.PASSING | ||||
|         ) | ||||
| 
 | ||||
|         # test failing task | ||||
|         data = { | ||||
| @@ -180,20 +178,33 @@ class TestAPIv3(TacticalTestCase): | ||||
| 
 | ||||
|         r = self.client.patch(url, data) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertTrue(AutomatedTask.objects.get(pk=task.pk).status == "failing")  # type: ignore | ||||
|         self.assertTrue( | ||||
|             TaskResult.objects.get(pk=task_result.pk).status == TaskStatus.FAILING | ||||
|         ) | ||||
| 
 | ||||
|         # test collector task | ||||
|         text = baker.make("core.CustomField", model="agent", type="text", name="Test") | ||||
|         text = baker.make( | ||||
|             "core.CustomField", | ||||
|             model=CustomFieldModel.AGENT, | ||||
|             type=CustomFieldType.TEXT, | ||||
|             name="Test", | ||||
|         ) | ||||
|         boolean = baker.make( | ||||
|             "core.CustomField", model="agent", type="checkbox", name="Test1" | ||||
|             "core.CustomField", | ||||
|             model=CustomFieldModel.AGENT, | ||||
|             type=CustomFieldType.CHECKBOX, | ||||
|             name="Test1", | ||||
|         ) | ||||
|         multiple = baker.make( | ||||
|             "core.CustomField", model="agent", type="multiple", name="Test2" | ||||
|             "core.CustomField", | ||||
|             model=CustomFieldModel.AGENT, | ||||
|             type=CustomFieldType.MULTIPLE, | ||||
|             name="Test2", | ||||
|         ) | ||||
| 
 | ||||
|         # test text fields | ||||
|         task.custom_field = text  # type: ignore | ||||
|         task.save()  # type: ignore | ||||
|         task.custom_field = text | ||||
|         task.save() | ||||
| 
 | ||||
|         # test failing with stderr | ||||
|         data = { | ||||
| @@ -205,7 +216,9 @@ class TestAPIv3(TacticalTestCase): | ||||
| 
 | ||||
|         r = self.client.patch(url, data) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertTrue(AutomatedTask.objects.get(pk=task.pk).status == "failing")  # type: ignore | ||||
|         self.assertTrue( | ||||
|             TaskResult.objects.get(pk=task_result.pk).status == TaskStatus.FAILING | ||||
|         ) | ||||
| 
 | ||||
|         # test saving to text field | ||||
|         data = { | ||||
| @@ -217,12 +230,17 @@ class TestAPIv3(TacticalTestCase): | ||||
| 
 | ||||
|         r = self.client.patch(url, data) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing")  # type: ignore | ||||
|         self.assertEqual(AgentCustomField.objects.get(field=text, agent=task.agent).value, "the last line")  # type: ignore | ||||
|         self.assertEqual( | ||||
|             TaskResult.objects.get(pk=task_result.pk).status, TaskStatus.PASSING | ||||
|         ) | ||||
|         self.assertEqual( | ||||
|             AgentCustomField.objects.get(field=text, agent=task.agent).value, | ||||
|             "the last line", | ||||
|         ) | ||||
| 
 | ||||
|         # test saving to checkbox field | ||||
|         task.custom_field = boolean  # type: ignore | ||||
|         task.save()  # type: ignore | ||||
|         task.custom_field = boolean | ||||
|         task.save() | ||||
| 
 | ||||
|         data = { | ||||
|             "stdout": "1", | ||||
| @@ -233,12 +251,16 @@ class TestAPIv3(TacticalTestCase): | ||||
| 
 | ||||
|         r = self.client.patch(url, data) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing")  # type: ignore | ||||
|         self.assertTrue(AgentCustomField.objects.get(field=boolean, agent=task.agent).value)  # type: ignore | ||||
|         self.assertEqual( | ||||
|             TaskResult.objects.get(pk=task_result.pk).status, TaskStatus.PASSING | ||||
|         ) | ||||
|         self.assertTrue( | ||||
|             AgentCustomField.objects.get(field=boolean, agent=task.agent).value | ||||
|         ) | ||||
| 
 | ||||
|         # test saving to multiple field with commas | ||||
|         task.custom_field = multiple  # type: ignore | ||||
|         task.save()  # type: ignore | ||||
|         task.custom_field = multiple | ||||
|         task.save() | ||||
| 
 | ||||
|         data = { | ||||
|             "stdout": "this,is,an,array", | ||||
| @@ -249,8 +271,13 @@ class TestAPIv3(TacticalTestCase): | ||||
| 
 | ||||
|         r = self.client.patch(url, data) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing")  # type: ignore | ||||
|         self.assertEqual(AgentCustomField.objects.get(field=multiple, agent=task.agent).value, ["this", "is", "an", "array"])  # type: ignore | ||||
|         self.assertEqual( | ||||
|             TaskResult.objects.get(pk=task_result.pk).status, TaskStatus.PASSING | ||||
|         ) | ||||
|         self.assertEqual( | ||||
|             AgentCustomField.objects.get(field=multiple, agent=task.agent).value, | ||||
|             ["this", "is", "an", "array"], | ||||
|         ) | ||||
| 
 | ||||
|         # test multiple with a single value | ||||
|         data = { | ||||
| @@ -262,5 +289,10 @@ class TestAPIv3(TacticalTestCase): | ||||
| 
 | ||||
|         r = self.client.patch(url, data) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing")  # type: ignore | ||||
|         self.assertEqual(AgentCustomField.objects.get(field=multiple, agent=task.agent).value, ["this"])  # type: ignore | ||||
|         self.assertEqual( | ||||
|             TaskResult.objects.get(pk=task_result.pk).status, TaskStatus.PASSING | ||||
|         ) | ||||
|         self.assertEqual( | ||||
|             AgentCustomField.objects.get(field=multiple, agent=task.agent).value, | ||||
|             ["this"], | ||||
|         ) | ||||
| @@ -9,7 +9,6 @@ urlpatterns = [ | ||||
|     path("<str:agentid>/checkinterval/", views.CheckRunnerInterval.as_view()), | ||||
|     path("<int:pk>/<str:agentid>/taskrunner/", views.TaskRunner.as_view()), | ||||
|     path("meshexe/", views.MeshExe.as_view()), | ||||
|     path("sysinfo/", views.SysInfo.as_view()), | ||||
|     path("newagent/", views.NewAgent.as_view()), | ||||
|     path("software/", views.Software.as_view()), | ||||
|     path("installer/", views.Installer.as_view()), | ||||
|   | ||||
| @@ -1,30 +1,47 @@ | ||||
| import asyncio | ||||
| import time | ||||
|  | ||||
| from accounts.models import User | ||||
| from agents.models import Agent, AgentHistory | ||||
| from agents.serializers import AgentHistorySerializer | ||||
| from autotasks.models import AutomatedTask | ||||
| from autotasks.serializers import TaskGOGetSerializer, TaskRunnerPatchSerializer | ||||
| from checks.models import Check | ||||
| from checks.serializers import CheckRunnerGetSerializer | ||||
| from core.models import CoreSettings | ||||
| from core.utils import download_mesh_agent, get_mesh_device_id, get_mesh_ws_url | ||||
| from django.conf import settings | ||||
| from django.db.models import Prefetch | ||||
| from django.shortcuts import get_object_or_404 | ||||
| from django.utils import timezone as djangotime | ||||
| from logs.models import DebugLog, PendingAction | ||||
| from packaging import version as pyver | ||||
| from rest_framework.authentication import TokenAuthentication | ||||
| from rest_framework.authtoken.models import Token | ||||
| from rest_framework.permissions import IsAuthenticated | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.views import APIView | ||||
| from software.models import InstalledSoftware | ||||
| from winupdate.models import WinUpdate, WinUpdatePolicy | ||||
|  | ||||
| from tacticalrmm.constants import MeshAgentIdent | ||||
| from tacticalrmm.utils import notify_error, reload_nats | ||||
| from accounts.models import User | ||||
| from agents.models import Agent, AgentHistory | ||||
| from agents.serializers import AgentHistorySerializer | ||||
| from autotasks.models import AutomatedTask, TaskResult | ||||
| from autotasks.serializers import TaskGOGetSerializer, TaskResultSerializer | ||||
| from checks.constants import CHECK_DEFER, CHECK_RESULT_DEFER | ||||
| from checks.models import Check, CheckResult | ||||
| from checks.serializers import CheckRunnerGetSerializer | ||||
| from core.utils import ( | ||||
|     download_mesh_agent, | ||||
|     get_core_settings, | ||||
|     get_mesh_device_id, | ||||
|     get_mesh_ws_url, | ||||
| ) | ||||
| from logs.models import DebugLog, PendingAction | ||||
| from software.models import InstalledSoftware | ||||
| from tacticalrmm.constants import ( | ||||
|     AGENT_DEFER, | ||||
|     AgentMonType, | ||||
|     AgentPlat, | ||||
|     AuditActionType, | ||||
|     AuditObjType, | ||||
|     CheckStatus, | ||||
|     DebugLogType, | ||||
|     GoArch, | ||||
|     MeshAgentIdent, | ||||
|     PAStatus, | ||||
| ) | ||||
| from tacticalrmm.helpers import notify_error | ||||
| from tacticalrmm.utils import reload_nats | ||||
| from winupdate.models import WinUpdate, WinUpdatePolicy | ||||
|  | ||||
|  | ||||
| class CheckIn(APIView): | ||||
| @@ -34,11 +51,12 @@ class CheckIn(APIView): | ||||
|  | ||||
|     # called once during tacticalagent windows service startup | ||||
|     def post(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         agent = get_object_or_404( | ||||
|             Agent.objects.defer(*AGENT_DEFER), agent_id=request.data["agent_id"] | ||||
|         ) | ||||
|         if not agent.choco_installed: | ||||
|             asyncio.run(agent.nats_cmd({"func": "installchoco"}, wait=False)) | ||||
|  | ||||
|         time.sleep(0.5) | ||||
|         asyncio.run(agent.nats_cmd({"func": "getwinupdates"}, wait=False)) | ||||
|         return Response("ok") | ||||
|  | ||||
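Several agent lookups in this file now go through `Agent.objects.defer(*AGENT_DEFER)`. The constant comes from `tacticalrmm.constants`; judging by the explicit `defer("agent__services", "agent__wmi_detail")` later in this diff, it presumably names the large JSON columns. A sketch under that assumption:

```python
# Assumed shape; the real tuple is defined in tacticalrmm/constants.py.
AGENT_DEFER = ("wmi_detail", "services")

# defer() leaves these columns out of the SELECT. They are loaded lazily,
# and only if something actually accesses agent.wmi_detail or agent.services.
agent = Agent.objects.defer(*AGENT_DEFER).get(agent_id="some-agent-id")
```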
| @@ -48,7 +66,9 @@ class SyncMeshNodeID(APIView): | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def post(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         agent = get_object_or_404( | ||||
|             Agent.objects.defer(*AGENT_DEFER), agent_id=request.data["agent_id"] | ||||
|         ) | ||||
|         if agent.mesh_node_id != request.data["nodeid"]: | ||||
|             agent.mesh_node_id = request.data["nodeid"] | ||||
|             agent.save(update_fields=["mesh_node_id"]) | ||||
| @@ -61,7 +81,9 @@ class Choco(APIView): | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def post(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         agent = get_object_or_404( | ||||
|             Agent.objects.defer(*AGENT_DEFER), agent_id=request.data["agent_id"] | ||||
|         ) | ||||
|         agent.choco_installed = request.data["installed"] | ||||
|         agent.save(update_fields=["choco_installed"]) | ||||
|         return Response("ok") | ||||
| @@ -72,7 +94,9 @@ class WinUpdates(APIView): | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def put(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         agent = get_object_or_404( | ||||
|             Agent.objects.defer(*AGENT_DEFER), agent_id=request.data["agent_id"] | ||||
|         ) | ||||
|  | ||||
|         needs_reboot: bool = request.data["needs_reboot"] | ||||
|         agent.needs_reboot = needs_reboot | ||||
| @@ -90,7 +114,7 @@ class WinUpdates(APIView): | ||||
|             asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False)) | ||||
|             DebugLog.info( | ||||
|                 agent=agent, | ||||
|                 log_type="windows_updates", | ||||
|                 log_type=DebugLogType.WIN_UPDATES, | ||||
|                 message=f"{agent.hostname} is rebooting after updates were installed.", | ||||
|             ) | ||||
|  | ||||
| @@ -98,8 +122,13 @@ class WinUpdates(APIView): | ||||
|         return Response("ok") | ||||
|  | ||||
|     def patch(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         agent = get_object_or_404( | ||||
|             Agent.objects.defer(*AGENT_DEFER), agent_id=request.data["agent_id"] | ||||
|         ) | ||||
|         u = agent.winupdates.filter(guid=request.data["guid"]).last()  # type: ignore | ||||
|         if not u: | ||||
|             raise WinUpdate.DoesNotExist | ||||
|  | ||||
|         success: bool = request.data["success"] | ||||
|         if success: | ||||
|             u.result = "success" | ||||
| @@ -122,8 +151,14 @@ class WinUpdates(APIView): | ||||
|         return Response("ok") | ||||
|  | ||||
|     def post(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         updates = request.data["wua_updates"] | ||||
|         if not updates: | ||||
|             return notify_error("Empty payload") | ||||
|  | ||||
|         agent = get_object_or_404( | ||||
|             Agent.objects.defer(*AGENT_DEFER), agent_id=request.data["agent_id"] | ||||
|         ) | ||||
|  | ||||
|         for update in updates: | ||||
|             if agent.winupdates.filter(guid=update["guid"]).exists():  # type: ignore | ||||
|                 u = agent.winupdates.filter(guid=update["guid"]).last()  # type: ignore | ||||
| @@ -162,7 +197,9 @@ class SupersededWinUpdate(APIView): | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def post(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         agent = get_object_or_404( | ||||
|             Agent.objects.defer(*AGENT_DEFER), agent_id=request.data["agent_id"] | ||||
|         ) | ||||
|         updates = agent.winupdates.filter(guid=request.data["guid"])  # type: ignore | ||||
|         for u in updates: | ||||
|             u.delete() | ||||
| @@ -175,12 +212,19 @@ class RunChecks(APIView): | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def get(self, request, agentid): | ||||
|         agent = get_object_or_404(Agent, agent_id=agentid) | ||||
|         checks = Check.objects.filter(agent__pk=agent.pk, overriden_by_policy=False) | ||||
|         agent = get_object_or_404( | ||||
|             Agent.objects.defer(*AGENT_DEFER).prefetch_related( | ||||
|                 Prefetch("agentchecks", queryset=Check.objects.select_related("script")) | ||||
|             ), | ||||
|             agent_id=agentid, | ||||
|         ) | ||||
|         checks = agent.get_checks_with_policies(exclude_overridden=True) | ||||
|         ret = { | ||||
|             "agent": agent.pk, | ||||
|             "check_interval": agent.check_interval, | ||||
|             "checks": CheckRunnerGetSerializer(checks, many=True).data, | ||||
|             "checks": CheckRunnerGetSerializer( | ||||
|                 checks, context={"agent": agent}, many=True | ||||
|             ).data, | ||||
|         } | ||||
|         return Response(ret) | ||||
|  | ||||
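On the `Prefetch` used above: nesting a `select_related("script")` queryset inside the prefetch keeps the query count flat even when every check has a script attached. A rough sketch of the effect:

```python
from django.db.models import Prefetch

# Without the custom queryset: 1 query for the agent, 1 for agentchecks,
# then 1 more per check the first time check.script is touched.
# With it: 1 query for the agent, 1 for agentchecks joined to their scripts.
agent = (
    Agent.objects
    .prefetch_related(
        Prefetch("agentchecks", queryset=Check.objects.select_related("script"))
    )
    .get(agent_id="some-agent-id")
)
```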
| @@ -190,45 +234,72 @@ class CheckRunner(APIView): | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def get(self, request, agentid): | ||||
|         agent = get_object_or_404(Agent, agent_id=agentid) | ||||
|         checks = agent.agentchecks.filter(overriden_by_policy=False)  # type: ignore | ||||
|         agent = get_object_or_404( | ||||
|             Agent.objects.defer(*AGENT_DEFER).prefetch_related( | ||||
|                 Prefetch("agentchecks", queryset=Check.objects.select_related("script")) | ||||
|             ), | ||||
|             agent_id=agentid, | ||||
|         ) | ||||
|         checks = agent.get_checks_with_policies(exclude_overridden=True) | ||||
|  | ||||
|         run_list = [ | ||||
|             check | ||||
|             for check in checks | ||||
|             # always run if check hasn't run yet | ||||
|             if not check.last_run | ||||
|             # if a check interval is set, see if the correct amount of seconds have passed | ||||
|             if not isinstance(check.check_result, CheckResult) | ||||
|             or not check.check_result.last_run | ||||
|             # see if the correct amount of seconds have passed | ||||
|             or ( | ||||
|                 check.run_interval | ||||
|                 and ( | ||||
|                     check.last_run | ||||
|                     < djangotime.now() | ||||
|                     - djangotime.timedelta(seconds=check.run_interval) | ||||
|                 check.check_result.last_run | ||||
|                 < djangotime.now() | ||||
|                 - djangotime.timedelta( | ||||
|                     seconds=check.run_interval | ||||
|                     if check.run_interval | ||||
|                     else agent.check_interval | ||||
|                 ) | ||||
|             ) | ||||
|             # if check interval isn't set, make sure the agent's check interval has passed before running | ||||
|             or ( | ||||
|                 not check.run_interval | ||||
|                 and check.last_run | ||||
|                 < djangotime.now() - djangotime.timedelta(seconds=agent.check_interval) | ||||
|             ) | ||||
|         ] | ||||
|  | ||||
|         ret = { | ||||
|             "agent": agent.pk, | ||||
|             "check_interval": agent.check_run_interval(), | ||||
|             "checks": CheckRunnerGetSerializer(run_list, many=True).data, | ||||
|             "checks": CheckRunnerGetSerializer( | ||||
|                 run_list, context={"agent": agent}, many=True | ||||
|             ).data, | ||||
|         } | ||||
|         return Response(ret) | ||||
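The list comprehension above folds the "never ran / per-check interval / agent default interval" decision into one expression. A small pure-function restatement of that rule, assuming the same semantics (a run_interval of 0 means no per-check override):

```python
from datetime import datetime, timedelta, timezone
from typing import Optional

def check_is_due(
    last_run: Optional[datetime],
    run_interval: int,     # per-check override in seconds, 0 = not set
    agent_interval: int,   # agent-wide default in seconds
    now: Optional[datetime] = None,
) -> bool:
    if last_run is None:
        return True  # never ran before: always due
    now = now or datetime.now(timezone.utc)
    interval = run_interval if run_interval else agent_interval
    return last_run < now - timedelta(seconds=interval)

# e.g. a check that last ran 200s ago with a 20s override is due again:
past = datetime.now(timezone.utc) - timedelta(seconds=200)
assert check_is_due(past, 20, 120)
assert not check_is_due(past, 0, 300)  # falls back to the 300s agent interval
```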
|  | ||||
|     def patch(self, request): | ||||
|         check = get_object_or_404(Check, pk=request.data["id"]) | ||||
|         if "agent_id" not in request.data.keys(): | ||||
|             return notify_error("Agent upgrade required") | ||||
|  | ||||
|         check.last_run = djangotime.now() | ||||
|         check.save(update_fields=["last_run"]) | ||||
|         status = check.handle_check(request.data) | ||||
|         if status == "failing" and check.assignedtask.exists():  # type: ignore | ||||
|             check.handle_assigned_task() | ||||
|         check = get_object_or_404( | ||||
|             Check.objects.defer(*CHECK_DEFER), | ||||
|             pk=request.data["id"], | ||||
|         ) | ||||
|         agent = get_object_or_404( | ||||
|             Agent.objects.defer(*AGENT_DEFER), agent_id=request.data["agent_id"] | ||||
|         ) | ||||
|  | ||||
|         # get check result or create if doesn't exist | ||||
|         check_result, created = CheckResult.objects.defer( | ||||
|             *CHECK_RESULT_DEFER | ||||
|         ).get_or_create( | ||||
|             assigned_check=check, | ||||
|             agent=agent, | ||||
|         ) | ||||
|  | ||||
|         if created: | ||||
|             check_result.save() | ||||
|  | ||||
|         status = check_result.handle_check(request.data, check, agent) | ||||
|         if status == CheckStatus.FAILING and check.assignedtasks.exists(): | ||||
|             for task in check.assignedtasks.all(): | ||||
|                 if task.enabled: | ||||
|                     if task.policy: | ||||
|                         task.run_win_task(agent) | ||||
|                     else: | ||||
|                         task.run_win_task() | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
| @@ -238,7 +309,10 @@ class CheckRunnerInterval(APIView): | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def get(self, request, agentid): | ||||
|         agent = get_object_or_404(Agent, agent_id=agentid) | ||||
|         agent = get_object_or_404( | ||||
|             Agent.objects.defer(*AGENT_DEFER).prefetch_related("agentchecks"), | ||||
|             agent_id=agentid, | ||||
|         ) | ||||
|  | ||||
|         return Response( | ||||
|             {"agent": agent.pk, "check_interval": agent.check_run_interval()} | ||||
| @@ -250,65 +324,71 @@ class TaskRunner(APIView): | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def get(self, request, pk, agentid): | ||||
|         _ = get_object_or_404(Agent, agent_id=agentid) | ||||
|         agent = get_object_or_404(Agent.objects.defer(*AGENT_DEFER), agent_id=agentid) | ||||
|         task = get_object_or_404(AutomatedTask, pk=pk) | ||||
|         return Response(TaskGOGetSerializer(task).data) | ||||
|         return Response(TaskGOGetSerializer(task, context={"agent": agent}).data) | ||||
|  | ||||
|     def patch(self, request, pk, agentid): | ||||
|         from alerts.models import Alert | ||||
|  | ||||
|         agent = get_object_or_404(Agent, agent_id=agentid) | ||||
|         task = get_object_or_404(AutomatedTask, pk=pk) | ||||
|  | ||||
|         serializer = TaskRunnerPatchSerializer( | ||||
|             instance=task, data=request.data, partial=True | ||||
|         agent = get_object_or_404( | ||||
|             Agent.objects.defer(*AGENT_DEFER), | ||||
|             agent_id=agentid, | ||||
|         ) | ||||
|         task = get_object_or_404( | ||||
|             AutomatedTask.objects.select_related("custom_field"), pk=pk | ||||
|         ) | ||||
|  | ||||
|         # get task result or create if doesn't exist | ||||
|         try: | ||||
|             task_result = ( | ||||
|                 TaskResult.objects.select_related("agent") | ||||
|                 .defer("agent__services", "agent__wmi_detail") | ||||
|                 .get(task=task, agent=agent) | ||||
|             ) | ||||
|             serializer = TaskResultSerializer( | ||||
|                 data=request.data, instance=task_result, partial=True | ||||
|             ) | ||||
|         except TaskResult.DoesNotExist: | ||||
|             serializer = TaskResultSerializer(data=request.data, partial=True) | ||||
|  | ||||
|         serializer.is_valid(raise_exception=True) | ||||
|         new_task = serializer.save(last_run=djangotime.now()) | ||||
|         task_result = serializer.save(last_run=djangotime.now()) | ||||
|  | ||||
|         AgentHistory.objects.create( | ||||
|             agent=agent, | ||||
|             type="task_run", | ||||
|             script=task.script, | ||||
|             type=AuditActionType.TASK_RUN, | ||||
|             command=task.name, | ||||
|             script_results=request.data, | ||||
|         ) | ||||
|  | ||||
|         # check if task is a collector and update the custom field | ||||
|         if task.custom_field: | ||||
|             if not task.stderr: | ||||
|             if not task_result.stderr: | ||||
|  | ||||
|                 task.save_collector_results() | ||||
|                 task_result.save_collector_results() | ||||
|  | ||||
|                 status = "passing" | ||||
|                 status = CheckStatus.PASSING | ||||
|             else: | ||||
|                 status = "failing" | ||||
|                 status = CheckStatus.FAILING | ||||
|         else: | ||||
|             status = "failing" if task.retcode != 0 else "passing" | ||||
|             status = ( | ||||
|                 CheckStatus.FAILING if task_result.retcode != 0 else CheckStatus.PASSING | ||||
|             ) | ||||
|  | ||||
|         new_task.status = status | ||||
|         new_task.save() | ||||
|  | ||||
|         if status == "passing": | ||||
|             if Alert.objects.filter(assigned_task=new_task, resolved=False).exists(): | ||||
|                 Alert.handle_alert_resolve(new_task) | ||||
|         if task_result: | ||||
|             task_result.status = status | ||||
|             task_result.save(update_fields=["status"]) | ||||
|         else: | ||||
|             Alert.handle_alert_failure(new_task) | ||||
|             task_result.status = status | ||||
|             task.save(update_fields=["status"]) | ||||
|  | ||||
|         return Response("ok") | ||||
|         if status == CheckStatus.PASSING: | ||||
|             if Alert.create_or_return_task_alert(task, agent=agent, skip_create=True): | ||||
|                 Alert.handle_alert_resolve(task_result) | ||||
|         else: | ||||
|             Alert.handle_alert_failure(task_result) | ||||
|  | ||||
|  | ||||
| class SysInfo(APIView): | ||||
|     authentication_classes = [TokenAuthentication] | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def patch(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|  | ||||
|         if not isinstance(request.data["sysinfo"], dict): | ||||
|             return notify_error("err") | ||||
|  | ||||
|         agent.wmi_detail = request.data["sysinfo"] | ||||
|         agent.save(update_fields=["wmi_detail"]) | ||||
|         return Response("ok") | ||||
|  | ||||
|  | ||||
| @@ -317,14 +397,14 @@ class MeshExe(APIView): | ||||
|  | ||||
|     def post(self, request): | ||||
|         match request.data: | ||||
|             case {"arch": "64", "plat": "windows"}: | ||||
|             case {"goarch": GoArch.AMD64, "plat": AgentPlat.WINDOWS}: | ||||
|                 arch = MeshAgentIdent.WIN64 | ||||
|             case {"arch": "32", "plat": "windows"}: | ||||
|             case {"goarch": GoArch.i386, "plat": AgentPlat.WINDOWS}: | ||||
|                 arch = MeshAgentIdent.WIN32 | ||||
|             case _: | ||||
|                 return notify_error("Arch not specified") | ||||
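The arch negotiation above uses Python 3.10 structural pattern matching with mapping patterns. A self-contained sketch of how those patterns behave; the literal values are illustrative stand-ins for the GoArch/AgentPlat constants:

```python
def pick_ident(payload: dict):
    match payload:
        # A mapping pattern matches when the listed keys are present with the
        # given values; extra keys in the payload are simply ignored.
        case {"goarch": "amd64", "plat": "windows"}:
            return "win64"
        case {"goarch": "386", "plat": "windows"}:
            return "win32"
        case _:
            return None

assert pick_ident({"goarch": "amd64", "plat": "windows", "token": "x"}) == "win64"
assert pick_ident({"plat": "linux"}) is None
```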
|  | ||||
|         core: CoreSettings = CoreSettings.objects.first()  # type: ignore | ||||
|         core = get_core_settings() | ||||
|  | ||||
|         try: | ||||
|             uri = get_mesh_ws_url() | ||||
| @@ -377,7 +457,7 @@ class NewAgent(APIView): | ||||
|  | ||||
|         token = Token.objects.create(user=user) | ||||
|  | ||||
|         if agent.monitoring_type == "workstation": | ||||
|         if agent.monitoring_type == AgentMonType.WORKSTATION: | ||||
|             WinUpdatePolicy(agent=agent, run_time_days=[5, 6]).save() | ||||
|         else: | ||||
|             WinUpdatePolicy(agent=agent).save() | ||||
| @@ -388,8 +468,8 @@ class NewAgent(APIView): | ||||
|         AuditLog.objects.create( | ||||
|             username=request.user, | ||||
|             agent=agent.hostname, | ||||
|             object_type="agent", | ||||
|             action="agent_install", | ||||
|             object_type=AuditObjType.AGENT, | ||||
|             action=AuditActionType.AGENT_INSTALL, | ||||
|             message=f"{request.user} installed new agent {agent.hostname}", | ||||
|             after_value=Agent.serialize(agent), | ||||
|             debug_info={"ip": request._client_ip}, | ||||
| @@ -426,7 +506,10 @@ class Installer(APIView): | ||||
|             return notify_error("Invalid data") | ||||
|  | ||||
|         ver = request.data["version"] | ||||
|         if pyver.parse(ver) < pyver.parse(settings.LATEST_AGENT_VER): | ||||
|         if ( | ||||
|             pyver.parse(ver) < pyver.parse(settings.LATEST_AGENT_VER) | ||||
|             and not "-dev" in settings.LATEST_AGENT_VER | ||||
|         ): | ||||
|             return notify_error( | ||||
|                 f"Old installer detected (version {ver} ). Latest version is {settings.LATEST_AGENT_VER} Please generate a new installer from the RMM" | ||||
|             ) | ||||
| @@ -461,7 +544,7 @@ class ChocoResult(APIView): | ||||
|  | ||||
|         action.details["output"] = results | ||||
|         action.details["installed"] = installed | ||||
|         action.status = "completed" | ||||
|         action.status = PAStatus.COMPLETED | ||||
|         action.save(update_fields=["details", "status"]) | ||||
|         return Response("ok") | ||||
|  | ||||
| @@ -471,8 +554,9 @@ class AgentHistoryResult(APIView): | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def patch(self, request, agentid, pk): | ||||
|         _ = get_object_or_404(Agent, agent_id=agentid) | ||||
|         hist = get_object_or_404(AgentHistory, pk=pk) | ||||
|         hist = get_object_or_404( | ||||
|             AgentHistory.objects.filter(agent__agent_id=agentid), pk=pk | ||||
|         ) | ||||
|         s = AgentHistorySerializer(instance=hist, data=request.data, partial=True) | ||||
|         s.is_valid(raise_exception=True) | ||||
|         s.save() | ||||
|   | ||||
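The AgentHistoryResult change above replaces the two-step lookup with a single get_object_or_404 over a queryset pre-filtered to the agent. A minimal sketch of the same pattern, assuming AgentHistory is importable from agents.models as in this repo:

    from django.shortcuts import get_object_or_404

    from agents.models import AgentHistory

    def get_history_for_agent(agentid: str, pk: int) -> AgentHistory:
        # A history pk that exists but belongs to a different agent now raises
        # Http404 instead of being returned, because the queryset is scoped first.
        return get_object_or_404(
            AgentHistory.objects.filter(agent__agent_id=agentid), pk=pk
        )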
| @@ -1,7 +1,21 @@ | ||||
| from agents.models import Agent | ||||
| from core.models import CoreSettings | ||||
| from typing import TYPE_CHECKING, Any, Dict, List, Optional | ||||
|  | ||||
| from django.core.cache import cache | ||||
| from django.db import models | ||||
|  | ||||
| from agents.models import Agent | ||||
| from clients.models import Client, Site | ||||
| from logs.models import BaseAuditModel | ||||
| from tacticalrmm.constants import ( | ||||
|     CORESETTINGS_CACHE_KEY, | ||||
|     AgentMonType, | ||||
|     AgentPlat, | ||||
|     CheckType, | ||||
| ) | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from autotasks.models import AutomatedTask | ||||
|     from checks.models import Check | ||||
|  | ||||
|  | ||||
| class Policy(BaseAuditModel): | ||||
| @@ -26,119 +40,186 @@ class Policy(BaseAuditModel): | ||||
|         "agents.Agent", related_name="policy_exclusions", blank=True | ||||
|     ) | ||||
|  | ||||
|     def save(self, *args, **kwargs): | ||||
|     def save(self, *args: Any, **kwargs: Any) -> None: | ||||
|         from alerts.tasks import cache_agents_alert_template | ||||
|         from automation.tasks import generate_agent_checks_task | ||||
|  | ||||
|         # get old policy if exists | ||||
|         old_policy = type(self).objects.get(pk=self.pk) if self.pk else None | ||||
|         old_policy: Optional[Policy] = ( | ||||
|             type(self).objects.get(pk=self.pk) if self.pk else None | ||||
|         ) | ||||
|         super(Policy, self).save(old_model=old_policy, *args, **kwargs) | ||||
|  | ||||
|         # generate agent checks only if active and enforced were changed | ||||
|         # check if alert template was changed and cache on agents | ||||
|         if old_policy: | ||||
|             if old_policy.active != self.active or old_policy.enforced != self.enforced: | ||||
|                 generate_agent_checks_task.delay( | ||||
|                     policy=self.pk, | ||||
|                     create_tasks=True, | ||||
|                 ) | ||||
|  | ||||
|             if old_policy.alert_template != self.alert_template: | ||||
|                 cache_agents_alert_template.delay() | ||||
|             elif self.alert_template and old_policy.active != self.active: | ||||
|                 cache_agents_alert_template.delay() | ||||
|  | ||||
|             if old_policy.active != self.active or old_policy.enforced != self.enforced: | ||||
|                 cache.delete(CORESETTINGS_CACHE_KEY) | ||||
|                 cache.delete_many_pattern("site_workstation_*") | ||||
|                 cache.delete_many_pattern("site_server_*") | ||||
|                 cache.delete_many_pattern("agent_*") | ||||
|  | ||||
|     def delete(self, *args, **kwargs): | ||||
|         from automation.tasks import generate_agent_checks_task | ||||
|         cache.delete(CORESETTINGS_CACHE_KEY) | ||||
|         cache.delete_many_pattern("site_workstation_*") | ||||
|         cache.delete_many_pattern("site_server_*") | ||||
|         cache.delete_many_pattern("agent_*") | ||||
|  | ||||
|         agents = list(self.related_agents().only("pk").values_list("pk", flat=True)) | ||||
|         super(Policy, self).delete(*args, **kwargs) | ||||
|         super(Policy, self).delete( | ||||
|             *args, | ||||
|             **kwargs, | ||||
|         ) | ||||
|  | ||||
|         generate_agent_checks_task.delay(agents=agents, create_tasks=True) | ||||
|  | ||||
|     def __str__(self): | ||||
|     def __str__(self) -> str: | ||||
|         return self.name | ||||
|  | ||||
|     @property | ||||
|     def is_default_server_policy(self): | ||||
|         return self.default_server_policy.exists()  # type: ignore | ||||
|     def is_default_server_policy(self) -> bool: | ||||
|         return self.default_server_policy.exists() | ||||
|  | ||||
|     @property | ||||
|     def is_default_workstation_policy(self): | ||||
|         return self.default_workstation_policy.exists()  # type: ignore | ||||
|     def is_default_workstation_policy(self) -> bool: | ||||
|         return self.default_workstation_policy.exists() | ||||
|  | ||||
|     def is_agent_excluded(self, agent): | ||||
|     def is_agent_excluded(self, agent: "Agent") -> bool: | ||||
|         return ( | ||||
|             agent in self.excluded_agents.all() | ||||
|             or agent.site in self.excluded_sites.all() | ||||
|             or agent.client in self.excluded_clients.all() | ||||
|         ) | ||||
|  | ||||
|     def related_agents(self): | ||||
|         return self.get_related("server") | self.get_related("workstation") | ||||
|     def related_agents( | ||||
|         self, mon_type: Optional[str] = None | ||||
|     ) -> "models.QuerySet[Agent]": | ||||
|         models.prefetch_related_objects( | ||||
|             [self], | ||||
|             "excluded_agents", | ||||
|             "excluded_sites", | ||||
|             "excluded_clients", | ||||
|             "workstation_clients", | ||||
|             "server_clients", | ||||
|             "workstation_sites", | ||||
|             "server_sites", | ||||
|             "agents", | ||||
|         ) | ||||
|  | ||||
|     def get_related(self, mon_type): | ||||
|         explicit_agents = ( | ||||
|             self.agents.filter(monitoring_type=mon_type)  # type: ignore | ||||
|             .exclude( | ||||
|                 pk__in=self.excluded_agents.only("pk").values_list("pk", flat=True) | ||||
|         agent_filter = {} | ||||
|         filtered_agents_ids = Agent.objects.none() | ||||
|  | ||||
|         if mon_type: | ||||
|             agent_filter["monitoring_type"] = mon_type | ||||
|  | ||||
|         excluded_clients_ids = self.excluded_clients.only("pk").values_list( | ||||
|             "id", flat=True | ||||
|         ) | ||||
|         excluded_sites_ids = self.excluded_sites.only("pk").values_list("id", flat=True) | ||||
|         excluded_agents_ids = self.excluded_agents.only("pk").values_list( | ||||
|             "id", flat=True | ||||
|         ) | ||||
|  | ||||
|         if self.is_default_server_policy: | ||||
|             filtered_agents_ids |= ( | ||||
|                 Agent.objects.exclude(block_policy_inheritance=True) | ||||
|                 .exclude(site__block_policy_inheritance=True) | ||||
|                 .exclude(site__client__block_policy_inheritance=True) | ||||
|                 .exclude(id__in=excluded_agents_ids) | ||||
|                 .exclude(site_id__in=excluded_sites_ids) | ||||
|                 .exclude(site__client_id__in=excluded_clients_ids) | ||||
|                 .filter(monitoring_type=AgentMonType.SERVER) | ||||
|                 .only("id") | ||||
|                 .values_list("id", flat=True) | ||||
|             ) | ||||
|             .exclude(site__in=self.excluded_sites.all()) | ||||
|             .exclude(site__client__in=self.excluded_clients.all()) | ||||
|  | ||||
|         if self.is_default_workstation_policy: | ||||
|             filtered_agents_ids |= ( | ||||
|                 Agent.objects.exclude(block_policy_inheritance=True) | ||||
|                 .exclude(site__block_policy_inheritance=True) | ||||
|                 .exclude(site__client__block_policy_inheritance=True) | ||||
|                 .exclude(id__in=excluded_agents_ids) | ||||
|                 .exclude(site_id__in=excluded_sites_ids) | ||||
|                 .exclude(site__client_id__in=excluded_clients_ids) | ||||
|                 .filter(monitoring_type=AgentMonType.WORKSTATION) | ||||
|                 .only("id") | ||||
|                 .values_list("id", flat=True) | ||||
|             ) | ||||
|  | ||||
|         # if this is the default policy for both servers and workstations, skip the other calculations | ||||
|         if self.is_default_server_policy and self.is_default_workstation_policy: | ||||
|             return Agent.objects.filter(models.Q(id__in=filtered_agents_ids)) | ||||
|  | ||||
|         explicit_agents = ( | ||||
|             self.agents.filter(**agent_filter)  # type: ignore | ||||
|             .exclude(id__in=excluded_agents_ids) | ||||
|             .exclude(site_id__in=excluded_sites_ids) | ||||
|             .exclude(site__client_id__in=excluded_clients_ids) | ||||
|         ) | ||||
|  | ||||
|         explicit_clients = getattr(self, f"{mon_type}_clients").exclude( | ||||
|             pk__in=self.excluded_clients.all() | ||||
|         ) | ||||
|         explicit_sites = getattr(self, f"{mon_type}_sites").exclude( | ||||
|             pk__in=self.excluded_sites.all() | ||||
|         ) | ||||
|         explicit_clients_qs = Client.objects.none() | ||||
|         explicit_sites_qs = Site.objects.none() | ||||
|  | ||||
|         filtered_agents_pks = Policy.objects.none() | ||||
|         if not mon_type or mon_type == AgentMonType.WORKSTATION: | ||||
|             explicit_clients_qs |= self.workstation_clients.exclude(  # type: ignore | ||||
|                 id__in=excluded_clients_ids | ||||
|             ) | ||||
|             explicit_sites_qs |= self.workstation_sites.exclude(  # type: ignore | ||||
|                 id__in=excluded_sites_ids | ||||
|             ) | ||||
|  | ||||
|         filtered_agents_pks |= ( | ||||
|         if not mon_type or mon_type == AgentMonType.SERVER: | ||||
|             explicit_clients_qs |= self.server_clients.exclude(  # type: ignore | ||||
|                 id__in=excluded_clients_ids | ||||
|             ) | ||||
|             explicit_sites_qs |= self.server_sites.exclude(  # type: ignore | ||||
|                 id__in=excluded_sites_ids | ||||
|             ) | ||||
|  | ||||
|         filtered_agents_ids |= ( | ||||
|             Agent.objects.exclude(block_policy_inheritance=True) | ||||
|             .filter( | ||||
|                 site__in=[ | ||||
|                     site | ||||
|                     for site in explicit_sites | ||||
|                     if site.client not in explicit_clients | ||||
|                     and site.client not in self.excluded_clients.all() | ||||
|                 site_id__in=[ | ||||
|                     site.id | ||||
|                     for site in explicit_sites_qs | ||||
|                     if site.client not in explicit_clients_qs | ||||
|                     and site.client.id not in excluded_clients_ids | ||||
|                 ], | ||||
|                 monitoring_type=mon_type, | ||||
|                 **agent_filter, | ||||
|             ) | ||||
|             .values_list("pk", flat=True) | ||||
|             .only("id") | ||||
|             .values_list("id", flat=True) | ||||
|         ) | ||||
|  | ||||
|         filtered_agents_pks |= ( | ||||
|         filtered_agents_ids |= ( | ||||
|             Agent.objects.exclude(block_policy_inheritance=True) | ||||
|             .exclude(site__block_policy_inheritance=True) | ||||
|             .filter( | ||||
|                 site__client__in=[client for client in explicit_clients], | ||||
|                 monitoring_type=mon_type, | ||||
|                 site__client__in=explicit_clients_qs, | ||||
|                 **agent_filter, | ||||
|             ) | ||||
|             .values_list("pk", flat=True) | ||||
|             .only("id") | ||||
|             .values_list("id", flat=True) | ||||
|         ) | ||||
|  | ||||
|         return Agent.objects.filter( | ||||
|             models.Q(pk__in=filtered_agents_pks) | ||||
|             | models.Q(pk__in=explicit_agents.only("pk")) | ||||
|             models.Q(id__in=filtered_agents_ids) | ||||
|             | models.Q(id__in=explicit_agents.only("id")) | ||||
|         ) | ||||
|  | ||||
|     @staticmethod | ||||
|     def serialize(policy): | ||||
|     def serialize(policy: "Policy") -> Dict[str, Any]: | ||||
|         # serializes the policy and returns json | ||||
|         from .serializers import PolicyAuditSerializer | ||||
|  | ||||
|         return PolicyAuditSerializer(policy).data | ||||
|  | ||||
|     @staticmethod | ||||
|     def cascade_policy_tasks(agent): | ||||
|     def get_policy_tasks(agent: "Agent") -> "List[AutomatedTask]": | ||||
|  | ||||
|         # List of all tasks to be applied | ||||
|         tasks = list() | ||||
|  | ||||
|         agent_tasks_parent_pks = [ | ||||
|             task.parent_task for task in agent.autotasks.filter(managed_by_policy=True) | ||||
|         ] | ||||
|  | ||||
|         # Get policies applied to agent and agent site and client | ||||
|         policies = agent.get_agent_policies() | ||||
|  | ||||
| @@ -150,36 +231,13 @@ class Policy(BaseAuditModel): | ||||
|                 for task in policy.autotasks.all(): | ||||
|                     tasks.append(task) | ||||
|  | ||||
|         # remove policy tasks from agent not included in policy | ||||
|         for task in agent.autotasks.filter( | ||||
|             parent_task__in=[ | ||||
|                 taskpk | ||||
|                 for taskpk in agent_tasks_parent_pks | ||||
|                 if taskpk not in [task.pk for task in tasks] | ||||
|             ] | ||||
|         ): | ||||
|             if task.sync_status == "initial": | ||||
|                 task.delete() | ||||
|             else: | ||||
|                 task.sync_status = "pendingdeletion" | ||||
|                 task.save() | ||||
|  | ||||
|         # change tasks from pendingdeletion to notsynced if policy was added or changed | ||||
|         agent.autotasks.filter(sync_status="pendingdeletion").filter( | ||||
|             parent_task__in=[taskpk for taskpk in [task.pk for task in tasks]] | ||||
|         ).update(sync_status="notsynced") | ||||
|  | ||||
|         return [task for task in tasks if task.pk not in agent_tasks_parent_pks] | ||||
|         return tasks | ||||
|  | ||||
|     @staticmethod | ||||
|     def cascade_policy_checks(agent): | ||||
|         # Get checks added to agent directly | ||||
|         agent_checks = list(agent.agentchecks.filter(managed_by_policy=False)) | ||||
|     def get_policy_checks(agent: "Agent") -> "List[Check]": | ||||
|  | ||||
|         agent_checks_parent_pks = [ | ||||
|             check.parent_check | ||||
|             for check in agent.agentchecks.filter(managed_by_policy=True) | ||||
|         ] | ||||
|         # Get checks added to agent directly | ||||
|         agent_checks = list(agent.agentchecks.all()) | ||||
|  | ||||
|         # Get policies applied to agent and agent site and client | ||||
|         policies = agent.get_agent_policies() | ||||
| @@ -201,71 +259,82 @@ class Policy(BaseAuditModel): | ||||
|                     for check in policy.policychecks.all(): | ||||
|                         policy_checks.append(check) | ||||
|  | ||||
|         # Sorted Checks already added | ||||
|         added_diskspace_checks = list() | ||||
|         added_ping_checks = list() | ||||
|         added_winsvc_checks = list() | ||||
|         added_script_checks = list() | ||||
|         added_eventlog_checks = list() | ||||
|         added_cpuload_checks = list() | ||||
|         added_memory_checks = list() | ||||
|         if not enforced_checks and not policy_checks: | ||||
|             return [] | ||||
|  | ||||
|         # Lists all agent and policy checks that will be created | ||||
|         diskspace_checks = list() | ||||
|         ping_checks = list() | ||||
|         winsvc_checks = list() | ||||
|         script_checks = list() | ||||
|         eventlog_checks = list() | ||||
|         cpuload_checks = list() | ||||
|         memory_checks = list() | ||||
|         # Sorted Checks already added | ||||
|         added_diskspace_checks: List[str] = list() | ||||
|         added_ping_checks: List[str] = list() | ||||
|         added_winsvc_checks: List[str] = list() | ||||
|         added_script_checks: List[int] = list() | ||||
|         added_eventlog_checks: List[List[str]] = list() | ||||
|         added_cpuload_checks: List[int] = list() | ||||
|         added_memory_checks: List[int] = list() | ||||
|  | ||||
|         # Lists all agent and policy checks that will be returned | ||||
|         diskspace_checks: "List[Check]" = list() | ||||
|         ping_checks: "List[Check]" = list() | ||||
|         winsvc_checks: "List[Check]" = list() | ||||
|         script_checks: "List[Check]" = list() | ||||
|         eventlog_checks: "List[Check]" = list() | ||||
|         cpuload_checks: "List[Check]" = list() | ||||
|         memory_checks: "List[Check]" = list() | ||||
|  | ||||
|         overridden_checks: List[int] = list() | ||||
|  | ||||
|         # Loop over checks with enforced policies first, then non-enforced policies | ||||
|         for check in enforced_checks + agent_checks + policy_checks: | ||||
|             if check.check_type == "diskspace" and agent.plat == "windows": | ||||
|             if ( | ||||
|                 check.check_type == CheckType.DISK_SPACE | ||||
|                 and agent.plat == AgentPlat.WINDOWS | ||||
|             ): | ||||
|                 # Check if drive letter was already added | ||||
|                 if check.disk not in added_diskspace_checks: | ||||
|                     added_diskspace_checks.append(check.disk) | ||||
|                     # Dont create the check if it is an agent check | ||||
|                     # Dont add the check if it is an agent check | ||||
|                     if not check.agent: | ||||
|                         diskspace_checks.append(check) | ||||
|                 elif check.agent: | ||||
|                     check.overriden_by_policy = True | ||||
|                     check.save() | ||||
|                     overridden_checks.append(check.pk) | ||||
|  | ||||
|             if check.check_type == "ping": | ||||
|             elif check.check_type == CheckType.PING: | ||||
|                 # Check if IP/host was already added | ||||
|                 if check.ip not in added_ping_checks: | ||||
|                     added_ping_checks.append(check.ip) | ||||
|                     # Dont create the check if it is an agent check | ||||
|                     # Dont add the check if it is an agent check | ||||
|                     if not check.agent: | ||||
|                         ping_checks.append(check) | ||||
|                 elif check.agent: | ||||
|                     check.overriden_by_policy = True | ||||
|                     check.save() | ||||
|                     overridden_checks.append(check.pk) | ||||
|  | ||||
|             if check.check_type == "cpuload" and agent.plat == "windows": | ||||
|             elif ( | ||||
|                 check.check_type == CheckType.CPU_LOAD | ||||
|                 and agent.plat == AgentPlat.WINDOWS | ||||
|             ): | ||||
|                 # Check if cpuload list is empty | ||||
|                 if not added_cpuload_checks: | ||||
|                     added_cpuload_checks.append(check) | ||||
|                     added_cpuload_checks.append(check.pk) | ||||
|                     # Dont create the check if it is an agent check | ||||
|                     if not check.agent: | ||||
|                         cpuload_checks.append(check) | ||||
|                 elif check.agent: | ||||
|                     check.overriden_by_policy = True | ||||
|                     check.save() | ||||
|                     overridden_checks.append(check.pk) | ||||
|  | ||||
|             if check.check_type == "memory" and agent.plat == "windows": | ||||
|             elif ( | ||||
|                 check.check_type == CheckType.MEMORY and agent.plat == AgentPlat.WINDOWS | ||||
|             ): | ||||
|                 # Check if memory check list is empty | ||||
|                 if not added_memory_checks: | ||||
|                     added_memory_checks.append(check) | ||||
|                     added_memory_checks.append(check.pk) | ||||
|                     # Dont create the check if it is an agent check | ||||
|                     if not check.agent: | ||||
|                         memory_checks.append(check) | ||||
|                 elif check.agent: | ||||
|                     check.overriden_by_policy = True | ||||
|                     check.save() | ||||
|                     overridden_checks.append(check.pk) | ||||
|  | ||||
|             if check.check_type == "winsvc" and agent.plat == "windows": | ||||
|             elif ( | ||||
|                 check.check_type == CheckType.WINSVC and agent.plat == AgentPlat.WINDOWS | ||||
|             ): | ||||
|                 # Check if service name was already added | ||||
|                 if check.svc_name not in added_winsvc_checks: | ||||
|                     added_winsvc_checks.append(check.svc_name) | ||||
| @@ -273,10 +342,9 @@ class Policy(BaseAuditModel): | ||||
|                     if not check.agent: | ||||
|                         winsvc_checks.append(check) | ||||
|                 elif check.agent: | ||||
|                     check.overriden_by_policy = True | ||||
|                     check.save() | ||||
|                     overridden_checks.append(check.pk) | ||||
|  | ||||
|             if check.check_type == "script" and agent.is_supported_script( | ||||
|             elif check.check_type == CheckType.SCRIPT and agent.is_supported_script( | ||||
|                 check.script.supported_platforms | ||||
|             ): | ||||
|                 # Check if script id was already added | ||||
| @@ -286,20 +354,28 @@ class Policy(BaseAuditModel): | ||||
|                     if not check.agent: | ||||
|                         script_checks.append(check) | ||||
|                 elif check.agent: | ||||
|                     check.overriden_by_policy = True | ||||
|                     check.save() | ||||
|                     overridden_checks.append(check.pk) | ||||
|  | ||||
|             if check.check_type == "eventlog" and agent.plat == "windows": | ||||
|             elif ( | ||||
|                 check.check_type == CheckType.EVENT_LOG | ||||
|                 and agent.plat == AgentPlat.WINDOWS | ||||
|             ): | ||||
|                 # Check if events were already added | ||||
|                 if [check.log_name, check.event_id] not in added_eventlog_checks: | ||||
|                     added_eventlog_checks.append([check.log_name, check.event_id]) | ||||
|                     if not check.agent: | ||||
|                         eventlog_checks.append(check) | ||||
|                 elif check.agent: | ||||
|                     check.overriden_by_policy = True | ||||
|                     check.save() | ||||
|                     overridden_checks.append(check.pk) | ||||
|  | ||||
|         final_list = ( | ||||
|             if overridden_checks: | ||||
|                 from checks.models import Check | ||||
|  | ||||
|                 Check.objects.filter(pk__in=overridden_checks).update( | ||||
|                     overridden_by_policy=True | ||||
|                 ) | ||||
|  | ||||
|         return ( | ||||
|             diskspace_checks | ||||
|             + ping_checks | ||||
|             + cpuload_checks | ||||
| @@ -308,33 +384,3 @@ class Policy(BaseAuditModel): | ||||
|             + script_checks | ||||
|             + eventlog_checks | ||||
|         ) | ||||
|  | ||||
|         # remove policy checks from agent that fell out of policy scope | ||||
|         agent.agentchecks.filter( | ||||
|             managed_by_policy=True, | ||||
|             parent_check__in=[ | ||||
|                 checkpk | ||||
|                 for checkpk in agent_checks_parent_pks | ||||
|                 if checkpk not in [check.pk for check in final_list] | ||||
|             ], | ||||
|         ).delete() | ||||
|  | ||||
|         return [ | ||||
|             check for check in final_list if check.pk not in agent_checks_parent_pks | ||||
|         ] | ||||
|  | ||||
|     @staticmethod | ||||
|     def generate_policy_checks(agent): | ||||
|         checks = Policy.cascade_policy_checks(agent) | ||||
|  | ||||
|         if checks: | ||||
|             for check in checks: | ||||
|                 check.create_policy_check(agent) | ||||
|  | ||||
|     @staticmethod | ||||
|     def generate_policy_tasks(agent): | ||||
|         tasks = Policy.cascade_policy_tasks(agent) | ||||
|  | ||||
|         if tasks: | ||||
|             for task in tasks: | ||||
|                 task.create_policy_task(agent) | ||||
|   | ||||
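A hedged usage sketch of the refactored related_agents(): the old get_related("server") / get_related("workstation") pair is folded into one method that takes an optional monitoring type. The policy name below is illustrative.

    from automation.models import Policy
    from tacticalrmm.constants import AgentMonType

    policy = Policy.objects.get(name="Default Workstation Policy")  # illustrative name

    # every agent the policy resolves to, across both monitoring types
    all_agents = policy.related_agents()

    # the same resolution restricted to servers only
    server_agents = policy.related_agents(mon_type=AgentMonType.SERVER)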
| @@ -4,7 +4,7 @@ from tacticalrmm.permissions import _has_perm | ||||
|  | ||||
|  | ||||
| class AutomationPolicyPerms(permissions.BasePermission): | ||||
|     def has_permission(self, r, view): | ||||
|     def has_permission(self, r, view) -> bool: | ||||
|         if r.method == "GET": | ||||
|             return _has_perm(r, "can_list_automation_policies") | ||||
|         else: | ||||
|   | ||||
| @@ -1,13 +1,14 @@ | ||||
| from agents.serializers import AgentHostnameSerializer | ||||
| from autotasks.models import AutomatedTask | ||||
| from checks.models import Check | ||||
| from clients.models import Client | ||||
| from clients.serializers import ClientMinimumSerializer, SiteMinimumSerializer | ||||
| from rest_framework.serializers import ( | ||||
|     ModelSerializer, | ||||
|     ReadOnlyField, | ||||
|     SerializerMethodField, | ||||
| ) | ||||
|  | ||||
| from agents.serializers import AgentHostnameSerializer | ||||
| from autotasks.models import TaskResult | ||||
| from checks.models import CheckResult | ||||
| from clients.models import Client | ||||
| from clients.serializers import ClientMinimumSerializer, SiteMinimumSerializer | ||||
| from winupdate.serializers import WinUpdatePolicySerializer | ||||
|  | ||||
| from .models import Policy | ||||
| @@ -95,7 +96,7 @@ class PolicyCheckStatusSerializer(ModelSerializer): | ||||
|     hostname = ReadOnlyField(source="agent.hostname") | ||||
|  | ||||
|     class Meta: | ||||
|         model = Check | ||||
|         model = CheckResult | ||||
|         fields = "__all__" | ||||
|  | ||||
|  | ||||
| @@ -103,7 +104,7 @@ class PolicyTaskStatusSerializer(ModelSerializer): | ||||
|     hostname = ReadOnlyField(source="agent.hostname") | ||||
|  | ||||
|     class Meta: | ||||
|         model = AutomatedTask | ||||
|         model = TaskResult | ||||
|         fields = "__all__" | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -1,155 +1,20 @@ | ||||
| from typing import Any, Dict, List, Union | ||||
|  | ||||
| from tacticalrmm.celery import app | ||||
|  | ||||
|  | ||||
| @app.task(retry_backoff=5, retry_jitter=True, retry_kwargs={"max_retries": 5}) | ||||
| def generate_agent_checks_task( | ||||
|     policy: int = None, | ||||
|     site: int = None, | ||||
|     client: int = None, | ||||
|     agents: List[int] = list(), | ||||
|     all: bool = False, | ||||
|     create_tasks: bool = False, | ||||
| ) -> Union[str, None]: | ||||
|     from agents.models import Agent | ||||
|     from automation.models import Policy | ||||
|  | ||||
|     p = Policy.objects.get(pk=policy) if policy else None | ||||
|  | ||||
|     # generate checks on all agents if all is specified or if policy is default server/workstation policy | ||||
|     if (p and p.is_default_server_policy and p.is_default_workstation_policy) or all: | ||||
|         a = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type") | ||||
|  | ||||
|     # generate checks on all servers if policy is a default servers policy | ||||
|     elif p and p.is_default_server_policy: | ||||
|         a = Agent.objects.filter(monitoring_type="server").only("pk", "monitoring_type") | ||||
|  | ||||
|     # generate checks on all workstations if policy is a default workstations policy | ||||
|     elif p and p.is_default_workstation_policy: | ||||
|         a = Agent.objects.filter(monitoring_type="workstation").only( | ||||
|             "pk", "monitoring_type" | ||||
|         ) | ||||
|  | ||||
|     # generate checks on a list of supplied agents | ||||
|     elif agents: | ||||
|         a = Agent.objects.filter(pk__in=agents) | ||||
|  | ||||
|     # generate checks on agents affected by supplied policy | ||||
|     elif policy: | ||||
|         a = p.related_agents().only("pk") | ||||
|  | ||||
|     # generate checks that has specified site | ||||
|     elif site: | ||||
|         a = Agent.objects.filter(site_id=site) | ||||
|  | ||||
|     # generate checks that has specified client | ||||
|     elif client: | ||||
|         a = Agent.objects.filter(site__client_id=client) | ||||
|     else: | ||||
|         a = [] | ||||
|  | ||||
|     for agent in a: | ||||
|         agent.generate_checks_from_policies() | ||||
|         if create_tasks: | ||||
|             agent.generate_tasks_from_policies() | ||||
|  | ||||
|         agent.set_alert_template() | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task( | ||||
|     acks_late=True, retry_backoff=5, retry_jitter=True, retry_kwargs={"max_retries": 5} | ||||
| ) | ||||
| # updates policy managed check fields on agents | ||||
| def update_policy_check_fields_task(check: int) -> str: | ||||
|     from checks.models import Check | ||||
|  | ||||
|     c: Check = Check.objects.get(pk=check) | ||||
|     update_fields: Dict[Any, Any] = {} | ||||
|  | ||||
|     for field in c.policy_fields_to_copy: | ||||
|         update_fields[field] = getattr(c, field) | ||||
|  | ||||
|     Check.objects.filter(parent_check=check).update(**update_fields) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task(retry_backoff=5, retry_jitter=True, retry_kwargs={"max_retries": 5}) | ||||
| # generates policy tasks on agents affected by a policy | ||||
| def generate_agent_autotasks_task(policy: int = None) -> str: | ||||
|     from agents.models import Agent | ||||
|     from automation.models import Policy | ||||
|  | ||||
|     p: Policy = Policy.objects.get(pk=policy) | ||||
|  | ||||
|     if p and p.is_default_server_policy and p.is_default_workstation_policy: | ||||
|         agents = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type") | ||||
|     elif p and p.is_default_server_policy: | ||||
|         agents = Agent.objects.filter(monitoring_type="server").only( | ||||
|             "pk", "monitoring_type" | ||||
|         ) | ||||
|     elif p and p.is_default_workstation_policy: | ||||
|         agents = Agent.objects.filter(monitoring_type="workstation").only( | ||||
|             "pk", "monitoring_type" | ||||
|         ) | ||||
|     else: | ||||
|         agents = p.related_agents().only("pk") | ||||
|  | ||||
|     for agent in agents: | ||||
|         agent.generate_tasks_from_policies() | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task( | ||||
|     acks_late=True, | ||||
|     retry_backoff=5, | ||||
|     retry_jitter=True, | ||||
|     retry_kwargs={"max_retries": 5}, | ||||
| ) | ||||
| def delete_policy_autotasks_task(task: int) -> str: | ||||
|     from autotasks.models import AutomatedTask | ||||
|  | ||||
|     for t in AutomatedTask.objects.filter(parent_task=task): | ||||
|         t.delete_task_on_agent() | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def run_win_policy_autotasks_task(task: int) -> str: | ||||
|     from autotasks.models import AutomatedTask | ||||
|  | ||||
|     for t in AutomatedTask.objects.filter(parent_task=task): | ||||
|         t.run_win_task() | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task( | ||||
|     acks_late=True, | ||||
|     retry_backoff=5, | ||||
|     retry_jitter=True, | ||||
|     retry_kwargs={"max_retries": 5}, | ||||
| ) | ||||
| def update_policy_autotasks_fields_task(task: int, update_agent: bool = False) -> str: | ||||
|     from autotasks.models import AutomatedTask | ||||
|  | ||||
|     t = AutomatedTask.objects.get(pk=task) | ||||
|     update_fields: Dict[str, Any] = {} | ||||
|  | ||||
|     for field in t.policy_fields_to_copy: | ||||
|         update_fields[field] = getattr(t, field) | ||||
|  | ||||
|     AutomatedTask.objects.filter(parent_task=task).update(**update_fields) | ||||
|  | ||||
|     if update_agent: | ||||
|         for t in AutomatedTask.objects.filter(parent_task=task).exclude( | ||||
|             sync_status="initial" | ||||
|         ): | ||||
|             t.modify_task_on_agent() | ||||
|     try: | ||||
|         policy_task = AutomatedTask.objects.get(pk=task) | ||||
|     except AutomatedTask.DoesNotExist: | ||||
|         return "AutomatedTask not found" | ||||
|  | ||||
|     if not policy_task.policy: | ||||
|         return "AutomatedTask must be a policy" | ||||
|  | ||||
|     # get related agents from policy | ||||
|     for agent in policy_task.policy.related_agents(): | ||||
|         policy_task.run_win_task(agent) | ||||
|  | ||||
|     return "ok" | ||||
|   | ||||
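The surviving task in automation/tasks.py reduces to the loop in the last hunk: a policy-owned AutomatedTask is run once per agent the policy resolves to, instead of once per per-agent task copy. The same flow outside Celery, with an illustrative pk:

    from autotasks.models import AutomatedTask

    policy_task = AutomatedTask.objects.select_related("policy").get(pk=42)  # illustrative pk

    if policy_task.policy:
        for agent in policy_task.policy.related_agents():
            # one shared task definition, executed against each related agent
            policy_task.run_win_task(agent)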
										
											
File diff suppressed because it is too large
							| @@ -1,6 +1,7 @@ | ||||
| from django.urls import path | ||||
|  | ||||
| from autotasks.views import GetAddAutoTasks | ||||
| from checks.views import GetAddChecks | ||||
| from django.urls import path | ||||
|  | ||||
| from . import views | ||||
|  | ||||
| @@ -9,7 +10,6 @@ urlpatterns = [ | ||||
|     path("policies/<int:pk>/related/", views.GetRelated.as_view()), | ||||
|     path("policies/overview/", views.OverviewPolicy.as_view()), | ||||
|     path("policies/<int:pk>/", views.GetUpdateDeletePolicy.as_view()), | ||||
|     path("sync/", views.PolicySync.as_view()), | ||||
|     # alias to get policy checks | ||||
|     path("policies/<int:policy>/checks/", GetAddChecks.as_view()), | ||||
|     # alias to get policy tasks | ||||
|   | ||||
| @@ -1,18 +1,17 @@ | ||||
| from agents.models import Agent | ||||
| from autotasks.models import AutomatedTask | ||||
| from checks.models import Check | ||||
| from clients.models import Client | ||||
| from django.shortcuts import get_object_or_404 | ||||
| from rest_framework.exceptions import PermissionDenied | ||||
| from rest_framework.permissions import IsAuthenticated | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.views import APIView | ||||
|  | ||||
| from agents.models import Agent | ||||
| from autotasks.models import TaskResult | ||||
| from checks.models import CheckResult | ||||
| from clients.models import Client | ||||
| from tacticalrmm.permissions import _has_perm_on_client, _has_perm_on_site | ||||
| from winupdate.models import WinUpdatePolicy | ||||
| from winupdate.serializers import WinUpdatePolicySerializer | ||||
|  | ||||
| from tacticalrmm.permissions import _has_perm_on_client, _has_perm_on_site | ||||
| from tacticalrmm.utils import notify_error | ||||
|  | ||||
| from .models import Policy | ||||
| from .permissions import AutomationPolicyPerms | ||||
| from .serializers import ( | ||||
| @@ -29,7 +28,9 @@ class GetAddPolicies(APIView): | ||||
|     permission_classes = [IsAuthenticated, AutomationPolicyPerms] | ||||
|  | ||||
|     def get(self, request): | ||||
|         policies = Policy.objects.all() | ||||
|         policies = Policy.objects.select_related("alert_template").prefetch_related( | ||||
|             "excluded_agents", "excluded_sites", "excluded_clients" | ||||
|         ) | ||||
|  | ||||
|         return Response( | ||||
|             PolicyTableSerializer( | ||||
| @@ -51,9 +52,9 @@ class GetAddPolicies(APIView): | ||||
|                 check.create_policy_check(policy=policy) | ||||
|  | ||||
|             tasks = copyPolicy.autotasks.all() | ||||
|  | ||||
|             for task in tasks: | ||||
|                 task.create_policy_task(policy=policy) | ||||
|                 if not task.assigned_check: | ||||
|                     task.create_policy_task(policy=policy) | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
| @@ -67,22 +68,12 @@ class GetUpdateDeletePolicy(APIView): | ||||
|         return Response(PolicySerializer(policy).data) | ||||
|  | ||||
|     def put(self, request, pk): | ||||
|         from .tasks import generate_agent_checks_task | ||||
|  | ||||
|         policy = get_object_or_404(Policy, pk=pk) | ||||
|  | ||||
|         serializer = PolicySerializer(instance=policy, data=request.data, partial=True) | ||||
|         serializer.is_valid(raise_exception=True) | ||||
|         serializer.save() | ||||
|  | ||||
|         # check for excluding objects and in the request and if present generate policies | ||||
|         if ( | ||||
|             "excluded_sites" in request.data.keys() | ||||
|             or "excluded_clients" in request.data.keys() | ||||
|             or "excluded_agents" in request.data.keys() | ||||
|         ): | ||||
|             generate_agent_checks_task.delay(policy=pk, create_tasks=True) | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|     def delete(self, request, pk): | ||||
| @@ -91,25 +82,11 @@ class GetUpdateDeletePolicy(APIView): | ||||
|         return Response("ok") | ||||
|  | ||||
|  | ||||
| class PolicySync(APIView): | ||||
|     def post(self, request): | ||||
|         if "policy" in request.data.keys(): | ||||
|             from automation.tasks import generate_agent_checks_task | ||||
|  | ||||
|             generate_agent_checks_task.delay( | ||||
|                 policy=request.data["policy"], create_tasks=True | ||||
|             ) | ||||
|             return Response("ok") | ||||
|  | ||||
|         else: | ||||
|             return notify_error("The request was invalid") | ||||
|  | ||||
|  | ||||
| class PolicyAutoTask(APIView): | ||||
|  | ||||
|     # get status of all tasks | ||||
|     def get(self, request, task): | ||||
|         tasks = AutomatedTask.objects.filter(parent_task=task) | ||||
|         tasks = TaskResult.objects.filter(task=task) | ||||
|         return Response(PolicyTaskStatusSerializer(tasks, many=True).data) | ||||
|  | ||||
|     # bulk run win tasks associated with policy | ||||
| @@ -124,14 +101,16 @@ class PolicyCheck(APIView): | ||||
|     permission_classes = [IsAuthenticated, AutomationPolicyPerms] | ||||
|  | ||||
|     def get(self, request, check): | ||||
|         checks = Check.objects.filter(parent_check=check) | ||||
|         checks = CheckResult.objects.filter(assigned_check=check) | ||||
|         return Response(PolicyCheckStatusSerializer(checks, many=True).data) | ||||
|  | ||||
|  | ||||
| class OverviewPolicy(APIView): | ||||
|     def get(self, request): | ||||
|  | ||||
|         clients = Client.objects.all() | ||||
|         clients = Client.objects.filter_by_role(request.user).select_related( | ||||
|             "workstation_policy", "server_policy" | ||||
|         ) | ||||
|         return Response(PolicyOverviewSerializer(clients, many=True).data) | ||||
|  | ||||
|  | ||||
| @@ -162,7 +141,7 @@ class UpdatePatchPolicy(APIView): | ||||
|  | ||||
|         serializer = WinUpdatePolicySerializer(data=request.data, partial=True) | ||||
|         serializer.is_valid(raise_exception=True) | ||||
|         serializer.policy = policy  # type: ignore | ||||
|         serializer.policy = policy | ||||
|         serializer.save() | ||||
|  | ||||
|         return Response("ok") | ||||
| @@ -195,7 +174,7 @@ class ResetPatchPolicy(APIView): | ||||
|                 raise PermissionDenied() | ||||
|  | ||||
|             agents = ( | ||||
|                 Agent.objects.filter_by_role(request.user) | ||||
|                 Agent.objects.filter_by_role(request.user)  # type: ignore | ||||
|                 .prefetch_related("winupdatepolicy") | ||||
|                 .filter(site__client_id=request.data["client"]) | ||||
|             ) | ||||
| @@ -204,13 +183,13 @@ class ResetPatchPolicy(APIView): | ||||
|                 raise PermissionDenied() | ||||
|  | ||||
|             agents = ( | ||||
|                 Agent.objects.filter_by_role(request.user) | ||||
|                 Agent.objects.filter_by_role(request.user)  # type: ignore | ||||
|                 .prefetch_related("winupdatepolicy") | ||||
|                 .filter(site_id=request.data["site"]) | ||||
|             ) | ||||
|         else: | ||||
|             agents = ( | ||||
|                 Agent.objects.filter_by_role(request.user) | ||||
|                 Agent.objects.filter_by_role(request.user)  # type: ignore | ||||
|                 .prefetch_related("winupdatepolicy") | ||||
|                 .only("pk") | ||||
|             ) | ||||
|   | ||||
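A short sketch of the query-shape change in GetAddPolicies.get(): the alert template is joined into the base query and the exclusion many-to-manys are prefetched, so serializing N policies no longer issues extra queries per policy. Relation names are taken from the diff.

    from automation.models import Policy

    policies = Policy.objects.select_related("alert_template").prefetch_related(
        "excluded_agents", "excluded_sites", "excluded_clients"
    )

    for policy in policies:
        _ = policy.alert_template               # loaded by the JOIN, no extra query
        _ = list(policy.excluded_agents.all())  # served from the prefetch cache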
| @@ -1,5 +1,6 @@ | ||||
| from django.contrib import admin | ||||
|  | ||||
| from .models import AutomatedTask | ||||
| from .models import AutomatedTask, TaskResult | ||||
|  | ||||
| admin.site.register(AutomatedTask) | ||||
| admin.site.register(TaskResult) | ||||
|   | ||||
| @@ -1,10 +1,5 @@ | ||||
| from itertools import cycle | ||||
|  | ||||
| from model_bakery.recipe import Recipe, foreign_key, seq | ||||
|  | ||||
| script = Recipe("scripts.script") | ||||
| from model_bakery.recipe import Recipe | ||||
|  | ||||
| task = Recipe( | ||||
|     "autotasks.AutomatedTask", | ||||
|     script=foreign_key(script), | ||||
| ) | ||||
|   | ||||
| @@ -1,16 +1,13 @@ | ||||
| from agents.models import Agent | ||||
| from autotasks.tasks import remove_orphaned_win_tasks | ||||
| from django.core.management.base import BaseCommand | ||||
|  | ||||
| from autotasks.tasks import remove_orphaned_win_tasks | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = "Checks for orphaned tasks on all agents and removes them" | ||||
|  | ||||
|     def handle(self, *args, **kwargs): | ||||
|         agents = Agent.objects.only("pk", "last_seen", "overdue_time", "offline_time") | ||||
|         online = [i for i in agents if i.status == "online"] | ||||
|         for agent in online: | ||||
|             remove_orphaned_win_tasks.delay(agent.pk) | ||||
|         remove_orphaned_win_tasks.s() | ||||
|  | ||||
|         self.stdout.write( | ||||
|             self.style.SUCCESS( | ||||
|   | ||||
| @@ -0,0 +1,99 @@ | ||||
| # Generated by Django 3.2.12 on 2022-04-01 22:44 | ||||
|  | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("checks", "0025_auto_20210917_1954"), | ||||
|         ("agents", "0046_alter_agenthistory_command"), | ||||
|         ("autotasks", "0029_alter_automatedtask_task_type"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RemoveField( | ||||
|             model_name="automatedtask", | ||||
|             name="retvalue", | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="automatedtask", | ||||
|             name="assigned_check", | ||||
|             field=models.ForeignKey( | ||||
|                 blank=True, | ||||
|                 null=True, | ||||
|                 on_delete=django.db.models.deletion.SET_NULL, | ||||
|                 related_name="assignedtasks", | ||||
|                 to="checks.check", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="automatedtask", | ||||
|             name="win_task_name", | ||||
|             field=models.CharField(blank=True, max_length=255, null=True), | ||||
|         ), | ||||
|         migrations.CreateModel( | ||||
|             name="TaskResult", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "id", | ||||
|                     models.AutoField( | ||||
|                         auto_created=True, | ||||
|                         primary_key=True, | ||||
|                         serialize=False, | ||||
|                         verbose_name="ID", | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("retcode", models.IntegerField(blank=True, null=True)), | ||||
|                 ("stdout", models.TextField(blank=True, null=True)), | ||||
|                 ("stderr", models.TextField(blank=True, null=True)), | ||||
|                 ("execution_time", models.CharField(default="0.0000", max_length=100)), | ||||
|                 ("last_run", models.DateTimeField(blank=True, null=True)), | ||||
|                 ( | ||||
|                     "status", | ||||
|                     models.CharField( | ||||
|                         choices=[ | ||||
|                             ("passing", "Passing"), | ||||
|                             ("failing", "Failing"), | ||||
|                             ("pending", "Pending"), | ||||
|                         ], | ||||
|                         default="pending", | ||||
|                         max_length=30, | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "sync_status", | ||||
|                     models.CharField( | ||||
|                         choices=[ | ||||
|                             ("synced", "Synced With Agent"), | ||||
|                             ("notsynced", "Waiting On Agent Checkin"), | ||||
|                             ("pendingdeletion", "Pending Deletion on Agent"), | ||||
|                             ("initial", "Initial Task Sync"), | ||||
|                         ], | ||||
|                         default="initial", | ||||
|                         max_length=100, | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "agent", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, | ||||
|                         related_name="taskresults", | ||||
|                         to="agents.agent", | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "task", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, | ||||
|                         related_name="taskresults", | ||||
|                         to="autotasks.automatedtask", | ||||
|                     ), | ||||
|                 ), | ||||
|             ], | ||||
|             options={ | ||||
|                 "unique_together": {("agent", "task")}, | ||||
|             }, | ||||
|         ), | ||||
|     ] | ||||
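With this migration, per-run data (retcode, stdout/stderr, last_run, status, sync_status) moves onto TaskResult, unique per (agent, task). A sketch of reading results back, assuming the field and related names shown above:

    from autotasks.models import AutomatedTask, TaskResult

    task = AutomatedTask.objects.first()

    # one row per agent that has run (or is syncing) this task
    for result in TaskResult.objects.filter(task=task).select_related("agent"):
        print(result.agent.hostname, result.status, result.retcode, result.last_run)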
| @@ -0,0 +1,50 @@ | ||||
| # Generated by Django 3.2.12 on 2022-04-01 22:49 | ||||
|  | ||||
| from django.db import migrations, transaction | ||||
| from django.db.utils import IntegrityError | ||||
|  | ||||
|  | ||||
| def migrate_task_results(apps, schema_editor): | ||||
|     AutomatedTask = apps.get_model("autotasks", "AutomatedTask") | ||||
|     TaskResult = apps.get_model("autotasks", "TaskResult") | ||||
|     for task in AutomatedTask.objects.exclude(agent=None): | ||||
|  | ||||
|         try: | ||||
|             with transaction.atomic(): | ||||
|                 if task.managed_by_policy: | ||||
|                     TaskResult.objects.create( | ||||
|                         task_id=task.parent_task, | ||||
|                         agent_id=task.agent_id, | ||||
|                         retcode=task.retcode, | ||||
|                         stdout=task.stdout, | ||||
|                         stderr=task.stderr, | ||||
|                         execution_time=task.execution_time, | ||||
|                         last_run=task.last_run, | ||||
|                         status=task.status, | ||||
|                         sync_status=task.sync_status, | ||||
|                     ) | ||||
|                 else: | ||||
|                     TaskResult.objects.create( | ||||
|                         task_id=task.id, | ||||
|                         agent_id=task.agent.id, | ||||
|                         retcode=task.retcode, | ||||
|                         stdout=task.stdout, | ||||
|                         stderr=task.stderr, | ||||
|                         execution_time=task.execution_time, | ||||
|                         last_run=task.last_run, | ||||
|                         status=task.status, | ||||
|                         sync_status=task.sync_status, | ||||
|                     ) | ||||
|         except IntegrityError: | ||||
|             continue | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|     atomic = False | ||||
|     dependencies = [ | ||||
|         ("autotasks", "0030_auto_20220401_2244"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RunPython(migrate_task_results), | ||||
|     ] | ||||
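This data migration sets atomic = False and wraps each copy in its own transaction.atomic() block, so a single IntegrityError (for example a duplicate (agent, task) pair hitting the new unique_together) skips that row instead of rolling back the whole batch. The same pattern in isolation, with hypothetical helper names:

    from django.db import transaction
    from django.db.utils import IntegrityError

    def copy_rows(rows, create_row):
        """Copy rows one at a time, skipping any row that violates a constraint."""
        copied = 0
        for row in rows:
            try:
                with transaction.atomic():  # per-row transaction, not one big batch
                    create_row(row)
                    copied += 1
            except IntegrityError:
                continue  # e.g. duplicate (agent, task); keep going with the rest
        return copied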
| @@ -0,0 +1,45 @@ | ||||
| # Generated by Django 3.2.12 on 2022-04-01 23:01 | ||||
|  | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('autotasks', '0031_auto_20220401_2249'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RemoveField( | ||||
|             model_name='automatedtask', | ||||
|             name='execution_time', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='automatedtask', | ||||
|             name='last_run', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='automatedtask', | ||||
|             name='parent_task', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='automatedtask', | ||||
|             name='retcode', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='automatedtask', | ||||
|             name='status', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='automatedtask', | ||||
|             name='stderr', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='automatedtask', | ||||
|             name='stdout', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='automatedtask', | ||||
|             name='sync_status', | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,53 @@ | ||||
| # Generated by Django 3.2.12 on 2022-04-02 00:41 | ||||
|  | ||||
| from django.db import migrations | ||||
| from django.utils.timezone import make_aware | ||||
|  | ||||
| from tacticalrmm.constants import TaskType | ||||
|  | ||||
|  | ||||
| def migrate_script_data(apps, schema_editor): | ||||
|     AutomatedTask = apps.get_model("autotasks", "AutomatedTask") | ||||
|     # convert autotask to the new format | ||||
|     for task in AutomatedTask.objects.all(): | ||||
|         try: | ||||
|             edited = False | ||||
|  | ||||
|             # convert scheduled task_type | ||||
|             if task.task_type == TaskType.SCHEDULED: | ||||
|                 task.task_type = TaskType.DAILY | ||||
|                 task.run_time_date = make_aware(task.run_time_minute.strptime("%H:%M")) | ||||
|                 task.daily_interval = 1 | ||||
|                 edited = True | ||||
|  | ||||
|             # convert actions | ||||
|             if not task.actions: | ||||
|                 if not task.script: | ||||
|                     task.delete() | ||||
|  | ||||
|                 task.actions = [ | ||||
|                     { | ||||
|                         "type": "script", | ||||
|                         "script": task.script.pk, | ||||
|                         "script_args": task.script_args, | ||||
|                         "timeout": task.timeout, | ||||
|                         "name": task.script.name, | ||||
|                     } | ||||
|                 ] | ||||
|                 edited = True | ||||
|  | ||||
|             if edited: | ||||
|                 task.save() | ||||
|         except: | ||||
|             continue | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("autotasks", "0032_auto_20220401_2301"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RunPython(migrate_script_data), | ||||
|     ] | ||||
| @@ -0,0 +1,25 @@ | ||||
| # Generated by Django 3.2.12 on 2022-04-02 00:46 | ||||
|  | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('autotasks', '0033_auto_20220402_0041'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RemoveField( | ||||
|             model_name='automatedtask', | ||||
|             name='script', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='automatedtask', | ||||
|             name='script_args', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='automatedtask', | ||||
|             name='timeout', | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,39 @@ | ||||
| # Generated by Django 4.0.3 on 2022-04-15 18:18 | ||||
|  | ||||
| from django.db import migrations | ||||
| from django.db.models import Count | ||||
|  | ||||
| from autotasks.models import generate_task_name | ||||
| from tacticalrmm.constants import TaskSyncStatus | ||||
|  | ||||
|  | ||||
| def check_for_win_task_name_duplicates(apps, schema_editor): | ||||
|     AutomatedTask = apps.get_model("autotasks", "AutomatedTask") | ||||
|     TaskResult = apps.get_model("autotasks", "TaskResult") | ||||
|  | ||||
|     duplicate_tasks = ( | ||||
|         AutomatedTask.objects.values("win_task_name") | ||||
|         .annotate(records=Count("win_task_name")) | ||||
|         .filter(records__gt=1) | ||||
|     ) | ||||
|     for task in duplicate_tasks: | ||||
|         dups = list(AutomatedTask.objects.filter(win_task_name=task["win_task_name"])) | ||||
|         for x in range(task["records"] - 1): | ||||
|  | ||||
|             dups[x].win_task_name = generate_task_name() | ||||
|             dups[x].save(update_fields=["win_task_name"]) | ||||
|             # update task_result sync status | ||||
|             TaskResult.objects.filter(task=dups[x]).update( | ||||
|                 sync_status=TaskSyncStatus.NOT_SYNCED | ||||
|             ) | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("autotasks", "0034_auto_20220402_0046"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RunPython(check_for_win_task_name_duplicates), | ||||
|     ] | ||||
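| # Editor's sketch: with three tasks sharing the hypothetical name "TacticalRMM_abc", the | ||||
| # values()/annotate(Count)/filter(records__gt=1) chain above yields | ||||
| #     [{"win_task_name": "TacticalRMM_abc", "records": 3}] | ||||
| # and the loop renames two of the three via generate_task_name(), marking their TaskResults | ||||
| # notsynced so the agents recreate them under the new names. | ||||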
| @@ -0,0 +1,20 @@ | ||||
| # Generated by Django 4.0.3 on 2022-04-15 20:52 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
| import autotasks.models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('autotasks', '0035_auto_20220415_1818'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name='automatedtask', | ||||
|             name='win_task_name', | ||||
|             field=models.CharField(blank=True, default=autotasks.models.generate_task_name, max_length=255, unique=True), | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 4.0.5 on 2022-06-29 07:57 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('autotasks', '0036_alter_automatedtask_win_task_name'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name='taskresult', | ||||
|             name='retcode', | ||||
|             field=models.BigIntegerField(blank=True, null=True), | ||||
|         ), | ||||
|     ] | ||||
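| # Editor's note: retcode becomes a BigIntegerField presumably because Windows often reports exit | ||||
| # codes as unsigned 32-bit values, e.g. 0xC0000005 == 3221225477, which overflows a signed 32-bit | ||||
| # IntegerField (max 2147483647). | ||||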
| @@ -1,20 +1,34 @@ | ||||
| import asyncio | ||||
| import datetime as dt | ||||
| import random | ||||
| import string | ||||
| from typing import List | ||||
| from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union | ||||
|  | ||||
| import pytz | ||||
| from alerts.models import SEVERITY_CHOICES | ||||
| from django.contrib.postgres.fields import ArrayField | ||||
| from django.core.cache import cache | ||||
| from django.core.validators import MaxValueValidator, MinValueValidator | ||||
| from django.db import models | ||||
| from django.db.models.fields import DateTimeField | ||||
| from django.db.models.fields.json import JSONField | ||||
| from django.db.utils import DatabaseError | ||||
| from django.utils import timezone as djangotime | ||||
|  | ||||
| from core.utils import get_core_settings | ||||
| from logs.models import BaseAuditModel, DebugLog | ||||
| from packaging import version as pyver | ||||
| from tacticalrmm.constants import ( | ||||
|     FIELDS_TRIGGER_TASK_UPDATE_AGENT, | ||||
|     POLICY_TASK_FIELDS_TO_COPY, | ||||
|     AlertSeverity, | ||||
|     DebugLogType, | ||||
|     TaskStatus, | ||||
|     TaskSyncStatus, | ||||
|     TaskType, | ||||
| ) | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from automation.models import Policy | ||||
|     from alerts.models import Alert, AlertTemplate | ||||
|     from agents.models import Agent | ||||
|     from checks.models import Check | ||||
|  | ||||
| from tacticalrmm.models import PermissionQuerySet | ||||
| from tacticalrmm.utils import ( | ||||
| @@ -25,29 +39,10 @@ from tacticalrmm.utils import ( | ||||
|     convert_to_iso_duration, | ||||
| ) | ||||
|  | ||||
| TASK_TYPE_CHOICES = [ | ||||
|     ("daily", "Daily"), | ||||
|     ("weekly", "Weekly"), | ||||
|     ("monthly", "Monthly"), | ||||
|     ("monthlydow", "Monthly Day of Week"), | ||||
|     ("checkfailure", "On Check Failure"), | ||||
|     ("manual", "Manual"), | ||||
|     ("runonce", "Run Once"), | ||||
|     ("scheduled", "Scheduled"),  # deprecated | ||||
| ] | ||||
|  | ||||
| SYNC_STATUS_CHOICES = [ | ||||
|     ("synced", "Synced With Agent"), | ||||
|     ("notsynced", "Waiting On Agent Checkin"), | ||||
|     ("pendingdeletion", "Pending Deletion on Agent"), | ||||
|     ("initial", "Initial Task Sync"), | ||||
| ] | ||||
|  | ||||
| TASK_STATUS_CHOICES = [ | ||||
|     ("passing", "Passing"), | ||||
|     ("failing", "Failing"), | ||||
|     ("pending", "Pending"), | ||||
| ] | ||||
| def generate_task_name() -> str: | ||||
|     chars = string.ascii_letters | ||||
|     return "TacticalRMM_" + "".join(random.choice(chars) for i in range(35)) | ||||
|  | ||||
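| # Editor's example (letters are random, shown for illustration only): generate_task_name() | ||||
| # returns something like "TacticalRMM_kQzWbYpLmNrTdCfHsJaUeXoGvBiKwPyMlZq", i.e. a fixed prefix | ||||
| # plus 35 ASCII letters. | ||||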
|  | ||||
| class AutomatedTask(BaseAuditModel): | ||||
| @@ -75,53 +70,21 @@ class AutomatedTask(BaseAuditModel): | ||||
|         on_delete=models.SET_NULL, | ||||
|     ) | ||||
|  | ||||
|     # deprecated | ||||
|     script = models.ForeignKey( | ||||
|         "scripts.Script", | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         related_name="autoscript", | ||||
|         on_delete=models.SET_NULL, | ||||
|     ) | ||||
|     # deprecated | ||||
|     script_args = ArrayField( | ||||
|         models.CharField(max_length=255, null=True, blank=True), | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     # deprecated | ||||
|     timeout = models.PositiveIntegerField(blank=True, default=120) | ||||
|  | ||||
|     # format -> {"actions": [{"type": "script", "script": 1, "name": "Script Name", "timeout": 90, "script_args": []}, {"type": "cmd", "command": "whoami", "timeout": 90}]} | ||||
|     # format -> [{"type": "script", "script": 1, "name": "Script Name", "timeout": 90, "script_args": []}, {"type": "cmd", "command": "whoami", "timeout": 90}] | ||||
|     actions = JSONField(default=list) | ||||
|     assigned_check = models.ForeignKey( | ||||
|         "checks.Check", | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         related_name="assignedtask", | ||||
|         related_name="assignedtasks", | ||||
|         on_delete=models.SET_NULL, | ||||
|     ) | ||||
|     name = models.CharField(max_length=255) | ||||
|     collector_all_output = models.BooleanField(default=False) | ||||
|     managed_by_policy = models.BooleanField(default=False) | ||||
|     parent_task = models.PositiveIntegerField(null=True, blank=True) | ||||
|     retvalue = models.TextField(null=True, blank=True) | ||||
|     retcode = models.IntegerField(null=True, blank=True) | ||||
|     stdout = models.TextField(null=True, blank=True) | ||||
|     stderr = models.TextField(null=True, blank=True) | ||||
|     execution_time = models.CharField(max_length=100, default="0.0000") | ||||
|     last_run = models.DateTimeField(null=True, blank=True) | ||||
|     enabled = models.BooleanField(default=True) | ||||
|     continue_on_error = models.BooleanField(default=True) | ||||
|     status = models.CharField( | ||||
|         max_length=30, choices=TASK_STATUS_CHOICES, default="pending" | ||||
|     ) | ||||
|     sync_status = models.CharField( | ||||
|         max_length=100, choices=SYNC_STATUS_CHOICES, default="initial" | ||||
|     ) | ||||
|     alert_severity = models.CharField( | ||||
|         max_length=30, choices=SEVERITY_CHOICES, default="info" | ||||
|         max_length=30, choices=AlertSeverity.choices, default=AlertSeverity.INFO | ||||
|     ) | ||||
|     email_alert = models.BooleanField(default=False) | ||||
|     text_alert = models.BooleanField(default=False) | ||||
| @@ -130,9 +93,11 @@ class AutomatedTask(BaseAuditModel): | ||||
|     # options sent to agent for task creation | ||||
|     # general task settings | ||||
|     task_type = models.CharField( | ||||
|         max_length=100, choices=TASK_TYPE_CHOICES, default="manual" | ||||
|         max_length=100, choices=TaskType.choices, default=TaskType.MANUAL | ||||
|     ) | ||||
|     win_task_name = models.CharField(max_length=255, null=True, blank=True) | ||||
|     win_task_name = models.CharField( | ||||
|         max_length=255, unique=True, blank=True, default=generate_task_name | ||||
|     ) | ||||
|     run_time_date = DateTimeField(null=True, blank=True) | ||||
|     expire_date = DateTimeField(null=True, blank=True) | ||||
|  | ||||
| @@ -166,144 +131,89 @@ class AutomatedTask(BaseAuditModel): | ||||
|     run_asap_after_missed = models.BooleanField(default=False)  # added in agent v1.4.7 | ||||
|     task_instance_policy = models.PositiveSmallIntegerField(blank=True, default=1) | ||||
|  | ||||
|     def __str__(self): | ||||
|     # deprecated | ||||
|     managed_by_policy = models.BooleanField(default=False) | ||||
|  | ||||
|     # non-database property | ||||
|     task_result: "Union[TaskResult, Dict[None, None]]" = {} | ||||
|  | ||||
|     def __str__(self) -> str: | ||||
|         return self.name | ||||
|  | ||||
|     def save(self, *args, **kwargs): | ||||
|         from automation.tasks import update_policy_autotasks_fields_task | ||||
|         from autotasks.tasks import modify_win_task | ||||
|     def save(self, *args, **kwargs) -> None: | ||||
|  | ||||
|         # get old agent if exists | ||||
|         # if task is a policy task clear cache on everything | ||||
|         if self.policy: | ||||
|             cache.delete_many_pattern("site_*_tasks") | ||||
|             cache.delete_many_pattern("agent_*_tasks") | ||||
|  | ||||
|         # get old task if exists | ||||
|         old_task = AutomatedTask.objects.get(pk=self.pk) if self.pk else None | ||||
|         super(AutomatedTask, self).save(old_model=old_task, *args, **kwargs) | ||||
|  | ||||
|         # check if fields were updated that require a sync to the agent | ||||
|         update_agent = False | ||||
|         # check if fields were updated that require a sync to the agent and set status to notsynced | ||||
|         if old_task: | ||||
|             for field in self.fields_that_trigger_task_update_on_agent: | ||||
|                 if getattr(self, field) != getattr(old_task, field): | ||||
|                     update_agent = True | ||||
|                     break | ||||
|                     if self.policy: | ||||
|                         TaskResult.objects.exclude( | ||||
|                             sync_status=TaskSyncStatus.INITIAL | ||||
|                         ).filter(task__policy_id=self.policy.id).update( | ||||
|                             sync_status=TaskSyncStatus.NOT_SYNCED | ||||
|                         ) | ||||
|                     else: | ||||
|                         TaskResult.objects.filter(agent=self.agent, task=self).update( | ||||
|                             sync_status=TaskSyncStatus.NOT_SYNCED | ||||
|                         ) | ||||
|  | ||||
|         # check if automated task was enabled/disabled and send celery task | ||||
|         if old_task and old_task.agent and update_agent: | ||||
|             modify_win_task.delay(pk=self.pk) | ||||
|     def delete(self, *args, **kwargs): | ||||
|  | ||||
|         # check if policy task was edited and then check if it was a field worth copying to rest of agent tasks | ||||
|         elif old_task and old_task.policy: | ||||
|             if update_agent: | ||||
|                 update_policy_autotasks_fields_task.delay( | ||||
|                     task=self.pk, update_agent=update_agent | ||||
|                 ) | ||||
|             else: | ||||
|                 for field in self.policy_fields_to_copy: | ||||
|                     if getattr(self, field) != getattr(old_task, field): | ||||
|                         update_policy_autotasks_fields_task.delay(task=self.pk) | ||||
|                         break | ||||
|         # if task is a policy task clear cache on everything | ||||
|         if self.policy: | ||||
|             cache.delete_many_pattern("site_*_tasks") | ||||
|             cache.delete_many_pattern("agent_*_tasks") | ||||
|  | ||||
|         super(AutomatedTask, self).delete( | ||||
|             *args, | ||||
|             **kwargs, | ||||
|         ) | ||||
|  | ||||
|     @property | ||||
|     def schedule(self): | ||||
|         if self.task_type == "manual": | ||||
|     def schedule(self) -> Optional[str]: | ||||
|         if self.task_type == TaskType.MANUAL: | ||||
|             return "Manual" | ||||
|         elif self.task_type == "checkfailure": | ||||
|         elif self.task_type == TaskType.CHECK_FAILURE: | ||||
|             return "Every time check fails" | ||||
|         elif self.task_type == "runonce": | ||||
|         elif self.task_type == TaskType.RUN_ONCE: | ||||
|             return f'Run once on {self.run_time_date.strftime("%m/%d/%Y %I:%M%p")}' | ||||
|         elif self.task_type == "daily": | ||||
|         elif self.task_type == TaskType.DAILY: | ||||
|             run_time_nice = self.run_time_date.strftime("%I:%M%p") | ||||
|             if self.daily_interval == 1: | ||||
|                 return f"Daily at {run_time_nice}" | ||||
|             else: | ||||
|                 return f"Every {self.daily_interval} days at {run_time_nice}" | ||||
|         elif self.task_type == "weekly": | ||||
|         elif self.task_type == TaskType.WEEKLY: | ||||
|             run_time_nice = self.run_time_date.strftime("%I:%M%p") | ||||
|             days = bitdays_to_string(self.run_time_bit_weekdays) | ||||
|             if self.weekly_interval == 1: | ||||
|                 return f"{days} at {run_time_nice}" | ||||
|             else: | ||||
|                 return f"{days} at {run_time_nice} every {self.weekly_interval} weeks" | ||||
|         elif self.task_type == "monthly": | ||||
|         elif self.task_type == TaskType.MONTHLY: | ||||
|             run_time_nice = self.run_time_date.strftime("%I:%M%p") | ||||
|             months = bitmonths_to_string(self.monthly_months_of_year) | ||||
|             days = bitmonthdays_to_string(self.monthly_days_of_month) | ||||
|             return f"Runs on {months} on days {days} at {run_time_nice}" | ||||
|         elif self.task_type == "monthlydow": | ||||
|         elif self.task_type == TaskType.MONTHLY_DOW: | ||||
|             run_time_nice = self.run_time_date.strftime("%I:%M%p") | ||||
|             months = bitmonths_to_string(self.monthly_months_of_year) | ||||
|             weeks = bitweeks_to_string(self.monthly_weeks_of_month) | ||||
|             days = bitdays_to_string(self.run_time_bit_weekdays) | ||||
|             return f"Runs on {months} on {weeks} on {days} at {run_time_nice}" | ||||
|  | ||||
|     @property | ||||
|     def last_run_as_timezone(self): | ||||
|         if self.last_run is not None and self.agent is not None: | ||||
|             return self.last_run.astimezone( | ||||
|                 pytz.timezone(self.agent.timezone) | ||||
|             ).strftime("%b-%d-%Y - %H:%M") | ||||
|  | ||||
|         return self.last_run | ||||
|  | ||||
|     # These fields will be duplicated on the agent tasks that are managed by a policy | ||||
|     @property | ||||
|     def policy_fields_to_copy(self) -> List[str]: | ||||
|         return [ | ||||
|             "alert_severity", | ||||
|             "email_alert", | ||||
|             "text_alert", | ||||
|             "dashboard_alert", | ||||
|             "assigned_check", | ||||
|             "name", | ||||
|             "actions", | ||||
|             "run_time_bit_weekdays", | ||||
|             "run_time_date", | ||||
|             "expire_date", | ||||
|             "daily_interval", | ||||
|             "weekly_interval", | ||||
|             "task_type", | ||||
|             "win_task_name", | ||||
|             "enabled", | ||||
|             "remove_if_not_scheduled", | ||||
|             "run_asap_after_missed", | ||||
|             "custom_field", | ||||
|             "collector_all_output", | ||||
|             "monthly_days_of_month", | ||||
|             "monthly_months_of_year", | ||||
|             "monthly_weeks_of_month", | ||||
|             "task_repetition_duration", | ||||
|             "task_repetition_interval", | ||||
|             "stop_task_at_duration_end", | ||||
|             "random_task_delay", | ||||
|             "run_asap_after_missed", | ||||
|             "task_instance_policy", | ||||
|             "continue_on_error", | ||||
|         ] | ||||
|  | ||||
|     @property | ||||
|     def fields_that_trigger_task_update_on_agent(self) -> List[str]: | ||||
|         return [ | ||||
|             "run_time_bit_weekdays", | ||||
|             "run_time_date", | ||||
|             "expire_date", | ||||
|             "daily_interval", | ||||
|             "weekly_interval", | ||||
|             "enabled", | ||||
|             "remove_if_not_scheduled", | ||||
|             "run_asap_after_missed", | ||||
|             "monthly_days_of_month", | ||||
|             "monthly_months_of_year", | ||||
|             "monthly_weeks_of_month", | ||||
|             "task_repetition_duration", | ||||
|             "task_repetition_interval", | ||||
|             "stop_task_at_duration_end", | ||||
|             "random_task_delay", | ||||
|             "run_asap_after_missed", | ||||
|             "task_instance_policy", | ||||
|         ] | ||||
|  | ||||
|     @staticmethod | ||||
|     def generate_task_name(): | ||||
|         chars = string.ascii_letters | ||||
|         return "TacticalRMM_" + "".join(random.choice(chars) for i in range(35)) | ||||
|         return FIELDS_TRIGGER_TASK_UPDATE_AGENT | ||||
|  | ||||
|     @staticmethod | ||||
|     def serialize(task): | ||||
| @@ -312,53 +222,35 @@ class AutomatedTask(BaseAuditModel): | ||||
|  | ||||
|         return TaskAuditSerializer(task).data | ||||
|  | ||||
|     def create_policy_task(self, agent=None, policy=None, assigned_check=None): | ||||
|  | ||||
|         # added to allow new policy tasks to be assigned to check only when the agent check exists already | ||||
|         if ( | ||||
|             self.assigned_check | ||||
|             and agent | ||||
|             and agent.agentchecks.filter(parent_check=self.assigned_check.id).exists() | ||||
|         ): | ||||
|             assigned_check = agent.agentchecks.get(parent_check=self.assigned_check.id) | ||||
|  | ||||
|         # if policy is present, then this task is being copied to another policy | ||||
|         # if agent is present, then this task is being created on an agent from a policy | ||||
|         # exit if neither are set or if both are set | ||||
|         # also exit if assigned_check is set because this task will be created when the check is | ||||
|         if ( | ||||
|             (not agent and not policy) | ||||
|             or (agent and policy) | ||||
|             or (self.assigned_check and not assigned_check) | ||||
|         ): | ||||
|             return | ||||
|     def create_policy_task( | ||||
|         self, policy: "Policy", assigned_check: "Optional[Check]" = None | ||||
|     ) -> None: | ||||
|         ### Copies certain properties on this task (self) to a new task and sets it to the supplied Policy | ||||
|         fields_to_copy = POLICY_TASK_FIELDS_TO_COPY | ||||
|  | ||||
|         task = AutomatedTask.objects.create( | ||||
|             agent=agent, | ||||
|             policy=policy, | ||||
|             managed_by_policy=bool(agent), | ||||
|             parent_task=(self.pk if agent else None), | ||||
|             assigned_check=assigned_check, | ||||
|         ) | ||||
|  | ||||
|         for field in self.policy_fields_to_copy: | ||||
|             if field != "assigned_check": | ||||
|                 setattr(task, field, getattr(self, field)) | ||||
|         for field in fields_to_copy: | ||||
|             setattr(task, field, getattr(self, field)) | ||||
|  | ||||
|         task.save() | ||||
|  | ||||
|         if agent: | ||||
|             task.create_task_on_agent() | ||||
|  | ||||
|     # agent version >= 1.8.0 | ||||
|     def generate_nats_task_payload(self, editing=False): | ||||
|     def generate_nats_task_payload( | ||||
|         self, agent: "Optional[Agent]" = None, editing: bool = False | ||||
|     ) -> Dict[str, Any]: | ||||
|         task = { | ||||
|             "pk": self.pk, | ||||
|             "type": "rmm", | ||||
|             "name": self.win_task_name, | ||||
|             "overwrite_task": editing, | ||||
|             "enabled": self.enabled, | ||||
|             "trigger": self.task_type if self.task_type != "checkfailure" else "manual", | ||||
|             "trigger": self.task_type | ||||
|             if self.task_type != TaskType.CHECK_FAILURE | ||||
|             else TaskType.MANUAL, | ||||
|             "multiple_instances": self.task_instance_policy | ||||
|             if self.task_instance_policy | ||||
|             else 0, | ||||
| @@ -366,11 +258,29 @@ class AutomatedTask(BaseAuditModel): | ||||
|             if self.expire_date | ||||
|             else False, | ||||
|             "start_when_available": self.run_asap_after_missed | ||||
|             if self.task_type != "runonce" | ||||
|             if self.task_type != TaskType.RUN_ONCE | ||||
|             else True, | ||||
|         } | ||||
|  | ||||
|         if self.task_type in ["runonce", "daily", "weekly", "monthly", "monthlydow"]: | ||||
|         if self.task_type in [ | ||||
|             TaskType.RUN_ONCE, | ||||
|             TaskType.DAILY, | ||||
|             TaskType.WEEKLY, | ||||
|             TaskType.MONTHLY, | ||||
|             TaskType.MONTHLY_DOW, | ||||
|         ]: | ||||
|             # set runonce task in future if creating and run_asap_after_missed is set | ||||
|             if ( | ||||
|                 not editing | ||||
|                 and self.task_type == TaskType.RUN_ONCE | ||||
|                 and self.run_asap_after_missed | ||||
|                 and agent | ||||
|                 and self.run_time_date | ||||
|                 < djangotime.now().astimezone(pytz.timezone(agent.timezone)) | ||||
|             ): | ||||
|                 self.run_time_date = ( | ||||
|                     djangotime.now() + djangotime.timedelta(minutes=5) | ||||
|                 ).astimezone(pytz.timezone(agent.timezone)) | ||||
|  | ||||
|             task["start_year"] = int(self.run_time_date.strftime("%Y")) | ||||
|             task["start_month"] = int(self.run_time_date.strftime("%-m")) | ||||
| @@ -397,14 +307,14 @@ class AutomatedTask(BaseAuditModel): | ||||
|                 ) | ||||
|                 task["stop_at_duration_end"] = self.stop_task_at_duration_end | ||||
|  | ||||
|             if self.task_type == "daily": | ||||
|             if self.task_type == TaskType.DAILY: | ||||
|                 task["day_interval"] = self.daily_interval | ||||
|  | ||||
|             elif self.task_type == "weekly": | ||||
|             elif self.task_type == TaskType.WEEKLY: | ||||
|                 task["week_interval"] = self.weekly_interval | ||||
|                 task["days_of_week"] = self.run_time_bit_weekdays | ||||
|  | ||||
|             elif self.task_type == "monthly": | ||||
|             elif self.task_type == TaskType.MONTHLY: | ||||
|  | ||||
|                 # check if "last day" is configured | ||||
|                 if self.monthly_days_of_month >= 0x80000000: | ||||
| @@ -416,222 +326,152 @@ class AutomatedTask(BaseAuditModel): | ||||
|  | ||||
|                 task["months_of_year"] = self.monthly_months_of_year | ||||
|  | ||||
|             elif self.task_type == "monthlydow": | ||||
|             elif self.task_type == TaskType.MONTHLY_DOW: | ||||
|                 task["days_of_week"] = self.run_time_bit_weekdays | ||||
|                 task["months_of_year"] = self.monthly_months_of_year | ||||
|                 task["weeks_of_month"] = self.monthly_weeks_of_month | ||||
|  | ||||
|         return task | ||||
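| # Editor's sketch of a payload this method could produce for a daily task (values are | ||||
| # hypothetical; keys set outside the visible hunks are elided with "..."): | ||||
| # { | ||||
| #     "pk": 12, "type": "rmm", "name": "TacticalRMM_...", "overwrite_task": False, | ||||
| #     "enabled": True, "trigger": "daily", "multiple_instances": 1, | ||||
| #     "start_when_available": False, "start_year": 2022, "start_month": 4, ..., | ||||
| #     "day_interval": 1, | ||||
| # } | ||||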
|  | ||||
|     def create_task_on_agent(self): | ||||
|         from agents.models import Agent | ||||
|  | ||||
|         agent = ( | ||||
|             Agent.objects.filter(pk=self.agent.pk) | ||||
|             .only("pk", "version", "hostname", "agent_id") | ||||
|             .get() | ||||
|         ) | ||||
|  | ||||
|         if pyver.parse(agent.version) >= pyver.parse("1.8.0"): | ||||
|             nats_data = { | ||||
|                 "func": "schedtask", | ||||
|                 "schedtaskpayload": self.generate_nats_task_payload(), | ||||
|             } | ||||
|     def create_task_on_agent(self, agent: "Optional[Agent]" = None) -> str: | ||||
|         if self.policy and not agent: | ||||
|             return "agent parameter needs to be passed with policy task" | ||||
|         else: | ||||
|             agent = agent if self.policy else self.agent | ||||
|  | ||||
|             if self.task_type == "scheduled": | ||||
|                 nats_data = { | ||||
|                     "func": "schedtask", | ||||
|                     "schedtaskpayload": { | ||||
|                         "type": "rmm", | ||||
|                         "trigger": "weekly", | ||||
|                         "weekdays": self.run_time_bit_weekdays, | ||||
|                         "pk": self.pk, | ||||
|                         "name": self.win_task_name, | ||||
|                         "hour": dt.datetime.strptime( | ||||
|                             self.run_time_minute, "%H:%M" | ||||
|                         ).hour, | ||||
|                         "min": dt.datetime.strptime( | ||||
|                             self.run_time_minute, "%H:%M" | ||||
|                         ).minute, | ||||
|                     }, | ||||
|                 } | ||||
|         try: | ||||
|             task_result = TaskResult.objects.get(agent=agent, task=self) | ||||
|         except TaskResult.DoesNotExist: | ||||
|             task_result = TaskResult(agent=agent, task=self) | ||||
|             task_result.save() | ||||
|  | ||||
|             elif self.task_type == "runonce": | ||||
|                 # check if scheduled time is in the past | ||||
|                 agent_tz = pytz.timezone(agent.timezone) | ||||
|                 task_time_utc = self.run_time_date.replace(tzinfo=agent_tz).astimezone( | ||||
|                     pytz.utc | ||||
|                 ) | ||||
|                 now = djangotime.now() | ||||
|                 if task_time_utc < now: | ||||
|                     self.run_time_date = now.astimezone(agent_tz).replace( | ||||
|                         tzinfo=pytz.utc | ||||
|                     ) + djangotime.timedelta(minutes=5) | ||||
|                     self.save(update_fields=["run_time_date"]) | ||||
|         nats_data = { | ||||
|             "func": "schedtask", | ||||
|             "schedtaskpayload": self.generate_nats_task_payload(agent), | ||||
|         } | ||||
|  | ||||
|                 nats_data = { | ||||
|                     "func": "schedtask", | ||||
|                     "schedtaskpayload": { | ||||
|                         "type": "rmm", | ||||
|                         "trigger": "once", | ||||
|                         "pk": self.pk, | ||||
|                         "name": self.win_task_name, | ||||
|                         "year": int(dt.datetime.strftime(self.run_time_date, "%Y")), | ||||
|                         "month": dt.datetime.strftime(self.run_time_date, "%B"), | ||||
|                         "day": int(dt.datetime.strftime(self.run_time_date, "%d")), | ||||
|                         "hour": int(dt.datetime.strftime(self.run_time_date, "%H")), | ||||
|                         "min": int(dt.datetime.strftime(self.run_time_date, "%M")), | ||||
|                     }, | ||||
|                 } | ||||
|  | ||||
|                 if self.run_asap_after_missed: | ||||
|                     nats_data["schedtaskpayload"]["run_asap_after_missed"] = True | ||||
|  | ||||
|                 if self.remove_if_not_scheduled: | ||||
|                     nats_data["schedtaskpayload"]["deleteafter"] = True | ||||
|  | ||||
|             elif self.task_type == "checkfailure" or self.task_type == "manual": | ||||
|                 nats_data = { | ||||
|                     "func": "schedtask", | ||||
|                     "schedtaskpayload": { | ||||
|                         "type": "rmm", | ||||
|                         "trigger": "manual", | ||||
|                         "pk": self.pk, | ||||
|                         "name": self.win_task_name, | ||||
|                     }, | ||||
|                 } | ||||
|             else: | ||||
|                 return "error" | ||||
|  | ||||
|         r = asyncio.run(agent.nats_cmd(nats_data, timeout=5)) | ||||
|         r = asyncio.run(task_result.agent.nats_cmd(nats_data, timeout=5)) | ||||
|  | ||||
|         if r != "ok": | ||||
|             self.sync_status = "initial" | ||||
|             self.save(update_fields=["sync_status"]) | ||||
|             task_result.sync_status = TaskSyncStatus.INITIAL | ||||
|             task_result.save(update_fields=["sync_status"]) | ||||
|             DebugLog.warning( | ||||
|                 agent=agent, | ||||
|                 log_type="agent_issues", | ||||
|                 message=f"Unable to create scheduled task {self.name} on {agent.hostname}. It will be created when the agent checks in.", | ||||
|                 log_type=DebugLogType.AGENT_ISSUES, | ||||
|                 message=f"Unable to create scheduled task {self.name} on {task_result.agent.hostname}. It will be created when the agent checks in.", | ||||
|             ) | ||||
|             return "timeout" | ||||
|         else: | ||||
|             self.sync_status = "synced" | ||||
|             self.save(update_fields=["sync_status"]) | ||||
|             task_result.sync_status = TaskSyncStatus.SYNCED | ||||
|             task_result.save(update_fields=["sync_status"]) | ||||
|             DebugLog.info( | ||||
|                 agent=agent, | ||||
|                 log_type="agent_issues", | ||||
|                 message=f"{agent.hostname} task {self.name} was successfully created", | ||||
|                 log_type=DebugLogType.AGENT_ISSUES, | ||||
|                 message=f"{task_result.agent.hostname} task {self.name} was successfully created", | ||||
|             ) | ||||
|  | ||||
|         return "ok" | ||||
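| # Editor's aside: the try/except TaskResult.DoesNotExist block above is roughly equivalent to | ||||
| # Django's shortcut | ||||
| #     task_result, _ = TaskResult.objects.get_or_create(agent=agent, task=self) | ||||
| # shown here only for reference; the explicit form is what the commit uses, here and in the | ||||
| # modify/delete/run methods below. | ||||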
|  | ||||
|     def modify_task_on_agent(self): | ||||
|         from agents.models import Agent | ||||
|  | ||||
|         agent = ( | ||||
|             Agent.objects.filter(pk=self.agent.pk) | ||||
|             .only("pk", "version", "hostname", "agent_id") | ||||
|             .get() | ||||
|         ) | ||||
|  | ||||
|         if pyver.parse(agent.version) >= pyver.parse("1.8.0"): | ||||
|             nats_data = { | ||||
|                 "func": "schedtask", | ||||
|                 "schedtaskpayload": self.generate_nats_task_payload(editing=True), | ||||
|             } | ||||
|     def modify_task_on_agent(self, agent: "Optional[Agent]" = None) -> str: | ||||
|         if self.policy and not agent: | ||||
|             return "agent parameter needs to be passed with policy task" | ||||
|         else: | ||||
|             nats_data = { | ||||
|                 "func": "enableschedtask", | ||||
|                 "schedtaskpayload": { | ||||
|                     "name": self.win_task_name, | ||||
|                     "enabled": self.enabled, | ||||
|                 }, | ||||
|             } | ||||
|         r = asyncio.run(agent.nats_cmd(nats_data, timeout=5)) | ||||
|             agent = agent if self.policy else self.agent | ||||
|  | ||||
|         try: | ||||
|             task_result = TaskResult.objects.get(agent=agent, task=self) | ||||
|         except TaskResult.DoesNotExist: | ||||
|             task_result = TaskResult(agent=agent, task=self) | ||||
|             task_result.save() | ||||
|  | ||||
|         nats_data = { | ||||
|             "func": "schedtask", | ||||
|             "schedtaskpayload": self.generate_nats_task_payload(editing=True), | ||||
|         } | ||||
|  | ||||
|         r = asyncio.run(task_result.agent.nats_cmd(nats_data, timeout=5)) | ||||
|  | ||||
|         if r != "ok": | ||||
|             self.sync_status = "notsynced" | ||||
|             self.save(update_fields=["sync_status"]) | ||||
|             task_result.sync_status = TaskSyncStatus.NOT_SYNCED | ||||
|             task_result.save(update_fields=["sync_status"]) | ||||
|             DebugLog.warning( | ||||
|                 agent=agent, | ||||
|                 log_type="agent_issues", | ||||
|                 message=f"Unable to modify scheduled task {self.name} on {agent.hostname}({agent.pk}). It will try again on next agent checkin", | ||||
|                 log_type=DebugLogType.AGENT_ISSUES, | ||||
|                 message=f"Unable to modify scheduled task {self.name} on {task_result.agent.hostname}({task_result.agent.agent_id}). It will try again on next agent checkin", | ||||
|             ) | ||||
|             return "timeout" | ||||
|         else: | ||||
|             self.sync_status = "synced" | ||||
|             self.save(update_fields=["sync_status"]) | ||||
|             task_result.sync_status = TaskSyncStatus.SYNCED | ||||
|             task_result.save(update_fields=["sync_status"]) | ||||
|             DebugLog.info( | ||||
|                 agent=agent, | ||||
|                 log_type="agent_issues", | ||||
|                 message=f"{agent.hostname} task {self.name} was successfully modified", | ||||
|                 log_type=DebugLogType.AGENT_ISSUES, | ||||
|                 message=f"{task_result.agent.hostname} task {self.name} was successfully modified", | ||||
|             ) | ||||
|  | ||||
|         return "ok" | ||||
|  | ||||
|     def delete_task_on_agent(self): | ||||
|         from agents.models import Agent | ||||
|     def delete_task_on_agent(self, agent: "Optional[Agent]" = None) -> str: | ||||
|         if self.policy and not agent: | ||||
|             return "agent parameter needs to be passed with policy task" | ||||
|         else: | ||||
|             agent = agent if self.policy else self.agent | ||||
|  | ||||
|         agent = ( | ||||
|             Agent.objects.filter(pk=self.agent.pk) | ||||
|             .only("pk", "version", "hostname", "agent_id") | ||||
|             .get() | ||||
|         ) | ||||
|         try: | ||||
|             task_result = TaskResult.objects.get(agent=agent, task=self) | ||||
|         except TaskResult.DoesNotExist: | ||||
|             task_result = TaskResult(agent=agent, task=self) | ||||
|             task_result.save() | ||||
|  | ||||
|         nats_data = { | ||||
|             "func": "delschedtask", | ||||
|             "schedtaskpayload": {"name": self.win_task_name}, | ||||
|         } | ||||
|         r = asyncio.run(agent.nats_cmd(nats_data, timeout=10)) | ||||
|         r = asyncio.run(task_result.agent.nats_cmd(nats_data, timeout=10)) | ||||
|  | ||||
|         if r != "ok" and "The system cannot find the file specified" not in r: | ||||
|             self.sync_status = "pendingdeletion" | ||||
|             task_result.sync_status = TaskSyncStatus.PENDING_DELETION | ||||
|  | ||||
|             try: | ||||
|                 self.save(update_fields=["sync_status"]) | ||||
|                 task_result.save(update_fields=["sync_status"]) | ||||
|             except DatabaseError: | ||||
|                 pass | ||||
|  | ||||
|             DebugLog.warning( | ||||
|                 agent=agent, | ||||
|                 log_type="agent_issues", | ||||
|                 message=f"{agent.hostname} task {self.name} will be deleted on next checkin", | ||||
|                 log_type=DebugLogType.AGENT_ISSUES, | ||||
|                 message=f"{task_result.agent.hostname} task {self.name} will be deleted on next checkin", | ||||
|             ) | ||||
|             return "timeout" | ||||
|         else: | ||||
|             self.delete() | ||||
|             DebugLog.info( | ||||
|                 agent=agent, | ||||
|                 log_type="agent_issues", | ||||
|                 message=f"{agent.hostname}({agent.pk}) task {self.name} was deleted", | ||||
|                 log_type=DebugLogType.AGENT_ISSUES, | ||||
|                 message=f"{task_result.agent.hostname}({task_result.agent.agent_id}) task {self.name} was deleted", | ||||
|             ) | ||||
|  | ||||
|         return "ok" | ||||
|  | ||||
|     def run_win_task(self): | ||||
|         from agents.models import Agent | ||||
|     def run_win_task(self, agent: "Optional[Agent]" = None) -> str: | ||||
|         if self.policy and not agent: | ||||
|             return "agent parameter needs to be passed with policy task" | ||||
|         else: | ||||
|             agent = agent if self.policy else self.agent | ||||
|  | ||||
|         agent = ( | ||||
|             Agent.objects.filter(pk=self.agent.pk) | ||||
|             .only("pk", "version", "hostname", "agent_id") | ||||
|             .get() | ||||
|         try: | ||||
|             task_result = TaskResult.objects.get(agent=agent, task=self) | ||||
|         except TaskResult.DoesNotExist: | ||||
|             task_result = TaskResult(agent=agent, task=self) | ||||
|             task_result.save() | ||||
|  | ||||
|         asyncio.run( | ||||
|             task_result.agent.nats_cmd( | ||||
|                 {"func": "runtask", "taskpk": self.pk}, wait=False | ||||
|             ) | ||||
|         ) | ||||
|  | ||||
|         asyncio.run(agent.nats_cmd({"func": "runtask", "taskpk": self.pk}, wait=False)) | ||||
|         return "ok" | ||||
|  | ||||
|     def save_collector_results(self): | ||||
|  | ||||
|         agent_field = self.custom_field.get_or_create_field_value(self.agent) | ||||
|  | ||||
|         value = ( | ||||
|             self.stdout.strip() | ||||
|             if self.collector_all_output | ||||
|             else self.stdout.strip().split("\n")[-1].strip() | ||||
|         ) | ||||
|         agent_field.save_to_field(value) | ||||
|  | ||||
|     def should_create_alert(self, alert_template=None): | ||||
|         return ( | ||||
|             self.dashboard_alert | ||||
| @@ -647,10 +487,64 @@ class AutomatedTask(BaseAuditModel): | ||||
|             ) | ||||
|         ) | ||||
|  | ||||
|     def send_email(self): | ||||
|         from core.models import CoreSettings | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
| class TaskResult(models.Model): | ||||
|     class Meta: | ||||
|         unique_together = (("agent", "task"),) | ||||
|  | ||||
|     objects = PermissionQuerySet.as_manager() | ||||
|  | ||||
|     agent = models.ForeignKey( | ||||
|         "agents.Agent", | ||||
|         related_name="taskresults", | ||||
|         on_delete=models.CASCADE, | ||||
|     ) | ||||
|     task = models.ForeignKey( | ||||
|         "autotasks.AutomatedTask", | ||||
|         related_name="taskresults", | ||||
|         on_delete=models.CASCADE, | ||||
|     ) | ||||
|  | ||||
|     retcode = models.BigIntegerField(null=True, blank=True) | ||||
|     stdout = models.TextField(null=True, blank=True) | ||||
|     stderr = models.TextField(null=True, blank=True) | ||||
|     execution_time = models.CharField(max_length=100, default="0.0000") | ||||
|     last_run = models.DateTimeField(null=True, blank=True) | ||||
|     status = models.CharField( | ||||
|         max_length=30, choices=TaskStatus.choices, default=TaskStatus.PENDING | ||||
|     ) | ||||
|     sync_status = models.CharField( | ||||
|         max_length=100, choices=TaskSyncStatus.choices, default=TaskSyncStatus.INITIAL | ||||
|     ) | ||||
|  | ||||
|     def __str__(self): | ||||
|         return f"{self.agent.hostname} - {self.task}" | ||||
|  | ||||
|     def get_or_create_alert_if_needed( | ||||
|         self, alert_template: "Optional[AlertTemplate]" | ||||
|     ) -> "Optional[Alert]": | ||||
|         from alerts.models import Alert | ||||
|  | ||||
|         return Alert.create_or_return_task_alert( | ||||
|             self.task, | ||||
|             agent=self.agent, | ||||
|             skip_create=not self.task.should_create_alert(alert_template), | ||||
|         ) | ||||
|  | ||||
|     def save_collector_results(self) -> None: | ||||
|  | ||||
|         agent_field = self.task.custom_field.get_or_create_field_value(self.agent) | ||||
|  | ||||
|         value = ( | ||||
|             self.stdout.strip() | ||||
|             if self.task.collector_all_output | ||||
|             else self.stdout.strip().split("\n")[-1].strip() | ||||
|         ) | ||||
|         agent_field.save_to_field(value) | ||||
|  | ||||
|     def send_email(self): | ||||
|         CORE = get_core_settings() | ||||
|  | ||||
|         # Format of Email sent when Task has email alert | ||||
|         if self.agent: | ||||
|             subject = f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - {self} Failed" | ||||
| @@ -662,12 +556,11 @@ class AutomatedTask(BaseAuditModel): | ||||
|             + f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}" | ||||
|         ) | ||||
|  | ||||
|         CORE.send_mail(subject, body, self.agent.alert_template)  # type: ignore | ||||
|         CORE.send_mail(subject, body, self.agent.alert_template) | ||||
|  | ||||
|     def send_sms(self): | ||||
|         from core.models import CoreSettings | ||||
|         CORE = get_core_settings() | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         # Format of SMS sent when Task has SMS alert | ||||
|         if self.agent: | ||||
|             subject = f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - {self} Failed" | ||||
| @@ -679,27 +572,24 @@ class AutomatedTask(BaseAuditModel): | ||||
|             + f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}" | ||||
|         ) | ||||
|  | ||||
|         CORE.send_sms(body, alert_template=self.agent.alert_template)  # type: ignore | ||||
|         CORE.send_sms(body, alert_template=self.agent.alert_template) | ||||
|  | ||||
|     def send_resolved_email(self): | ||||
|         from core.models import CoreSettings | ||||
|         CORE = get_core_settings() | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved" | ||||
|         body = ( | ||||
|             subject | ||||
|             + f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}" | ||||
|         ) | ||||
|  | ||||
|         CORE.send_mail(subject, body, alert_template=self.agent.alert_template)  # type: ignore | ||||
|         CORE.send_mail(subject, body, alert_template=self.agent.alert_template) | ||||
|  | ||||
|     def send_resolved_sms(self): | ||||
|         from core.models import CoreSettings | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         CORE = get_core_settings() | ||||
|         subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved" | ||||
|         body = ( | ||||
|             subject | ||||
|             + f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}" | ||||
|         ) | ||||
|         CORE.send_sms(body, alert_template=self.agent.alert_template)  # type: ignore | ||||
|         CORE.send_sms(body, alert_template=self.agent.alert_template) | ||||
|   | ||||
| @@ -4,7 +4,7 @@ from tacticalrmm.permissions import _has_perm, _has_perm_on_agent | ||||
|  | ||||
|  | ||||
| class AutoTaskPerms(permissions.BasePermission): | ||||
|     def has_permission(self, r, view): | ||||
|     def has_permission(self, r, view) -> bool: | ||||
|         if r.method == "GET": | ||||
|             if "agent_id" in view.kwargs.keys(): | ||||
|                 return _has_perm(r, "can_list_autotasks") and _has_perm_on_agent( | ||||
| @@ -17,5 +17,5 @@ class AutoTaskPerms(permissions.BasePermission): | ||||
|  | ||||
|  | ||||
| class RunAutoTaskPerms(permissions.BasePermission): | ||||
|     def has_permission(self, r, view): | ||||
|     def has_permission(self, r, view) -> bool: | ||||
|         return _has_perm(r, "can_run_autotasks") | ||||
|   | ||||
| @@ -1,20 +1,33 @@ | ||||
| from rest_framework import serializers | ||||
| from scripts.models import Script | ||||
| from django.core.exceptions import ObjectDoesNotExist | ||||
|  | ||||
| from .models import AutomatedTask | ||||
| from scripts.models import Script | ||||
| from tacticalrmm.constants import TaskType | ||||
|  | ||||
| from .models import AutomatedTask, TaskResult | ||||
|  | ||||
|  | ||||
| class TaskResultSerializer(serializers.ModelSerializer): | ||||
|     class Meta: | ||||
|         model = TaskResult | ||||
|         fields = "__all__" | ||||
|         read_only_fields = ("agent", "task") | ||||
|  | ||||
|  | ||||
| class TaskSerializer(serializers.ModelSerializer): | ||||
|  | ||||
|     check_name = serializers.ReadOnlyField(source="assigned_check.readable_desc") | ||||
|     schedule = serializers.ReadOnlyField() | ||||
|     last_run = serializers.ReadOnlyField(source="last_run_as_timezone") | ||||
|     alert_template = serializers.SerializerMethodField() | ||||
|     run_time_date = serializers.DateTimeField(format="iso-8601", required=False) | ||||
|     expire_date = serializers.DateTimeField( | ||||
|         format="iso-8601", allow_null=True, required=False | ||||
|     ) | ||||
|     run_time_date = serializers.DateTimeField(required=False) | ||||
|     expire_date = serializers.DateTimeField(allow_null=True, required=False) | ||||
|     task_result = serializers.SerializerMethodField() | ||||
|  | ||||
|     def get_task_result(self, obj): | ||||
|         return ( | ||||
|             TaskResultSerializer(obj.task_result).data | ||||
|             if isinstance(obj.task_result, TaskResult) | ||||
|             else {} | ||||
|         ) | ||||
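|     # Editor's sketch of how a caller might populate the non-database task_result attribute | ||||
|     # before serializing (the view code is outside this hunk; names below are illustrative only): | ||||
|     #     task.task_result = TaskResult.objects.filter(agent=agent, task=task).first() or {} | ||||
|     #     data = TaskSerializer(task).data  # data["task_result"] is {} when no result exists yet | ||||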
|  | ||||
|     def validate_actions(self, value): | ||||
|  | ||||
| @@ -86,7 +99,14 @@ class TaskSerializer(serializers.ModelSerializer): | ||||
|  | ||||
|         # run_time_date required | ||||
|         if ( | ||||
|             data["task_type"] in ["runonce", "daily", "weekly", "monthly", "monthlydow"] | ||||
|             data["task_type"] | ||||
|             in [ | ||||
|                 TaskType.RUN_ONCE, | ||||
|                 TaskType.DAILY, | ||||
|                 TaskType.WEEKLY, | ||||
|                 TaskType.MONTHLY, | ||||
|                 TaskType.MONTHLY_DOW, | ||||
|             ] | ||||
|             and not data["run_time_date"] | ||||
|         ): | ||||
|             raise serializers.ValidationError( | ||||
| @@ -94,14 +114,14 @@ class TaskSerializer(serializers.ModelSerializer): | ||||
|             ) | ||||
|  | ||||
|         # daily task type validation | ||||
|         if data["task_type"] == "daily": | ||||
|         if data["task_type"] == TaskType.DAILY: | ||||
|             if "daily_interval" not in data or not data["daily_interval"]: | ||||
|                 raise serializers.ValidationError( | ||||
|                     f"daily_interval is required for task_type '{data['task_type']}'" | ||||
|                 ) | ||||
|  | ||||
|         # weekly task type validation | ||||
|         elif data["task_type"] == "weekly": | ||||
|         elif data["task_type"] == TaskType.WEEKLY: | ||||
|             if "weekly_interval" not in data or not data["weekly_interval"]: | ||||
|                 raise serializers.ValidationError( | ||||
|                     f"weekly_interval is required for task_type '{data['task_type']}'" | ||||
| @@ -113,7 +133,7 @@ class TaskSerializer(serializers.ModelSerializer): | ||||
|                 ) | ||||
|  | ||||
|         # monthly task type validation | ||||
|         elif data["task_type"] == "monthly": | ||||
|         elif data["task_type"] == TaskType.MONTHLY: | ||||
|             if ( | ||||
|                 "monthly_months_of_year" not in data | ||||
|                 or not data["monthly_months_of_year"] | ||||
| @@ -128,7 +148,7 @@ class TaskSerializer(serializers.ModelSerializer): | ||||
|                 ) | ||||
|  | ||||
|         # monthly day of week task type validation | ||||
|         elif data["task_type"] == "monthlydow": | ||||
|         elif data["task_type"] == TaskType.MONTHLY_DOW: | ||||
|             if ( | ||||
|                 "monthly_months_of_year" not in data | ||||
|                 or not data["monthly_months_of_year"] | ||||
| @@ -151,7 +171,7 @@ class TaskSerializer(serializers.ModelSerializer): | ||||
|                 ) | ||||
|  | ||||
|         # check failure task type validation | ||||
|         elif data["task_type"] == "checkfailure": | ||||
|         elif data["task_type"] == TaskType.CHECK_FAILURE: | ||||
|             if "assigned_check" not in data or not data["assigned_check"]: | ||||
|                 raise serializers.ValidationError( | ||||
|                     f"assigned_check is required for task_type '{data['task_type']}'" | ||||
| @@ -187,13 +207,14 @@ class TaskGOGetSerializer(serializers.ModelSerializer): | ||||
|     def get_task_actions(self, obj): | ||||
|         tmp = [] | ||||
|         actions_to_remove = [] | ||||
|         agent = self.context["agent"] | ||||
|         for action in obj.actions: | ||||
|             if action["type"] == "cmd": | ||||
|                 tmp.append( | ||||
|                     { | ||||
|                         "type": "cmd", | ||||
|                         "command": Script.parse_script_args( | ||||
|                             agent=obj.agent, | ||||
|                             agent=agent, | ||||
|                             shell=action["shell"], | ||||
|                             args=[action["command"]], | ||||
|                         )[0], | ||||
| @@ -204,7 +225,7 @@ class TaskGOGetSerializer(serializers.ModelSerializer): | ||||
|             elif action["type"] == "script": | ||||
|                 try: | ||||
|                     script = Script.objects.get(pk=action["script"]) | ||||
|                 except ObjectDoesNotExist: | ||||
|                 except Script.DoesNotExist: | ||||
|                     # script doesn't exist so remove it | ||||
|                     actions_to_remove.append(action["script"]) | ||||
|                     continue | ||||
| @@ -214,12 +235,13 @@ class TaskGOGetSerializer(serializers.ModelSerializer): | ||||
|                         "script_name": script.name, | ||||
|                         "code": script.code, | ||||
|                         "script_args": Script.parse_script_args( | ||||
|                             agent=obj.agent, | ||||
|                             agent=agent, | ||||
|                             shell=script.shell, | ||||
|                             args=action["script_args"], | ||||
|                         ), | ||||
|                         "shell": script.shell, | ||||
|                         "timeout": action["timeout"], | ||||
|                         "run_as_user": script.run_as_user, | ||||
|                     } | ||||
|                 ) | ||||
|         if actions_to_remove: | ||||
| @@ -241,12 +263,6 @@ class TaskGOGetSerializer(serializers.ModelSerializer): | ||||
|         fields = ["id", "continue_on_error", "enabled", "task_actions"] | ||||
|  | ||||
|  | ||||
| class TaskRunnerPatchSerializer(serializers.ModelSerializer): | ||||
|     class Meta: | ||||
|         model = AutomatedTask | ||||
|         fields = "__all__" | ||||
|  | ||||
|  | ||||
| class TaskAuditSerializer(serializers.ModelSerializer): | ||||
|     class Meta: | ||||
|         model = AutomatedTask | ||||
|   | ||||
| @@ -2,121 +2,145 @@ import asyncio | ||||
| import datetime as dt | ||||
| import random | ||||
| from time import sleep | ||||
| from typing import Union | ||||
| from typing import Optional, Union | ||||
|  | ||||
| from autotasks.models import AutomatedTask | ||||
| from django.utils import timezone as djangotime | ||||
|  | ||||
| from agents.models import Agent | ||||
| from alerts.models import Alert | ||||
| from autotasks.models import AutomatedTask, TaskResult | ||||
| from logs.models import DebugLog | ||||
|  | ||||
| from tacticalrmm.celery import app | ||||
| from tacticalrmm.constants import DebugLogType | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def create_win_task_schedule(pk): | ||||
|     task = AutomatedTask.objects.get(pk=pk) | ||||
| def create_win_task_schedule(pk: int, agent_id: Optional[str] = None) -> str: | ||||
|     try: | ||||
|         task = AutomatedTask.objects.get(pk=pk) | ||||
|  | ||||
|     task.create_task_on_agent() | ||||
|         if agent_id: | ||||
|             task.create_task_on_agent(Agent.objects.get(agent_id=agent_id)) | ||||
|         else: | ||||
|             task.create_task_on_agent() | ||||
|     except (AutomatedTask.DoesNotExist, Agent.DoesNotExist): | ||||
|         pass | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def modify_win_task(pk): | ||||
|     task = AutomatedTask.objects.get(pk=pk) | ||||
| def modify_win_task(pk: int, agent_id: Optional[str] = None) -> str: | ||||
|     try: | ||||
|         task = AutomatedTask.objects.get(pk=pk) | ||||
|  | ||||
|     task.modify_task_on_agent() | ||||
|         if agent_id: | ||||
|             task.modify_task_on_agent(Agent.objects.get(agent_id=agent_id)) | ||||
|         else: | ||||
|             task.modify_task_on_agent() | ||||
|     except (AutomatedTask.DoesNotExist, Agent.DoesNotExist): | ||||
|         pass | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def delete_win_task_schedule(pk): | ||||
|     task = AutomatedTask.objects.get(pk=pk) | ||||
| def delete_win_task_schedule(pk: int, agent_id: Optional[str] = None) -> str: | ||||
|     try: | ||||
|         task = AutomatedTask.objects.get(pk=pk) | ||||
|  | ||||
|         if agent_id: | ||||
|             task.delete_task_on_agent(Agent.objects.get(agent_id=agent_id)) | ||||
|         else: | ||||
|             task.delete_task_on_agent() | ||||
|     except (AutomatedTask.DoesNotExist, Agent.DoesNotExist): | ||||
|         pass | ||||
|  | ||||
|     task.delete_task_on_agent() | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def run_win_task(pk): | ||||
|     task = AutomatedTask.objects.get(pk=pk) | ||||
|     task.run_win_task() | ||||
| def run_win_task(pk: int, agent_id: Optional[str] = None) -> str: | ||||
|     try: | ||||
|         task = AutomatedTask.objects.get(pk=pk) | ||||
|  | ||||
|         if agent_id: | ||||
|             task.run_win_task(Agent.objects.get(agent_id=agent_id)) | ||||
|         else: | ||||
|             task.run_win_task() | ||||
|     except (AutomatedTask.DoesNotExist, Agent.DoesNotExist): | ||||
|         pass | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def remove_orphaned_win_tasks(agentpk): | ||||
| def remove_orphaned_win_tasks() -> None: | ||||
|     from agents.models import Agent | ||||
|  | ||||
|     agent = Agent.objects.get(pk=agentpk) | ||||
|     for agent in Agent.online_agents(): | ||||
|         r = asyncio.run(agent.nats_cmd({"func": "listschedtasks"}, timeout=10)) | ||||
|  | ||||
|     DebugLog.info( | ||||
|         agent=agent, | ||||
|         log_type="agent_issues", | ||||
|         message=f"Orphaned task cleanup initiated on {agent.hostname}.", | ||||
|     ) | ||||
|  | ||||
|     r = asyncio.run(agent.nats_cmd({"func": "listschedtasks"}, timeout=10)) | ||||
|  | ||||
|     if not isinstance(r, list) and not r:  # empty list | ||||
|         DebugLog.error( | ||||
|             agent=agent, | ||||
|             log_type="agent_issues", | ||||
|             message=f"Unable to clean up scheduled tasks on {agent.hostname}: {r}", | ||||
|         ) | ||||
|         return "notlist" | ||||
|  | ||||
|     agent_task_names = list(agent.autotasks.values_list("win_task_name", flat=True)) | ||||
|  | ||||
|     exclude_tasks = ( | ||||
|         "TacticalRMM_fixmesh", | ||||
|         "TacticalRMM_SchedReboot", | ||||
|         "TacticalRMM_sync", | ||||
|         "TacticalRMM_agentupdate", | ||||
|     ) | ||||
|  | ||||
|     for task in r: | ||||
|         if task.startswith(exclude_tasks): | ||||
|             # skip system tasks or any pending reboots | ||||
|         if not isinstance(r, list):  # agent returned an error string instead of a task list | ||||
|             DebugLog.error( | ||||
|                 agent=agent, | ||||
|                 log_type=DebugLogType.AGENT_ISSUES, | ||||
|                 message=f"Unable to pull list of scheduled tasks on {agent.hostname}: {r}", | ||||
|             ) | ||||
|             continue | ||||
|  | ||||
|         if task.startswith("TacticalRMM_") and task not in agent_task_names: | ||||
|             # delete task since it doesn't exist in UI | ||||
|             nats_data = { | ||||
|                 "func": "delschedtask", | ||||
|                 "schedtaskpayload": {"name": task}, | ||||
|             } | ||||
|             ret = asyncio.run(agent.nats_cmd(nats_data, timeout=10)) | ||||
|             if ret != "ok": | ||||
|                 DebugLog.error( | ||||
|                     agent=agent, | ||||
|                     log_type="agent_issues", | ||||
|                     message=f"Unable to clean up orphaned task {task} on {agent.hostname}: {ret}", | ||||
|                 ) | ||||
|             else: | ||||
|                 DebugLog.info( | ||||
|                     agent=agent, | ||||
|                     log_type="agent_issues", | ||||
|                     message=f"Removed orphaned task {task} from {agent.hostname}", | ||||
|                 ) | ||||
|         agent_task_names = [ | ||||
|             task.win_task_name for task in agent.get_tasks_with_policies() | ||||
|         ] | ||||
|  | ||||
|     DebugLog.info( | ||||
|         agent=agent, | ||||
|         log_type="agent_issues", | ||||
|         message=f"Orphaned task cleanup finished on {agent.hostname}", | ||||
|     ) | ||||
|         exclude_tasks = ( | ||||
|             "TacticalRMM_fixmesh", | ||||
|             "TacticalRMM_SchedReboot", | ||||
|             "TacticalRMM_sync", | ||||
|             "TacticalRMM_agentupdate", | ||||
|         ) | ||||
|  | ||||
|         for task in r: | ||||
|             if task.startswith(exclude_tasks): | ||||
|                 # skip system tasks or any pending reboots | ||||
|                 continue | ||||
|  | ||||
|             if task.startswith("TacticalRMM_") and task not in agent_task_names: | ||||
|                 # delete task since it doesn't exist in UI | ||||
|                 nats_data = { | ||||
|                     "func": "delschedtask", | ||||
|                     "schedtaskpayload": {"name": task}, | ||||
|                 } | ||||
|                 ret = asyncio.run(agent.nats_cmd(nats_data, timeout=10)) | ||||
|                 if ret != "ok": | ||||
|                     DebugLog.error( | ||||
|                         agent=agent, | ||||
|                         log_type=DebugLogType.AGENT_ISSUES, | ||||
|                         message=f"Unable to clean up orphaned task {task} on {agent.hostname}: {ret}", | ||||
|                     ) | ||||
|                 else: | ||||
|                     DebugLog.info( | ||||
|                         agent=agent, | ||||
|                         log_type=DebugLogType.AGENT_ISSUES, | ||||
|                         message=f"Removed orphaned task {task} from {agent.hostname}", | ||||
|                     ) | ||||
|  | ||||
|  | ||||
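The cleanup loop above leans on the fact that `str.startswith` accepts a tuple of prefixes, so a single call skips every protected system task before the orphan check runs. A small standalone illustration of that filter follows; the sample scheduled-task names are invented, and only the exclude list mirrors the diff.

```python
# Standalone sketch of the prefix filter used by remove_orphaned_win_tasks above.
# The scheduled-task names below are made up for the example.
exclude_tasks = (
    "TacticalRMM_fixmesh",
    "TacticalRMM_SchedReboot",
    "TacticalRMM_sync",
    "TacticalRMM_agentupdate",
)

scheduled = [
    "TacticalRMM_SchedReboot_abc123",  # protected: pending reboot, skipped
    "TacticalRMM_deadbeef",            # not in the DB -> orphan
    "GoogleUpdateTaskMachineCore",     # third-party task, ignored
]

db_task_names = {"TacticalRMM_known_task"}

orphans = [
    name
    for name in scheduled
    if not name.startswith(exclude_tasks)  # startswith accepts a tuple of prefixes
    and name.startswith("TacticalRMM_")
    and name not in db_task_names
]
print(orphans)  # ['TacticalRMM_deadbeef']
```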
| @app.task | ||||
| def handle_task_email_alert(pk: int, alert_interval: Union[float, None] = None) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     try: | ||||
|         alert = Alert.objects.get(pk=pk) | ||||
|     except Alert.DoesNotExist: | ||||
|         return "alert not found" | ||||
|  | ||||
|     # first time sending email | ||||
|     if not alert.email_sent: | ||||
|         task_result = TaskResult.objects.get( | ||||
|             task=alert.assigned_task, agent=alert.agent | ||||
|         ) | ||||
|         sleep(random.randint(1, 5)) | ||||
|         task_result.send_email() | ||||
|         alert.email_sent = djangotime.now() | ||||
|         alert.save(update_fields=["email_sent"]) | ||||
|     else: | ||||
| @@ -124,8 +148,11 @@ def handle_task_email_alert(pk: int, alert_interval: Union[float, None] = None) | ||||
|             # send an email only if the last email sent is older than alert interval | ||||
|             delta = djangotime.now() - dt.timedelta(days=alert_interval) | ||||
|             if alert.email_sent < delta: | ||||
|                 task_result = TaskResult.objects.get( | ||||
|                     task=alert.assigned_task, agent=alert.agent | ||||
|                 ) | ||||
|                 sleep(random.randint(1, 5)) | ||||
|                 task_result.send_email() | ||||
|                 alert.email_sent = djangotime.now() | ||||
|                 alert.save(update_fields=["email_sent"]) | ||||
|  | ||||
| @@ -134,14 +161,19 @@ def handle_task_email_alert(pk: int, alert_interval: Union[float, None] = None) | ||||
|  | ||||
| @app.task | ||||
| def handle_task_sms_alert(pk: int, alert_interval: Union[float, None] = None) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     try: | ||||
|         alert = Alert.objects.get(pk=pk) | ||||
|     except Alert.DoesNotExist: | ||||
|         return "alert not found" | ||||
|  | ||||
|     # first time sending text | ||||
|     if not alert.sms_sent: | ||||
|         task_result = TaskResult.objects.get( | ||||
|             task=alert.assigned_task, agent=alert.agent | ||||
|         ) | ||||
|         sleep(random.randint(1, 3)) | ||||
|         task_result.send_sms() | ||||
|         alert.sms_sent = djangotime.now() | ||||
|         alert.save(update_fields=["sms_sent"]) | ||||
|     else: | ||||
| @@ -149,8 +181,11 @@ def handle_task_sms_alert(pk: int, alert_interval: Union[float, None] = None) -> | ||||
|             # send a text only if the last text sent is older than alert interval | ||||
|             delta = djangotime.now() - dt.timedelta(days=alert_interval) | ||||
|             if alert.sms_sent < delta: | ||||
|                 task_result = TaskResult.objects.get( | ||||
|                     task=alert.assigned_task, agent=alert.agent | ||||
|                 ) | ||||
|                 sleep(random.randint(1, 3)) | ||||
|                 task_result.send_sms() | ||||
|                 alert.sms_sent = djangotime.now() | ||||
|                 alert.save(update_fields=["sms_sent"]) | ||||
|  | ||||
| @@ -159,14 +194,19 @@ def handle_task_sms_alert(pk: int, alert_interval: Union[float, None] = None) -> | ||||
|  | ||||
| @app.task | ||||
| def handle_resolved_task_sms_alert(pk: int) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     try: | ||||
|         alert = Alert.objects.get(pk=pk) | ||||
|     except Alert.DoesNotExist: | ||||
|         return "alert not found" | ||||
|  | ||||
|     # first time sending text | ||||
|     if not alert.resolved_sms_sent: | ||||
|         task_result = TaskResult.objects.get( | ||||
|             task=alert.assigned_task, agent=alert.agent | ||||
|         ) | ||||
|         sleep(random.randint(1, 3)) | ||||
|         task_result.send_resolved_sms() | ||||
|         alert.resolved_sms_sent = djangotime.now() | ||||
|         alert.save(update_fields=["resolved_sms_sent"]) | ||||
|  | ||||
| @@ -175,14 +215,19 @@ def handle_resolved_task_sms_alert(pk: int) -> str: | ||||
|  | ||||
| @app.task | ||||
| def handle_resolved_task_email_alert(pk: int) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     try: | ||||
|         alert = Alert.objects.get(pk=pk) | ||||
|     except Alert.DoesNotExist: | ||||
|         return "alert not found" | ||||
|  | ||||
|     # first time sending email | ||||
|     if not alert.resolved_email_sent: | ||||
|         task_result = TaskResult.objects.get( | ||||
|             task=alert.assigned_task, agent=alert.agent | ||||
|         ) | ||||
|         sleep(random.randint(1, 5)) | ||||
|         task_result.send_resolved_email() | ||||
|         alert.resolved_email_sent = djangotime.now() | ||||
|         alert.save(update_fields=["resolved_email_sent"]) | ||||
|  | ||||
|   | ||||
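Taken together, the reworked tasks above accept an optional `agent_id` and swallow `DoesNotExist` instead of raising, so a stale queue message no longer errors the worker. A minimal sketch of how a caller might queue them under those assumptions; the wrapper function is illustrative only, while `run_win_task` and its `pk`/`agent_id` keywords come from the diff.

```python
# Sketch only: dispatching the updated Celery tasks.
# queue_task_run is a hypothetical helper, not part of the codebase.
from typing import Optional

from autotasks.tasks import run_win_task


def queue_task_run(task_pk: int, agent_id: Optional[str] = None) -> None:
    if agent_id:
        # Policy task: tell the worker which agent to run it on.
        run_win_task.delay(pk=task_pk, agent_id=agent_id)
    else:
        # Agent task: the AutomatedTask row already knows its agent.
        run_win_task.delay(pk=task_pk)
```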
| @@ -1,12 +1,12 @@ | ||||
| import datetime as dt | ||||
| from unittest.mock import call, patch | ||||
|  | ||||
| from django.utils import timezone as djangotime | ||||
| from model_bakery import baker | ||||
|  | ||||
| from tacticalrmm.constants import TaskType | ||||
| from tacticalrmm.test import TacticalTestCase | ||||
|  | ||||
| from .models import AutomatedTask, TaskResult, TaskSyncStatus | ||||
| from .serializers import TaskSerializer | ||||
| from .tasks import create_win_task_schedule, remove_orphaned_win_tasks, run_win_task | ||||
|  | ||||
| @@ -44,11 +44,8 @@ class TestAutotaskViews(TacticalTestCase): | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertEqual(len(resp.data), 4) | ||||
|  | ||||
|     @patch("automation.tasks.generate_agent_autotasks_task.delay") | ||||
|     @patch("autotasks.tasks.create_win_task_schedule.delay") | ||||
|     def test_add_autotask( | ||||
|         self, create_win_task_schedule, generate_agent_autotasks_task | ||||
|     ): | ||||
|     def test_add_autotask(self, create_win_task_schedule): | ||||
|         url = f"{base_url}/" | ||||
|  | ||||
|         # setup data | ||||
| @@ -238,20 +235,6 @@ class TestAutotaskViews(TacticalTestCase): | ||||
|         create_win_task_schedule.assert_called() | ||||
|         create_win_task_schedule.reset_mock() | ||||
|  | ||||
|         self.check_not_authenticated("post", url) | ||||
|  | ||||
|     def test_get_autotask(self): | ||||
| @@ -266,15 +249,11 @@ class TestAutotaskViews(TacticalTestCase): | ||||
|         serializer = TaskSerializer(task) | ||||
|  | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertEqual(resp.data, serializer.data) | ||||
|  | ||||
|         self.check_not_authenticated("get", url) | ||||
|  | ||||
|     @patch("autotasks.tasks.modify_win_task.delay") | ||||
|     @patch("automation.tasks.update_policy_autotasks_fields_task.delay") | ||||
|     def test_update_autotask( | ||||
|         self, update_policy_autotasks_fields_task, modify_win_task | ||||
|     ): | ||||
|     def test_update_autotask(self): | ||||
|         # setup data | ||||
|         agent = baker.make_recipe("agents.agent") | ||||
|         agent_task = baker.make("autotasks.AutomatedTask", agent=agent) | ||||
| @@ -292,22 +271,19 @@ class TestAutotaskViews(TacticalTestCase): | ||||
|         resp = self.client.put(f"{base_url}/500/", format="json") | ||||
|         self.assertEqual(resp.status_code, 404) | ||||
|  | ||||
|         url = f"{base_url}/{agent_task.id}/"  # type: ignore | ||||
|         url = f"{base_url}/{agent_task.id}/" | ||||
|  | ||||
|         # test editing agent task with no task update | ||||
|         data = {"name": "New Name"} | ||||
|  | ||||
|         resp = self.client.put(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # test editing agent task with agent task update | ||||
|         data = {"enabled": False} | ||||
|  | ||||
|         resp = self.client.put(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # test editing agent task with task_type | ||||
|         data = { | ||||
| @@ -323,13 +299,11 @@ class TestAutotaskViews(TacticalTestCase): | ||||
|             "repetition_duration": "1H", | ||||
|             "random_task_delay": "5M", | ||||
|             "custom_field": custom_field.id, | ||||
|             "run_asap_afteR_missed": False, | ||||
|             "run_asap_after_missed": False, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.put(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # test trying to edit with empty actions | ||||
|         data = { | ||||
| @@ -349,35 +323,12 @@ class TestAutotaskViews(TacticalTestCase): | ||||
|  | ||||
|         resp = self.client.put(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|         self.check_not_authenticated("put", url) | ||||
|  | ||||
|     @patch("autotasks.tasks.remove_orphaned_win_tasks.delay") | ||||
|     @patch("autotasks.tasks.delete_win_task_schedule.delay") | ||||
|     @patch("automation.tasks.delete_policy_autotasks_task.delay") | ||||
|     def test_delete_autotask( | ||||
|         self, delete_policy_autotasks_task, delete_win_task_schedule | ||||
|     ): | ||||
|     def test_delete_autotask(self, delete_win_task_schedule, remove_orphaned_win_tasks): | ||||
|         # setup data | ||||
|         agent = baker.make_recipe("agents.agent") | ||||
|         agent_task = baker.make("autotasks.AutomatedTask", agent=agent) | ||||
| @@ -389,17 +340,20 @@ class TestAutotaskViews(TacticalTestCase): | ||||
|         self.assertEqual(resp.status_code, 404) | ||||
|  | ||||
|         # test delete agent task | ||||
|         url = f"{base_url}/{agent_task.id}/"  # type: ignore | ||||
|         url = f"{base_url}/{agent_task.id}/" | ||||
|         resp = self.client.delete(url, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         delete_win_task_schedule.assert_called_with(pk=agent_task.id) | ||||
|         remove_orphaned_win_tasks.assert_not_called() | ||||
|  | ||||
|         delete_win_task_schedule.reset_mock() | ||||
|         remove_orphaned_win_tasks.reset_mock() | ||||
|         # test delete policy task | ||||
|         url = f"{base_url}/{policy_task.id}/"  # type: ignore | ||||
|         url = f"{base_url}/{policy_task.id}/" | ||||
|         resp = self.client.delete(url, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertFalse(AutomatedTask.objects.filter(pk=policy_task.id))  # type: ignore | ||||
|         remove_orphaned_win_tasks.assert_called_once() | ||||
|         delete_win_task_schedule.assert_not_called() | ||||
|  | ||||
|         self.check_not_authenticated("delete", url) | ||||
|  | ||||
| @@ -414,7 +368,7 @@ class TestAutotaskViews(TacticalTestCase): | ||||
|         self.assertEqual(resp.status_code, 404) | ||||
|  | ||||
|         # test run agent task | ||||
|         url = f"{base_url}/{task.id}/run/"  # type: ignore | ||||
|         url = f"{base_url}/{task.id}/run/" | ||||
|         resp = self.client.post(url, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         run_win_task.assert_called() | ||||
| @@ -429,11 +383,11 @@ class TestAutoTaskCeleryTasks(TacticalTestCase): | ||||
|  | ||||
|     @patch("agents.models.Agent.nats_cmd") | ||||
|     def test_remove_orphaned_win_task(self, nats_cmd): | ||||
|         agent = baker.make_recipe("agents.online_agent") | ||||
|         baker.make_recipe("agents.offline_agent") | ||||
|         task1 = AutomatedTask.objects.create( | ||||
|             agent=agent, | ||||
|             name="test task 1", | ||||
|             win_task_name=AutomatedTask.generate_task_name(), | ||||
|         ) | ||||
|  | ||||
|         # test removing an orphaned task | ||||
| @@ -443,13 +397,13 @@ class TestAutoTaskCeleryTasks(TacticalTestCase): | ||||
|             "GoogleUpdateTaskMachineCore", | ||||
|             "GoogleUpdateTaskMachineUA", | ||||
|             "OneDrive Standalone Update Task-S-1-5-21-717461175-241712648-1206041384-1001", | ||||
|             task1.win_task_name, | ||||
|             "TacticalRMM_fixmesh", | ||||
|             "TacticalRMM_SchedReboot_jk324kajd", | ||||
|             "TacticalRMM_iggrLcOaldIZnUzLuJWPLNwikiOoJJHHznb",  # orphaned task | ||||
|         ] | ||||
|  | ||||
|         calls = [ | ||||
|             call({"func": "listschedtasks"}, timeout=10), | ||||
|             call( | ||||
|                 { | ||||
| @@ -463,26 +417,23 @@ class TestAutoTaskCeleryTasks(TacticalTestCase): | ||||
|         ] | ||||
|  | ||||
|         nats_cmd.side_effect = [win_tasks, "ok"] | ||||
|         remove_orphaned_win_tasks() | ||||
|         self.assertEqual(nats_cmd.call_count, 2) | ||||
|         nats_cmd.assert_has_calls(calls) | ||||
|  | ||||
|         # test nats delete task fail | ||||
|         nats_cmd.reset_mock() | ||||
|         nats_cmd.side_effect = [win_tasks, "error deleting task"] | ||||
|         remove_orphaned_win_tasks() | ||||
|         nats_cmd.assert_has_calls(calls) | ||||
|         self.assertEqual(nats_cmd.call_count, 2) | ||||
|  | ||||
|         # no orphaned tasks | ||||
|         nats_cmd.reset_mock() | ||||
|         win_tasks.remove("TacticalRMM_iggrLcOaldIZnUzLuJWPLNwikiOoJJHHznb") | ||||
|         nats_cmd.side_effect = [win_tasks, "ok"] | ||||
|         remove_orphaned_win_tasks() | ||||
|         self.assertEqual(nats_cmd.call_count, 1) | ||||
|  | ||||
|     @patch("agents.models.Agent.nats_cmd") | ||||
|     def test_run_win_task(self, nats_cmd): | ||||
| @@ -490,159 +441,326 @@ class TestAutoTaskCeleryTasks(TacticalTestCase): | ||||
|         self.task1 = AutomatedTask.objects.create( | ||||
|             agent=self.agent, | ||||
|             name="test task 1", | ||||
|             win_task_name=AutomatedTask.generate_task_name(), | ||||
|         ) | ||||
|         nats_cmd.return_value = "ok" | ||||
|         ret = run_win_task.s(self.task1.pk).apply() | ||||
|         self.assertEqual(ret.status, "SUCCESS") | ||||
|  | ||||
|     # @patch("agents.models.Agent.nats_cmd") | ||||
|     # def test_create_win_task_schedule(self, nats_cmd): | ||||
|     #     self.agent = baker.make_recipe("agents.agent") | ||||
|     @patch("agents.models.Agent.nats_cmd") | ||||
|     def test_create_win_task_schedule(self, nats_cmd): | ||||
|         agent = baker.make_recipe("agents.agent", time_zone="UTC") | ||||
|  | ||||
|         # test daily task | ||||
|         task1 = baker.make( | ||||
|             "autotasks.AutomatedTask", | ||||
|             agent=agent, | ||||
|             name="test task 1", | ||||
|             task_type=TaskType.DAILY, | ||||
|             daily_interval=1, | ||||
|             run_time_date=djangotime.now() + djangotime.timedelta(hours=3, minutes=30), | ||||
|         ) | ||||
|         self.assertFalse(TaskResult.objects.filter(agent=agent, task=task1).exists()) | ||||
|  | ||||
|     #     nats_cmd.return_value = "timeout" | ||||
|     #     ret = create_win_task_schedule.s(pk=self.task1.pk).apply() | ||||
|     #     self.assertEqual(ret.status, "SUCCESS") | ||||
|     #     self.task1 = AutomatedTask.objects.get(pk=self.task1.pk) | ||||
|     #     self.assertEqual(self.task1.sync_status, "initial") | ||||
|         nats_cmd.return_value = "ok" | ||||
|         create_win_task_schedule(pk=task1.pk) | ||||
|         nats_cmd.assert_called_with( | ||||
|             { | ||||
|                 "func": "schedtask", | ||||
|                 "schedtaskpayload": { | ||||
|                     "pk": task1.pk, | ||||
|                     "type": "rmm", | ||||
|                     "name": task1.win_task_name, | ||||
|                     "overwrite_task": False, | ||||
|                     "enabled": True, | ||||
|                     "trigger": "daily", | ||||
|                     "multiple_instances": 1, | ||||
|                     "delete_expired_task_after": False, | ||||
|                     "start_when_available": False, | ||||
|                     "start_year": int(task1.run_time_date.strftime("%Y")), | ||||
|                     "start_month": int(task1.run_time_date.strftime("%-m")), | ||||
|                     "start_day": int(task1.run_time_date.strftime("%-d")), | ||||
|                     "start_hour": int(task1.run_time_date.strftime("%-H")), | ||||
|                     "start_min": int(task1.run_time_date.strftime("%-M")), | ||||
|                     "day_interval": 1, | ||||
|                 }, | ||||
|             }, | ||||
|             timeout=5, | ||||
|         ) | ||||
|         nats_cmd.reset_mock() | ||||
|         self.assertEqual( | ||||
|             TaskResult.objects.get(task=task1, agent=agent).sync_status, | ||||
|             TaskSyncStatus.SYNCED, | ||||
|         ) | ||||
|  | ||||
|         nats_cmd.return_value = "timeout" | ||||
|         create_win_task_schedule(pk=task1.pk) | ||||
|         self.assertEqual( | ||||
|             TaskResult.objects.get(task=task1, agent=agent).sync_status, | ||||
|             TaskSyncStatus.INITIAL, | ||||
|         ) | ||||
|         nats_cmd.reset_mock() | ||||
|  | ||||
|         # test weekly task | ||||
|         task1 = baker.make( | ||||
|             "autotasks.AutomatedTask", | ||||
|             agent=agent, | ||||
|             name="test task 1", | ||||
|             task_type=TaskType.WEEKLY, | ||||
|             weekly_interval=1, | ||||
|             run_asap_after_missed=True, | ||||
|             run_time_bit_weekdays=127, | ||||
|             run_time_date=djangotime.now() + djangotime.timedelta(hours=3, minutes=30), | ||||
|             expire_date=djangotime.now() + djangotime.timedelta(days=100), | ||||
|             task_instance_policy=2, | ||||
|         ) | ||||
|  | ||||
|         nats_cmd.return_value = "ok" | ||||
|         create_win_task_schedule(pk=task1.pk) | ||||
|         nats_cmd.assert_called_with( | ||||
|             { | ||||
|                 "func": "schedtask", | ||||
|                 "schedtaskpayload": { | ||||
|                     "pk": task1.pk, | ||||
|                     "type": "rmm", | ||||
|                     "name": task1.win_task_name, | ||||
|                     "overwrite_task": False, | ||||
|                     "enabled": True, | ||||
|                     "trigger": "weekly", | ||||
|                     "multiple_instances": 2, | ||||
|                     "delete_expired_task_after": False, | ||||
|                     "start_when_available": True, | ||||
|                     "start_year": int(task1.run_time_date.strftime("%Y")), | ||||
|                     "start_month": int(task1.run_time_date.strftime("%-m")), | ||||
|                     "start_day": int(task1.run_time_date.strftime("%-d")), | ||||
|                     "start_hour": int(task1.run_time_date.strftime("%-H")), | ||||
|                     "start_min": int(task1.run_time_date.strftime("%-M")), | ||||
|                     "expire_year": int(task1.expire_date.strftime("%Y")), | ||||
|                     "expire_month": int(task1.expire_date.strftime("%-m")), | ||||
|                     "expire_day": int(task1.expire_date.strftime("%-d")), | ||||
|                     "expire_hour": int(task1.expire_date.strftime("%-H")), | ||||
|                     "expire_min": int(task1.expire_date.strftime("%-M")), | ||||
|                     "week_interval": 1, | ||||
|                     "days_of_week": 127, | ||||
|                 }, | ||||
|             }, | ||||
|             timeout=5, | ||||
|         ) | ||||
|         nats_cmd.reset_mock() | ||||
|  | ||||
|         # test monthly task | ||||
|         task1 = baker.make( | ||||
|             "autotasks.AutomatedTask", | ||||
|             agent=agent, | ||||
|             name="test task 1", | ||||
|             task_type=TaskType.MONTHLY, | ||||
|             random_task_delay="3M", | ||||
|             task_repetition_interval="15M", | ||||
|             task_repetition_duration="1D", | ||||
|             stop_task_at_duration_end=True, | ||||
|             monthly_days_of_month=0x80000030, | ||||
|             monthly_months_of_year=0x400, | ||||
|             run_time_date=djangotime.now() + djangotime.timedelta(hours=3, minutes=30), | ||||
|         ) | ||||
|  | ||||
|         nats_cmd.return_value = "ok" | ||||
|         create_win_task_schedule(pk=task1.pk) | ||||
|         nats_cmd.assert_called_with( | ||||
|             { | ||||
|                 "func": "schedtask", | ||||
|                 "schedtaskpayload": { | ||||
|                     "pk": task1.pk, | ||||
|                     "type": "rmm", | ||||
|                     "name": task1.win_task_name, | ||||
|                     "overwrite_task": False, | ||||
|                     "enabled": True, | ||||
|                     "trigger": "monthly", | ||||
|                     "multiple_instances": 1, | ||||
|                     "delete_expired_task_after": False, | ||||
|                     "start_when_available": False, | ||||
|                     "start_year": int(task1.run_time_date.strftime("%Y")), | ||||
|                     "start_month": int(task1.run_time_date.strftime("%-m")), | ||||
|                     "start_day": int(task1.run_time_date.strftime("%-d")), | ||||
|                     "start_hour": int(task1.run_time_date.strftime("%-H")), | ||||
|                     "start_min": int(task1.run_time_date.strftime("%-M")), | ||||
|                     "random_delay": "PT3M", | ||||
|                     "repetition_interval": "PT15M", | ||||
|                     "repetition_duration": "P1DT", | ||||
|                     "stop_at_duration_end": True, | ||||
|                     "days_of_month": 0x30, | ||||
|                     "run_on_last_day_of_month": True, | ||||
|                     "months_of_year": 1024, | ||||
|                 }, | ||||
|             }, | ||||
|             timeout=5, | ||||
|         ) | ||||
|         nats_cmd.reset_mock() | ||||
|  | ||||
|         # test monthly dow | ||||
|         task1 = baker.make( | ||||
|             "autotasks.AutomatedTask", | ||||
|             agent=agent, | ||||
|             name="test task 1", | ||||
|             task_type=TaskType.MONTHLY_DOW, | ||||
|             run_time_bit_weekdays=56, | ||||
|             monthly_months_of_year=0x400, | ||||
|             monthly_weeks_of_month=3, | ||||
|             run_time_date=djangotime.now() + djangotime.timedelta(hours=3, minutes=30), | ||||
|         ) | ||||
|         nats_cmd.return_value = "ok" | ||||
|         create_win_task_schedule(pk=task1.pk) | ||||
|         nats_cmd.assert_called_with( | ||||
|             { | ||||
|                 "func": "schedtask", | ||||
|                 "schedtaskpayload": { | ||||
|                     "pk": task1.pk, | ||||
|                     "type": "rmm", | ||||
|                     "name": task1.win_task_name, | ||||
|                     "overwrite_task": False, | ||||
|                     "enabled": True, | ||||
|                     "trigger": "monthlydow", | ||||
|                     "multiple_instances": 1, | ||||
|                     "delete_expired_task_after": False, | ||||
|                     "start_when_available": False, | ||||
|                     "start_year": int(task1.run_time_date.strftime("%Y")), | ||||
|                     "start_month": int(task1.run_time_date.strftime("%-m")), | ||||
|                     "start_day": int(task1.run_time_date.strftime("%-d")), | ||||
|                     "start_hour": int(task1.run_time_date.strftime("%-H")), | ||||
|                     "start_min": int(task1.run_time_date.strftime("%-M")), | ||||
|                     "days_of_week": 56, | ||||
|                     "months_of_year": 0x400, | ||||
|                     "weeks_of_month": 3, | ||||
|                 }, | ||||
|             }, | ||||
|             timeout=5, | ||||
|         ) | ||||
|         nats_cmd.reset_mock() | ||||
|  | ||||
|         # test runonce with future date | ||||
|         task1 = baker.make( | ||||
|             "autotasks.AutomatedTask", | ||||
|             agent=agent, | ||||
|             name="test task 2", | ||||
|             task_type=TaskType.RUN_ONCE, | ||||
|             run_time_date=djangotime.now() + djangotime.timedelta(hours=22), | ||||
|             run_asap_after_missed=True, | ||||
|         ) | ||||
|         nats_cmd.return_value = "ok" | ||||
|         create_win_task_schedule(pk=task1.pk) | ||||
|         nats_cmd.assert_called_with( | ||||
|             { | ||||
|                 "func": "schedtask", | ||||
|                 "schedtaskpayload": { | ||||
|                     "pk": task1.pk, | ||||
|                     "type": "rmm", | ||||
|                     "name": task1.win_task_name, | ||||
|                     "overwrite_task": False, | ||||
|                     "enabled": True, | ||||
|                     "trigger": "runonce", | ||||
|                     "multiple_instances": 1, | ||||
|                     "delete_expired_task_after": False, | ||||
|                     "start_when_available": True, | ||||
|                     "start_year": int(task1.run_time_date.strftime("%Y")), | ||||
|                     "start_month": int(task1.run_time_date.strftime("%-m")), | ||||
|                     "start_day": int(task1.run_time_date.strftime("%-d")), | ||||
|                     "start_hour": int(task1.run_time_date.strftime("%-H")), | ||||
|                     "start_min": int(task1.run_time_date.strftime("%-M")), | ||||
|                 }, | ||||
|             }, | ||||
|             timeout=5, | ||||
|         ) | ||||
|         nats_cmd.reset_mock() | ||||
|  | ||||
|         # test runonce with date in the past | ||||
|         task1 = baker.make( | ||||
|             "autotasks.AutomatedTask", | ||||
|             agent=agent, | ||||
|             name="test task 3", | ||||
|             task_type=TaskType.RUN_ONCE, | ||||
|             run_asap_after_missed=True, | ||||
|             run_time_date=djangotime.datetime(2018, 6, 1, 23, 23, 23), | ||||
|         ) | ||||
|         nats_cmd.return_value = "ok" | ||||
|         create_win_task_schedule(pk=task1.pk) | ||||
|         nats_cmd.assert_called() | ||||
|  | ||||
|         # check if task is scheduled for at most 5min in the future | ||||
|         _, args, _ = nats_cmd.mock_calls[0] | ||||
|  | ||||
|         current_minute = int(djangotime.now().strftime("%-M")) | ||||
|  | ||||
|         if current_minute >= 55 and current_minute < 60: | ||||
|             self.assertLess( | ||||
|                 args[0]["schedtaskpayload"]["start_min"], | ||||
|                 int(djangotime.now().strftime("%-M")), | ||||
|             ) | ||||
|         else: | ||||
|             self.assertGreater( | ||||
|                 args[0]["schedtaskpayload"]["start_min"], | ||||
|                 int(djangotime.now().strftime("%-M")), | ||||
|             ) | ||||
|  | ||||
|         # test checkfailure task | ||||
|         nats_cmd.reset_mock() | ||||
|         check = baker.make_recipe("checks.diskspace_check", agent=agent) | ||||
|         task1 = baker.make( | ||||
|             "autotasks.AutomatedTask", | ||||
|             agent=agent, | ||||
|             name="test task 4", | ||||
|             task_type=TaskType.CHECK_FAILURE, | ||||
|             assigned_check=check, | ||||
|         ) | ||||
|         nats_cmd.return_value = "ok" | ||||
|         create_win_task_schedule(pk=task1.pk) | ||||
|         nats_cmd.assert_called_with( | ||||
|             { | ||||
|                 "func": "schedtask", | ||||
|                 "schedtaskpayload": { | ||||
|                     "pk": task1.pk, | ||||
|                     "type": "rmm", | ||||
|                     "name": task1.win_task_name, | ||||
|                     "overwrite_task": False, | ||||
|                     "enabled": True, | ||||
|                     "trigger": "manual", | ||||
|                     "multiple_instances": 1, | ||||
|                     "delete_expired_task_after": False, | ||||
|                     "start_when_available": False, | ||||
|                 }, | ||||
|             }, | ||||
|             timeout=5, | ||||
|         ) | ||||
|         nats_cmd.reset_mock() | ||||
|  | ||||
|         # test manual | ||||
|         task1 = AutomatedTask.objects.create( | ||||
|             agent=agent, | ||||
|             name="test task 5", | ||||
|             task_type=TaskType.MANUAL, | ||||
|         ) | ||||
|         nats_cmd.return_value = "ok" | ||||
|         create_win_task_schedule(pk=task1.pk) | ||||
|         nats_cmd.assert_called_with( | ||||
|             { | ||||
|                 "func": "schedtask", | ||||
|                 "schedtaskpayload": { | ||||
|                     "pk": task1.pk, | ||||
|                     "type": "rmm", | ||||
|                     "name": task1.win_task_name, | ||||
|                     "overwrite_task": False, | ||||
|                     "enabled": True, | ||||
|                     "trigger": "manual", | ||||
|                     "multiple_instances": 1, | ||||
|                     "delete_expired_task_after": False, | ||||
|                     "start_when_available": False, | ||||
|                 }, | ||||
|             }, | ||||
|             timeout=5, | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class TestTaskPermissions(TacticalTestCase): | ||||
|     def setUp(self): | ||||
|         self.setup_coresettings() | ||||
|         self.client_setup() | ||||
|         self.setup_client() | ||||
|  | ||||
|     def test_get_tasks_permissions(self): | ||||
|         agent = baker.make_recipe("agents.agent") | ||||
| @@ -709,7 +827,7 @@ class TestTaskPermissions(TacticalTestCase): | ||||
|         script = baker.make("scripts.Script") | ||||
|  | ||||
|         policy_data = { | ||||
|             "policy": policy.id,  # type: ignore | ||||
|             "policy": policy.id, | ||||
|             "name": "Test Task Manual", | ||||
|             "run_time_days": [], | ||||
|             "timeout": 120, | ||||
| @@ -855,9 +973,3 @@ class TestTaskPermissions(TacticalTestCase): | ||||
|  | ||||
|         self.check_authorized("post", url) | ||||
|         self.check_not_authorized("post", unauthorized_url) | ||||
|  | ||||
|   | ||||
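The tests above read per-agent state through the new `TaskResult` model rather than fields on `AutomatedTask` itself, e.g. `TaskResult.objects.get(task=..., agent=...).sync_status`. A short sketch of that lookup pattern, assuming the `TaskResult`/`TaskSyncStatus` names imported in the test module; the helper itself is illustrative only.

```python
# Sketch of the per-agent sync-status lookup the tests above exercise.
# TaskResult and TaskSyncStatus are the names used in this diff;
# the is_synced helper is hypothetical.
from autotasks.models import TaskResult, TaskSyncStatus


def is_synced(task, agent) -> bool:
    try:
        result = TaskResult.objects.get(task=task, agent=agent)
    except TaskResult.DoesNotExist:
        # No result row yet: the task was never pushed to this agent.
        return False
    return result.sync_status == TaskSyncStatus.SYNCED
```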
| @@ -1,16 +1,17 @@ | ||||
| from django.shortcuts import get_object_or_404 | ||||
| from rest_framework.exceptions import PermissionDenied | ||||
| from rest_framework.permissions import IsAuthenticated | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.views import APIView | ||||
|  | ||||
| from agents.models import Agent | ||||
| from automation.models import Policy | ||||
| from tacticalrmm.permissions import _has_perm_on_agent | ||||
|  | ||||
| from .models import AutomatedTask | ||||
| from .permissions import AutoTaskPerms, RunAutoTaskPerms | ||||
| from .serializers import TaskSerializer | ||||
| from .tasks import remove_orphaned_win_tasks | ||||
|  | ||||
|  | ||||
| class GetAddAutoTasks(APIView): | ||||
| @@ -20,16 +21,15 @@ class GetAddAutoTasks(APIView): | ||||
|  | ||||
|         if agent_id: | ||||
|             agent = get_object_or_404(Agent, agent_id=agent_id) | ||||
|             tasks = agent.get_tasks_with_policies() | ||||
|         elif policy: | ||||
|             policy = get_object_or_404(Policy, id=policy) | ||||
|             tasks = AutomatedTask.objects.filter(policy=policy) | ||||
|         else: | ||||
|             tasks = AutomatedTask.objects.filter_by_role(request.user)  # type: ignore | ||||
|         return Response(TaskSerializer(tasks, many=True).data) | ||||
|  | ||||
|     def post(self, request): | ||||
|         from autotasks.tasks import create_win_task_schedule | ||||
|  | ||||
|         data = request.data.copy() | ||||
| @@ -45,16 +45,11 @@ class GetAddAutoTasks(APIView): | ||||
|  | ||||
|         serializer = TaskSerializer(data=data) | ||||
|         serializer.is_valid(raise_exception=True) | ||||
|         task = serializer.save() | ||||
|  | ||||
|         if task.agent: | ||||
|             create_win_task_schedule.delay(pk=task.pk) | ||||
|  | ||||
|         return Response( | ||||
|             "The task has been created. It will show up on the agent on next checkin" | ||||
|         ) | ||||
| @@ -86,7 +81,6 @@ class GetEditDeleteAutoTask(APIView): | ||||
|         return Response("The task was updated") | ||||
|  | ||||
|     def delete(self, request, pk): | ||||
|         from autotasks.tasks import delete_win_task_schedule | ||||
|  | ||||
|         task = get_object_or_404(AutomatedTask, pk=pk) | ||||
| @@ -96,9 +90,9 @@ class GetEditDeleteAutoTask(APIView): | ||||
|  | ||||
|         if task.agent: | ||||
|             delete_win_task_schedule.delay(pk=task.pk) | ||||
|         else: | ||||
|             task.delete() | ||||
|             remove_orphaned_win_tasks.delay() | ||||
|  | ||||
|         return Response(f"{task.name} will be deleted shortly") | ||||
|  | ||||
| @@ -114,5 +108,14 @@ class RunAutoTask(APIView): | ||||
|         if task.agent and not _has_perm_on_agent(request.user, task.agent.agent_id): | ||||
|             raise PermissionDenied() | ||||
|  | ||||
|         # run policy task on agent | ||||
|         if "agent_id" in request.data.keys(): | ||||
|             if not _has_perm_on_agent(request.user, request.data["agent_id"]): | ||||
|                 raise PermissionDenied() | ||||
|  | ||||
|             run_win_task.delay(pk=pk, agent_id=request.data["agent_id"]) | ||||
|  | ||||
|         # run normal task on agent | ||||
|         else: | ||||
|             run_win_task.delay(pk=pk) | ||||
|         return Response(f"{task.name} will now be run.") | ||||
|   | ||||
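With the `RunAutoTask` view above, a client can run a policy task on a specific agent by sending `agent_id` in the POST body; leaving it out runs a regular agent task. A hedged example of such a call follows; the base URL and auth header are assumptions, and only the `/{id}/run/` suffix and the `agent_id` key appear in this diff.

```python
# Illustrative API call; base URL, token scheme and the agent id are placeholders.
import requests

resp = requests.post(
    "https://rmm.example.com/tasks/42/run/",
    json={"agent_id": "<agent-id>"},  # omit for a normal agent task
    headers={"Authorization": "Token <api key>"},
)
print(resp.status_code, resp.json())
```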
| @@ -1,6 +1,7 @@ | ||||
| from django.contrib import admin | ||||
|  | ||||
| from .models import Check, CheckHistory, CheckResult | ||||
|  | ||||
| admin.site.register(Check) | ||||
| admin.site.register(CheckHistory) | ||||
| admin.site.register(CheckResult) | ||||
|   | ||||
Some files were not shown because too many files have changed in this diff.