Mirror of https://github.com/wazuh/wazuh-docker.git (synced 2025-10-23 04:51:57 +00:00)

Compare commits: 2.0_5.4.2...cloud-v0.2 (352 commits)
CHANGELOG.md (new file, 219 lines)

@@ -0,0 +1,219 @@
# Change Log

All notable changes to this project will be documented in this file.

## Wazuh Docker v3.10.2_7.3.2

### Added

- Update to Wazuh version 3.10.2_7.3.2

## Wazuh Docker v3.10.0_7.3.2

### Added

- Update to Wazuh version 3.10.0_7.3.2

## Wazuh Docker v3.9.5_7.2.1

### Added

- Update to Wazuh version 3.9.5_7.2.1

## Wazuh Docker v3.9.4_7.2.0

### Added

- Update to Wazuh version 3.9.4_7.2.0
- Implemented Wazuh Filebeat Module ([jm404](https://www.github.com/jm404)) [#2a77c6a](https://github.com/wazuh/wazuh-docker/commit/2a77c6a6e6bf78f2492adeedbade7a507d9974b2)

## Wazuh Docker v3.9.3_7.2.0

### Fixed

- Wazuh-docker reinserts cluster settings after resuming containers ([@manuasir](https://github.com/manuasir)) [#213](https://github.com/wazuh/wazuh-docker/pull/213)

## Wazuh Docker v3.9.2_7.1.1

### Added

- Update to Wazuh version 3.9.2_7.1.1

## Wazuh Docker v3.9.3_6.8.1

### Added

- Update to Wazuh version 3.9.3_6.8.1
- Option to disable additional X-Pack applications and hide unnecessary management links ([@SitoRBJ](https://github.com/SitoRBJ)) ([#163](https://github.com/wazuh/wazuh-docker/pull/163))

## Wazuh Docker v3.9.2_6.8.0

### Added

- Update to Wazuh version 3.9.2_6.8.0

## Wazuh Docker v3.9.1_7.1.0

### Added

- Support for Elastic v7.1.0
- New environment variables for Kibana ([@manuasir](https://github.com/manuasir)) [#22ad43](https://github.com/wazuh/wazuh-docker/commit/22ad4360f548e54bb0c5e929f8c84a186ad2ab88)

## Wazuh Docker v3.9.1_6.8.0

### Added

- Update to Wazuh version 3.9.1_6.8.0 ([#181](https://github.com/wazuh/wazuh-docker/pull/181))
- Security for Elastic Stack in Docker implemented ([#186](https://github.com/wazuh/wazuh-docker/issues/186))

### Fixed

- Fixed `ELASTICSEARCH_KIBANA_IP` environment variable ([@manuasir](https://github.com/manuasir)) ([#181](https://github.com/wazuh/wazuh-docker/pull/181))

## Wazuh Docker v3.9.0_6.7.2

### Changed

- Update Elastic Stack version to 6.7.2.

## Wazuh Docker v3.9.0_6.7.1

### Added

- Support for X-Pack authorized requests ([@manuasir](https://github.com/manuasir)) ([#119](https://github.com/wazuh/wazuh-docker/pull/119))
- Add Elasticsearch cluster configuration ([@SitoRBJ](https://github.com/SitoRBJ)) ([#146](https://github.com/wazuh/wazuh-docker/pull/146))
- Add Elasticsearch cluster configuration ([@Phandora](https://github.com/Phandora)) ([#140](https://github.com/wazuh/wazuh-docker/pull/140))
- Setting Nginx to support several users/passwords in Kibana ([@toniMR](https://github.com/toniMR)) ([#136](https://github.com/wazuh/wazuh-docker/pull/136))

### Changed

- Use LS_JAVA_OPTS instead of the old LS_HEAP_SIZE ([@ruffy91](https://github.com/ruffy91)) ([#139](https://github.com/wazuh/wazuh-docker/pull/139))
- Changed the original Wazuh Docker image to allow adding code in the entrypoint ([@Phandora](https://github.com/phandora)) ([#151](https://github.com/wazuh/wazuh-docker/pull/151))

### Removed

- Removed files from the Wazuh image ([@Phandora](https://github.com/phandora)) ([#153](https://github.com/wazuh/wazuh-docker/pull/153))

## Wazuh Docker v3.8.2_6.7.0

### Changed

- Update Elastic Stack version to 6.7.0. ([#144](https://github.com/wazuh/wazuh-docker/pull/144))

## Wazuh Docker v3.8.2_6.6.2

### Changed

- Update Elastic Stack version to 6.6.2. ([#130](https://github.com/wazuh/wazuh-docker/pull/130))

## Wazuh Docker v3.8.2_6.6.1

### Changed

- Update Elastic Stack version to 6.6.1. ([#129](https://github.com/wazuh/wazuh-docker/pull/129))

## Wazuh Docker v3.8.2_6.5.4

### Added

- Add Wazuh-Elasticsearch. ([#106](https://github.com/wazuh/wazuh-docker/pull/106))
- Store the Filebeat registry in _/var/lib/filebeat/registry_. ([#109](https://github.com/wazuh/wazuh-docker/pull/109))
- Add the option to disable some X-Pack features. ([#111](https://github.com/wazuh/wazuh-docker/pull/111))
- Wazuh-Kibana customizable at plugin level. ([#117](https://github.com/wazuh/wazuh-docker/pull/117))
- Add environment variables for the alerts data flow. ([#118](https://github.com/wazuh/wazuh-docker/pull/118))
- New Logstash entrypoint added. ([#135](https://github.com/wazuh/wazuh-docker/pull/135/files))
- Welcome screen management. ([#133](https://github.com/wazuh/wazuh-docker/pull/133))

### Changed

- Update to Wazuh version 3.8.2. ([#105](https://github.com/wazuh/wazuh-docker/pull/105))

### Removed

- Remove alerts created at build time. ([#137](https://github.com/wazuh/wazuh-docker/pull/137))

## Wazuh Docker v3.8.1_6.5.4

### Changed

- Update to Wazuh version 3.8.1. ([#102](https://github.com/wazuh/wazuh-docker/pull/102))

## Wazuh Docker v3.8.0_6.5.4

### Changed

- Upgrade to version 3.8.0_6.5.4. ([#97](https://github.com/wazuh/wazuh-docker/pull/97))

### Removed

- Remove the cluster.py workaround. ([#99](https://github.com/wazuh/wazuh-docker/pull/99))

## Wazuh Docker v3.7.2_6.5.4

### Added

- Improvements to Kibana settings. ([#91](https://github.com/wazuh/wazuh-docker/pull/91))
- Add Kibana environment variables for the Wazuh app config.yml. ([#89](https://github.com/wazuh/wazuh-docker/pull/89))

### Changed

- Update Elastic Stack version to 6.5.4. ([#82](https://github.com/wazuh/wazuh-docker/pull/82))
- Add environment credentials for NGINX. ([#86](https://github.com/wazuh/wazuh-docker/pull/86))
- Improve the Filebeat configuration. ([#88](https://github.com/wazuh/wazuh-docker/pull/88))

### Fixed

- Temporary fix for the Wazuh cluster master node in Kubernetes. ([#84](https://github.com/wazuh/wazuh-docker/pull/84))

## Wazuh Docker v3.7.2_6.5.3

### Changed

- Removed the temporary fix for the AWS integration. ([#81](https://github.com/wazuh/wazuh-docker/pull/81))

### Fixed

- Upgrade errors caused by wrong files. ([#80](https://github.com/wazuh/wazuh-docker/pull/80))

## Wazuh Docker v3.7.0_6.5.0

### Changed

- Adapt to Elastic Stack 6.5.0.

## Wazuh Docker v3.7.0_6.4.3

### Added

- Allow custom scripts or commands before service start ([#58](https://github.com/wazuh/wazuh-docker/pull/58))
- Added a description for wazuh-nginx ([#59](https://github.com/wazuh/wazuh-docker/pull/59))
- Added a license file to match the https://github.com/wazuh/wazuh LICENSE ([#60](https://github.com/wazuh/wazuh-docker/pull/60))
- Added SMTP packages ([#67](https://github.com/wazuh/wazuh-docker/pull/67))

### Changed

- Increased the proxy buffer for NGINX Kibana ([#51](https://github.com/wazuh/wazuh-docker/pull/51))
- Updated the Logstash config to remove deprecation warnings ([#55](https://github.com/wazuh/wazuh-docker/pull/55))
- Set the ossec user's home path ([#61](https://github.com/wazuh/wazuh-docker/pull/61))

### Fixed

- Fixed a bug that prevented the API from starting when the Wazuh manager was updated, by changing the files stored in the volume. ([#65](https://github.com/wazuh/wazuh-docker/pull/65))
- Fixed a script reference ([#62](https://github.com/wazuh/wazuh-docker/pull/62/files))

## Wazuh Docker v3.6.1_6.4.3

Wazuh-Docker starting point.
LICENSE (new file, 475 lines)

@@ -0,0 +1,475 @@
|
||||
|
||||
Portions Copyright (C) 2019 Wazuh, Inc.
|
||||
Based on work Copyright (C) 2003 - 2013 Trend Micro, Inc.
|
||||
|
||||
This program is a free software; you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License (version 2) as
|
||||
published by the FSF - Free Software Foundation.
|
||||
|
||||
In addition, certain source files in this program permit linking with the
|
||||
OpenSSL library (http://www.openssl.org), which otherwise wouldn't be allowed
|
||||
under the GPL. For purposes of identifying OpenSSL, most source files giving
|
||||
this permission limit it to versions of OpenSSL having a license identical to
|
||||
that listed in this file (see section "OpenSSL LICENSE" below). It is not
|
||||
necessary for the copyright years to match between this file and the OpenSSL
|
||||
version in question. However, note that because this file is an extension of
|
||||
the license statements of these source files, this file may not be changed
|
||||
except with permission from all copyright holders of source files in this
|
||||
program which reference this file.
|
||||
|
||||
Note that this license applies to the source code, as well as
|
||||
decoders, rules and any other data file included with OSSEC (unless
|
||||
otherwise specified).
|
||||
|
||||
For the purpose of this license, we consider an application to constitute a
|
||||
"derivative work" or a work based on this program if it does any of the
|
||||
following (list not exclusive):
|
||||
|
||||
* Integrates source code/data files from OSSEC.
|
||||
* Includes OSSEC copyrighted material.
|
||||
* Includes/integrates OSSEC into a proprietary executable installer.
|
||||
* Links to a library or executes a program that does any of the above.
|
||||
|
||||
This list is not exclusive, but just a clarification of our interpretation
|
||||
of derived works. These restrictions only apply if you actually redistribute
|
||||
OSSEC (or parts of it).
|
||||
|
||||
We don't consider these to be added restrictions on top of the GPL,
|
||||
but just a clarification of how we interpret "derived works" as it
|
||||
applies to OSSEC. This is similar to the way Linus Torvalds has
|
||||
announced his interpretation of how "derived works" applies to Linux kernel
|
||||
modules. Our interpretation refers only to OSSEC - we don't speak
|
||||
for any other GPL products.
|
||||
|
||||
* As a special exception, the copyright holders give
|
||||
* permission to link the code of portions of this program with the
|
||||
* OpenSSL library under certain conditions as described in each
|
||||
* individual source file, and distribute linked combinations
|
||||
* including the two.
|
||||
* You must obey the GNU General Public License in all respects
|
||||
* for all of the code used other than OpenSSL. If you modify
|
||||
* file(s) with this exception, you may extend this exception to your
|
||||
* version of the file(s), but you are not obligated to do so. If you
|
||||
* do not wish to do so, delete this exception statement from your
|
||||
* version. If you delete this exception statement from all source
|
||||
* files in the program, then also delete it here.
|
||||
|
||||
OSSEC HIDS is distributed in the hope that it will be useful, but WITHOUT
|
||||
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
FITNESS FOR A PARTICULAR PURPOSE.
|
||||
See the GNU General Public License Version 2 below for more details.
|
||||
|
||||
-----------------------------------------------------------------------------
|
||||
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 2, June 1991
|
||||
|
||||
Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
|
||||
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The licenses for most software are designed to take away your
|
||||
freedom to share and change it. By contrast, the GNU General Public
|
||||
License is intended to guarantee your freedom to share and change free
|
||||
software--to make sure the software is free for all its users. This
|
||||
General Public License applies to most of the Free Software
|
||||
Foundation's software and to any other program whose authors commit to
|
||||
using it. (Some other Free Software Foundation software is covered by
|
||||
the GNU Lesser General Public License instead.) You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
this service if you wish), that you receive source code or can get it
|
||||
if you want it, that you can change the software or use pieces of it
|
||||
in new free programs; and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to make restrictions that forbid
|
||||
anyone to deny you these rights or to ask you to surrender the rights.
|
||||
These restrictions translate to certain responsibilities for you if you
|
||||
distribute copies of the software, or if you modify it.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must give the recipients all the rights that
|
||||
you have. You must make sure that they, too, receive or can get the
|
||||
source code. And you must show them these terms so they know their
|
||||
rights.
|
||||
|
||||
We protect your rights with two steps: (1) copyright the software, and
|
||||
(2) offer you this license which gives you legal permission to copy,
|
||||
distribute and/or modify the software.
|
||||
|
||||
Also, for each author's protection and ours, we want to make certain
|
||||
that everyone understands that there is no warranty for this free
|
||||
software. If the software is modified by someone else and passed on, we
|
||||
want its recipients to know that what they have is not the original, so
|
||||
that any problems introduced by others will not reflect on the original
|
||||
authors' reputations.
|
||||
|
||||
Finally, any free program is threatened constantly by software
|
||||
patents. We wish to avoid the danger that redistributors of a free
|
||||
program will individually obtain patent licenses, in effect making the
|
||||
program proprietary. To prevent this, we have made it clear that any
|
||||
patent must be licensed for everyone's free use or not licensed at all.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
|
||||
|
||||
0. This License applies to any program or other work which contains
|
||||
a notice placed by the copyright holder saying it may be distributed
|
||||
under the terms of this General Public License. The "Program", below,
|
||||
refers to any such program or work, and a "work based on the Program"
|
||||
means either the Program or any derivative work under copyright law:
|
||||
that is to say, a work containing the Program or a portion of it,
|
||||
either verbatim or with modifications and/or translated into another
|
||||
language. (Hereinafter, translation is included without limitation in
|
||||
the term "modification".) Each licensee is addressed as "you".
|
||||
|
||||
Activities other than copying, distribution and modification are not
|
||||
covered by this License; they are outside its scope. The act of
|
||||
running the Program is not restricted, and the output from the Program
|
||||
is covered only if its contents constitute a work based on the
|
||||
Program (independent of having been made by running the Program).
|
||||
Whether that is true depends on what the Program does.
|
||||
|
||||
1. You may copy and distribute verbatim copies of the Program's
|
||||
source code as you receive it, in any medium, provided that you
|
||||
conspicuously and appropriately publish on each copy an appropriate
|
||||
copyright notice and disclaimer of warranty; keep intact all the
|
||||
notices that refer to this License and to the absence of any warranty;
|
||||
and give any other recipients of the Program a copy of this License
|
||||
along with the Program.
|
||||
|
||||
You may charge a fee for the physical act of transferring a copy, and
|
||||
you may at your option offer warranty protection in exchange for a fee.
|
||||
|
||||
2. You may modify your copy or copies of the Program or any portion
|
||||
of it, thus forming a work based on the Program, and copy and
|
||||
distribute such modifications or work under the terms of Section 1
|
||||
above, provided that you also meet all of these conditions:
|
||||
|
||||
a) You must cause the modified files to carry prominent notices
|
||||
stating that you changed the files and the date of any change.
|
||||
|
||||
b) You must cause any work that you distribute or publish, that in
|
||||
whole or in part contains or is derived from the Program or any
|
||||
part thereof, to be licensed as a whole at no charge to all third
|
||||
parties under the terms of this License.
|
||||
|
||||
c) If the modified program normally reads commands interactively
|
||||
when run, you must cause it, when started running for such
|
||||
interactive use in the most ordinary way, to print or display an
|
||||
announcement including an appropriate copyright notice and a
|
||||
notice that there is no warranty (or else, saying that you provide
|
||||
a warranty) and that users may redistribute the program under
|
||||
these conditions, and telling the user how to view a copy of this
|
||||
License. (Exception: if the Program itself is interactive but
|
||||
does not normally print such an announcement, your work based on
|
||||
the Program is not required to print an announcement.)
|
||||
|
||||
These requirements apply to the modified work as a whole. If
|
||||
identifiable sections of that work are not derived from the Program,
|
||||
and can be reasonably considered independent and separate works in
|
||||
themselves, then this License, and its terms, do not apply to those
|
||||
sections when you distribute them as separate works. But when you
|
||||
distribute the same sections as part of a whole which is a work based
|
||||
on the Program, the distribution of the whole must be on the terms of
|
||||
this License, whose permissions for other licensees extend to the
|
||||
entire whole, and thus to each and every part regardless of who wrote it.
|
||||
|
||||
Thus, it is not the intent of this section to claim rights or contest
|
||||
your rights to work written entirely by you; rather, the intent is to
|
||||
exercise the right to control the distribution of derivative or
|
||||
collective works based on the Program.
|
||||
|
||||
In addition, mere aggregation of another work not based on the Program
|
||||
with the Program (or with a work based on the Program) on a volume of
|
||||
a storage or distribution medium does not bring the other work under
|
||||
the scope of this License.
|
||||
|
||||
3. You may copy and distribute the Program (or a work based on it,
|
||||
under Section 2) in object code or executable form under the terms of
|
||||
Sections 1 and 2 above provided that you also do one of the following:
|
||||
|
||||
a) Accompany it with the complete corresponding machine-readable
|
||||
source code, which must be distributed under the terms of Sections
|
||||
1 and 2 above on a medium customarily used for software interchange; or,
|
||||
|
||||
b) Accompany it with a written offer, valid for at least three
|
||||
years, to give any third party, for a charge no more than your
|
||||
cost of physically performing source distribution, a complete
|
||||
machine-readable copy of the corresponding source code, to be
|
||||
distributed under the terms of Sections 1 and 2 above on a medium
|
||||
customarily used for software interchange; or,
|
||||
|
||||
c) Accompany it with the information you received as to the offer
|
||||
to distribute corresponding source code. (This alternative is
|
||||
allowed only for noncommercial distribution and only if you
|
||||
received the program in object code or executable form with such
|
||||
an offer, in accord with Subsection b above.)
|
||||
|
||||
The source code for a work means the preferred form of the work for
|
||||
making modifications to it. For an executable work, complete source
|
||||
code means all the source code for all modules it contains, plus any
|
||||
associated interface definition files, plus the scripts used to
|
||||
control compilation and installation of the executable. However, as a
|
||||
special exception, the source code distributed need not include
|
||||
anything that is normally distributed (in either source or binary
|
||||
form) with the major components (compiler, kernel, and so on) of the
|
||||
operating system on which the executable runs, unless that component
|
||||
itself accompanies the executable.
|
||||
|
||||
If distribution of executable or object code is made by offering
|
||||
access to copy from a designated place, then offering equivalent
|
||||
access to copy the source code from the same place counts as
|
||||
distribution of the source code, even though third parties are not
|
||||
compelled to copy the source along with the object code.
|
||||
|
||||
4. You may not copy, modify, sublicense, or distribute the Program
|
||||
except as expressly provided under this License. Any attempt
|
||||
otherwise to copy, modify, sublicense or distribute the Program is
|
||||
void, and will automatically terminate your rights under this License.
|
||||
However, parties who have received copies, or rights, from you under
|
||||
this License will not have their licenses terminated so long as such
|
||||
parties remain in full compliance.
|
||||
|
||||
5. You are not required to accept this License, since you have not
|
||||
signed it. However, nothing else grants you permission to modify or
|
||||
distribute the Program or its derivative works. These actions are
|
||||
prohibited by law if you do not accept this License. Therefore, by
|
||||
modifying or distributing the Program (or any work based on the
|
||||
Program), you indicate your acceptance of this License to do so, and
|
||||
all its terms and conditions for copying, distributing or modifying
|
||||
the Program or works based on it.
|
||||
|
||||
6. Each time you redistribute the Program (or any work based on the
|
||||
Program), the recipient automatically receives a license from the
|
||||
original licensor to copy, distribute or modify the Program subject to
|
||||
these terms and conditions. You may not impose any further
|
||||
restrictions on the recipients' exercise of the rights granted herein.
|
||||
You are not responsible for enforcing compliance by third parties to
|
||||
this License.
|
||||
|
||||
7. If, as a consequence of a court judgment or allegation of patent
|
||||
infringement or for any other reason (not limited to patent issues),
|
||||
conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot
|
||||
distribute so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you
|
||||
may not distribute the Program at all. For example, if a patent
|
||||
license would not permit royalty-free redistribution of the Program by
|
||||
all those who receive copies directly or indirectly through you, then
|
||||
the only way you could satisfy both it and this License would be to
|
||||
refrain entirely from distribution of the Program.
|
||||
|
||||
If any portion of this section is held invalid or unenforceable under
|
||||
any particular circumstance, the balance of the section is intended to
|
||||
apply and the section as a whole is intended to apply in other
|
||||
circumstances.
|
||||
|
||||
It is not the purpose of this section to induce you to infringe any
|
||||
patents or other property right claims or to contest validity of any
|
||||
such claims; this section has the sole purpose of protecting the
|
||||
integrity of the free software distribution system, which is
|
||||
implemented by public license practices. Many people have made
|
||||
generous contributions to the wide range of software distributed
|
||||
through that system in reliance on consistent application of that
|
||||
system; it is up to the author/donor to decide if he or she is willing
|
||||
to distribute software through any other system and a licensee cannot
|
||||
impose that choice.
|
||||
|
||||
This section is intended to make thoroughly clear what is believed to
|
||||
be a consequence of the rest of this License.
|
||||
|
||||
8. If the distribution and/or use of the Program is restricted in
|
||||
certain countries either by patents or by copyrighted interfaces, the
|
||||
original copyright holder who places the Program under this License
|
||||
may add an explicit geographical distribution limitation excluding
|
||||
those countries, so that distribution is permitted only in or among
|
||||
countries not thus excluded. In such case, this License incorporates
|
||||
the limitation as if written in the body of this License.
|
||||
|
||||
9. The Free Software Foundation may publish revised and/or new versions
|
||||
of the General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the Program
|
||||
specifies a version number of this License which applies to it and "any
|
||||
later version", you have the option of following the terms and conditions
|
||||
either of that version or of any later version published by the Free
|
||||
Software Foundation. If the Program does not specify a version number of
|
||||
this License, you may choose any version ever published by the Free Software
|
||||
Foundation.
|
||||
|
||||
10. If you wish to incorporate parts of the Program into other free
|
||||
programs whose distribution conditions are different, write to the author
|
||||
to ask for permission. For software which is copyrighted by the Free
|
||||
Software Foundation, write to the Free Software Foundation; we sometimes
|
||||
make exceptions for this. Our decision will be guided by the two goals
|
||||
of preserving the free status of all derivatives of our free software and
|
||||
of promoting the sharing and reuse of software generally.
|
||||
|
||||
NO WARRANTY
|
||||
|
||||
11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
|
||||
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
|
||||
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
|
||||
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
|
||||
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
|
||||
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
|
||||
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
|
||||
REPAIR OR CORRECTION.
|
||||
|
||||
12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
|
||||
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
|
||||
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
|
||||
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
|
||||
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
|
||||
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
|
||||
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
|
||||
POSSIBILITY OF SUCH DAMAGES.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
|
||||
-------------------------------------------------------------------------------
|
||||
|
||||
OpenSSL License
|
||||
---------------
|
||||
|
||||
LICENSE ISSUES
|
||||
==============
|
||||
|
||||
The OpenSSL toolkit stays under a dual license, i.e. both the conditions of
|
||||
the OpenSSL License and the original SSLeay license apply to the toolkit.
|
||||
See below for the actual license texts. Actually both licenses are BSD-style
|
||||
Open Source licenses. In case of any license issues related to OpenSSL
|
||||
please contact openssl-core@openssl.org.
|
||||
|
||||
OpenSSL License
|
||||
---------------
|
||||
|
||||
/* ====================================================================
|
||||
* Copyright (c) 1998-2001 The OpenSSL Project. All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions
|
||||
* are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
*
|
||||
* 2. Redistributions in binary form must reproduce the above copyright
|
||||
* notice, this list of conditions and the following disclaimer in
|
||||
* the documentation and/or other materials provided with the
|
||||
* distribution.
|
||||
*
|
||||
* 3. All advertising materials mentioning features or use of this
|
||||
* software must display the following acknowledgment:
|
||||
* "This product includes software developed by the OpenSSL Project
|
||||
* for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
|
||||
*
|
||||
* 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
|
||||
* endorse or promote products derived from this software without
|
||||
* prior written permission. For written permission, please contact
|
||||
* openssl-core@openssl.org.
|
||||
*
|
||||
* 5. Products derived from this software may not be called "OpenSSL"
|
||||
* nor may "OpenSSL" appear in their names without prior written
|
||||
* permission of the OpenSSL Project.
|
||||
*
|
||||
* 6. Redistributions of any form whatsoever must retain the following
|
||||
* acknowledgment:
|
||||
* "This product includes software developed by the OpenSSL Project
|
||||
* for use in the OpenSSL Toolkit (http://www.openssl.org/)"
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
|
||||
* EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
|
||||
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
|
||||
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
|
||||
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
|
||||
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
|
||||
* OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
* ====================================================================
|
||||
*
|
||||
* This product includes cryptographic software written by Eric Young
|
||||
* (eay@cryptsoft.com). This product includes software written by Tim
|
||||
* Hudson (tjh@cryptsoft.com).
|
||||
*
|
||||
*/
|
||||
|
||||
Original SSLeay License
|
||||
-----------------------
|
||||
|
||||
/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
|
||||
* All rights reserved.
|
||||
*
|
||||
* This package is an SSL implementation written
|
||||
* by Eric Young (eay@cryptsoft.com).
|
||||
* The implementation was written so as to conform with Netscapes SSL.
|
||||
*
|
||||
* This library is free for commercial and non-commercial use as long as
|
||||
* the following conditions are aheared to. The following conditions
|
||||
* apply to all code found in this distribution, be it the RC4, RSA,
|
||||
* lhash, DES, etc., code; not just the SSL code. The SSL documentation
|
||||
* included with this distribution is covered by the same copyright terms
|
||||
* except that the holder is Tim Hudson (tjh@cryptsoft.com).
|
||||
*
|
||||
* Copyright remains Eric Young's, and as such any Copyright notices in
|
||||
* the code are not to be removed.
|
||||
* If this package is used in a product, Eric Young should be given attribution
|
||||
* as the author of the parts of the library used.
|
||||
* This can be in the form of a textual message at program startup or
|
||||
* in documentation (online or textual) provided with the package.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions
|
||||
* are met:
|
||||
* 1. Redistributions of source code must retain the copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright
|
||||
* notice, this list of conditions and the following disclaimer in the
|
||||
* documentation and/or other materials provided with the distribution.
|
||||
* 3. All advertising materials mentioning features or use of this software
|
||||
* must display the following acknowledgement:
|
||||
* "This product includes cryptographic software written by
|
||||
* Eric Young (eay@cryptsoft.com)"
|
||||
* The word 'cryptographic' can be left out if the routines from the library
|
||||
* being used are not cryptographic related :-).
|
||||
* 4. If you include any Windows specific code (or a derivative thereof) from
|
||||
* the apps directory (application code) you must include an acknowledgement:
|
||||
* "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
|
||||
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
|
||||
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
|
||||
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
|
||||
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
|
||||
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
||||
* SUCH DAMAGE.
|
||||
*
|
||||
* The licence and distribution terms for any publically available version or
|
||||
* derivative of this code cannot be changed. i.e. this code cannot simply be
|
||||
* copied and put under another distribution licence
|
||||
* [including the GNU Public Licence.]
|
||||
*/
|
README.md (78 lines)

@@ -1,21 +1,77 @@
# IMPORTANT NOTE

# Wazuh containers for Docker

The first time you run this container it can take a while until Kibana finishes its configuration, and the Wazuh plugin can take a few minutes to finish installing, so please be patient.

[Slack](https://wazuh.com/community/join-us-on-slack/)
[Google group](https://groups.google.com/forum/#!forum/wazuh)
[Documentation](https://documentation.wazuh.com)
[Web](https://wazuh.com)

# Docker container Wazuh 2.0 + ELK (5.4.2)

In this repository you will find the containers to run:

The source files for this Docker container can be found in our [Wazuh GitHub repository](https://github.com/wazuh/wazuh). It includes both an OSSEC manager and an Elasticsearch single-node cluster, with Logstash and Kibana. You can find more information on how these components work together in our documentation.

* wazuh: Runs the Wazuh manager, the Wazuh API and Filebeat (for integration with the Elastic Stack).
* wazuh-kibana: Provides a web user interface to browse through the alerts data. It includes the Wazuh plugin for Kibana, which allows you to visualize agent configuration and status.
* wazuh-nginx: Proxies the Kibana container, adding HTTPS (via a self-signed SSL certificate) and [Basic authentication](https://developer.mozilla.org/en-US/docs/Web/HTTP/Authentication#Basic_authentication_scheme).
* wazuh-elasticsearch: An Elasticsearch container (working as a single-node cluster) using Elastic Stack Docker images. **Remember to increase the `vm.max_map_count` setting, as detailed in the [Wazuh documentation](https://documentation.wazuh.com/current/docker/wazuh-container.html#increase-max-map-count-on-your-host-linux); see the example after this list.**

In addition, a docker-compose file is provided to launch the containers mentioned above.
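Assuming a Linux host with Docker and Docker Compose installed, a typical first run looks like the following sketch (the `vm.max_map_count` value shown is the minimum Elasticsearch requires):

```bash
# Increase the mmap count required by Elasticsearch (persist it in /etc/sysctl.conf if needed).
sysctl -w vm.max_map_count=262144

# Get the compose file and start every container in the background.
git clone https://github.com/wazuh/wazuh-docker.git
cd wazuh-docker
docker-compose up -d

# Watch the manager come up; the first start can take a few minutes.
docker-compose logs -f wazuh
```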
* Elasticsearch cluster. The Elasticsearch Dockerfile exposes the variables used to configure an Elasticsearch cluster. These variables are consumed by the *config_cluster.sh* script, which writes them into the *elasticsearch.yml* configuration file. You can see the meaning of the node variables [here](https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-node.html) and other cluster settings [here](https://github.com/elastic/elasticsearch/blob/master/distribution/src/config/elasticsearch.yml).
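As an illustration only (the container name and values below are hypothetical; the variable names come from the Elasticsearch image and its `config_cluster.sh` script in this repository), an additional data-only node could be started roughly like this:

```bash
# Hypothetical example: start an extra Elasticsearch node that joins the "wazuh" cluster
# as a data/ingest node (not master). Attach it to the same Docker network as the stack;
# the actual network name depends on your compose project name.
docker run -d --name wazuh-elasticsearch-data \
  -e ELASTIC_CLUSTER=true \
  -e CLUSTER_NAME=wazuh \
  -e CLUSTER_NODE_MASTER=false \
  -e CLUSTER_NODE_DATA=true \
  -e CLUSTER_NODE_INGEST=true \
  -e CLUSTER_DISCOVERY_SEED=elasticsearch \
  wazuh/wazuh-elasticsearch:3.10.2_7.3.2
```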
## Documentation

* [Full documentation](http://documentation.wazuh.com)
* [Wazuh-docker module documentation](https://documentation.wazuh.com/current/docker/index.html)
* [Hub docker](https://hub.docker.com/u/wazuh)
* [Wazuh full documentation](http://documentation.wazuh.com)
* [Wazuh documentation for Docker](https://documentation.wazuh.com/current/docker/index.html)
* [Docker hub](https://hub.docker.com/u/wazuh)

## Credits and thank you

## Directory structure

These Docker containers are based on "deviantony" dockerfiles, which can be found at [https://github.com/deviantony/docker-elk](https://github.com/deviantony/docker-elk), and "xetus-oss" dockerfiles, which can be found at [https://github.com/xetus-oss/docker-ossec-server](https://github.com/xetus-oss/docker-ossec-server). We created our own fork, which we test and maintain. Thank you Anthony Lapenna for your contribution to the community.
wazuh-docker
├── docker-compose.yml
├── kibana
│   ├── config
│   │   ├── entrypoint.sh
│   │   └── kibana.yml
│   └── Dockerfile
├── LICENSE
├── nginx
│   ├── config
│   │   └── entrypoint.sh
│   └── Dockerfile
├── README.md
├── CHANGELOG.md
├── VERSION
├── test.txt
└── wazuh
    ├── config
    │   ├── data_dirs.env
    │   ├── entrypoint.sh
    │   ├── filebeat.runit.service
    │   ├── filebeat.yml
    │   ├── init.bash
    │   ├── postfix.runit.service
    │   ├── wazuh-api.runit.service
    │   └── wazuh.runit.service
    └── Dockerfile
## References

* [Wazuh website](http://wazuh.com)

## Branches

* The `stable` branch corresponds to the latest Wazuh-Docker stable version.
* The `master` branch contains the latest code; be aware of possible bugs on this branch.
* The `Wazuh.Version_ElasticStack.Version` branches (for example `3.10.2_7.3.2`) contain the release currently referenced in Docker Hub. The container images are built from the current state of the corresponding branch.
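For example, to work with the images of a specific release you would check out the matching version branch:

```bash
# Clone only the branch that matches a given Wazuh/Elastic Stack release.
git clone -b 3.10.2_7.3.2 https://github.com/wazuh/wazuh-docker.git
```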
## Credits and Thank you

These Docker containers are based on:

* "deviantony" dockerfiles, which can be found at [https://github.com/deviantony/docker-elk](https://github.com/deviantony/docker-elk)
* "xetus-oss" dockerfiles, which can be found at [https://github.com/xetus-oss/docker-ossec-server](https://github.com/xetus-oss/docker-ossec-server)

We thank them and everyone else who has contributed to this project.

## License and copyright

Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

## Web references

[Wazuh website](http://wazuh.com)
VERSION (new file, 2 lines)

@@ -0,0 +1,2 @@
WAZUH-DOCKER_VERSION="3.11.5_7.3.2"
REVISION="31150"
docker-compose.yml

@@ -1,71 +1,92 @@
# Wazuh App Copyright (C) 2019 Wazuh Inc. (License GPLv2)
version: '2'

services:
  wazuh:
    image: wazuh/wazuh
    image: wazuh/wazuh:3.10.2_7.3.2
    hostname: wazuh-manager
    restart: always
    ports:
      - "1514/udp:1514/udp"
      - "1514:1514/udp"
      - "1515:1515"
      - "514/udp:514/udp"
      - "514:514/udp"
      - "55000:55000"
    networks:
      - docker_elk
    # volumes:
    #   - my-path:/var/ossec/data
    #   - my-path:/etc/postfix
    depends_on:
      - elasticsearch
  logstash:
    image: wazuh/wazuh-logstash
    hostname: logstash
    restart: always
    command: -f /etc/logstash/conf.d/
    # volumes:
    #   - my-path:/etc/logstash/conf.d
    links:
      - kibana
      - elasticsearch
    ports:
      - "5000:5000"
    networks:
      - docker_elk
    depends_on:
      - elasticsearch
    environment:
      - LS_HEAP_SIZE=2048m
    # depends_on:
    #   - logstash
  # logstash:
  #   image: wazuh/wazuh-elasticsearch:3.10.2_7.3.2
  #   hostname: logstash
  #   restart: always
  #   links:
  #     - elasticsearch:elasticsearch
  #   ports:
  #     - "5000:5000"
  #   depends_on:
  #     - elasticsearch
  #   environment:
  #     - LS_HEAP_SIZE=2048m
  #     - SECURITY_ENABLED=no
  #     - SECURITY_LOGSTASH_USER=service_logstash
  #     - SECURITY_LOGSTASH_PASS=logstash_pass
  #     - LOGSTASH_OUTPUT=https://elasticsearch:9200
  #     - ELASTICSEARCH_URL=https://elasticsearch:9200
  #     - SECURITY_CA_PEM=server.TEST-CA-signed.pem
  elasticsearch:
    image: elasticsearch:5.4.2
    image: wazuh/wazuh-elasticsearch:3.10.2_7.3.2
    hostname: elasticsearch
    restart: always
    command: elasticsearch -E node.name="node-1" -E cluster.name="wazuh" -E network.host=0.0.0.0
    ports:
      - "9200:9200"
      - "9300:9300"
    environment:
      ES_JAVA_OPTS: "-Xms2g -Xmx2g"
    # volumes:
    #   - my-path:/usr/share/elasticsearch/data
    networks:
      - docker_elk
      - "ES_JAVA_OPTS=-Xms1g -Xmx1g"
      - ELASTICSEARCH_PROTOCOL=http
      - ELASTICSEARCH_IP=elasticsearch
      - ELASTICSEARCH_PORT=9200
      - SECURITY_ENABLED=no
      - SECURITY_ELASTIC_PASSWORD=elastic_pass
      - SECURITY_MAIN_NODE=elasticsearch
      - ELASTIC_CLUSTER=true
      - CLUSTER_NODE_MASTER=true
      - CLUSTER_MASTER_NODE_NAME=elasticsearch
      - CLUSTER_NODE_DATA=true
      - CLUSTER_NODE_INGEST=true
      - CLUSTER_MAX_NODES=3
    ulimits:
      memlock:
        soft: -1
        hard: -1
    mem_limit: 2g

  kibana:
    image: wazuh/wazuh-kibana
    image: wazuh/wazuh-kibana:3.10.2_7.3.2
    hostname: kibana
    restart: always
    ports:
      - "5601:5601"
    networks:
      - docker_elk
    depends_on:
      - elasticsearch
    entrypoint: sh wait-for-it.sh elasticsearch
    # environment:
    #   - "WAZUH_KIBANA_PLUGIN_URL=http://your.repo/wazuhapp-2.0_5.4.2.zip"
    links:
      - elasticsearch:elasticsearch
      - wazuh:wazuh
    environment:
      - ELASTICSEARCH_URL=https://elasticsearch:9200
      - SECURITY_ENABLED=no
      - SECURITY_KIBANA_USER=service_kibana
      - SECURITY_KIBANA_PASS=kibana_pass
      - ELASTICSEARCH_KIBANA_IP=https://elasticsearch:9200
      - SECURITY_CA_PEM=server.TEST-CA-signed.pem
    ports:
      - "5601:5601"

networks:
  docker_elk:
    driver: bridge
    ipam:
      config:
        - subnet: 172.25.0.0/24
  nginx:
    image: wazuh/wazuh-nginx:3.10.2_7.3.2
    hostname: nginx
    restart: always
    environment:
      - NGINX_PORT=443
      - NGINX_CREDENTIALS
    ports:
      - "80:80"
      - "443:443"
    depends_on:
      - kibana
    links:
      - kibana:kibana
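Once the stack defined above is running, a few curl calls can confirm that each published port responds. This is only a sketch; `$NGINX_USER`, `$NGINX_PASS`, `$WAZUH_API_USER` and `$WAZUH_API_PASS` stand in for whatever credentials you configured (for example through `NGINX_CREDENTIALS` and the Wazuh API setup):

```bash
# Elasticsearch REST API (port 9200): cluster health should report green or yellow.
curl -s http://localhost:9200/_cluster/health?pretty

# Kibana through the NGINX proxy (ports 80/443, self-signed certificate plus Basic auth).
curl -k -u "$NGINX_USER:$NGINX_PASS" https://localhost/

# Wazuh API on the manager (port 55000); use the API credentials of your deployment.
curl -s -u "$WAZUH_API_USER:$WAZUH_API_PASS" http://localhost:55000/version
```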
elasticsearch/Dockerfile (new file, 96 lines)

@@ -0,0 +1,96 @@
|
||||
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)
|
||||
ARG ELASTIC_VERSION=7.4.2
|
||||
FROM docker.elastic.co/elasticsearch/elasticsearch:${ELASTIC_VERSION}
|
||||
ARG TEMPLATE_VERSION=v3.11.4
|
||||
|
||||
ENV ELASTICSEARCH_URL="http://elasticsearch:9200"
|
||||
|
||||
ENV API_USER="foo" \
|
||||
API_PASS="bar"
|
||||
|
||||
ENV XPACK_ML="true"
|
||||
|
||||
ENV ENABLE_CONFIGURE_S3="false"
|
||||
|
||||
ENV WAZUH_ALERTS_SHARDS="1" \
|
||||
WAZUH_ALERTS_REPLICAS="0"
|
||||
|
||||
ADD https://raw.githubusercontent.com/wazuh/wazuh/$TEMPLATE_VERSION/extensions/elasticsearch/7.x/wazuh-template.json /usr/share/elasticsearch/config
|
||||
|
||||
RUN yum install epel-release -y && \
|
||||
yum install jq -y
|
||||
|
||||
# This CA is created for testing. Please set your own CA zip containing the key and the signed certificate.
|
||||
# command: $ docker build <elasticsearch_directory> --build-arg SECURITY_CA_PEM_LOCATION=<CA_PEM_LOCATION> --build-arg SECURITY_CA_KEY_LOCATION=<CA_KEY_LOCATION>
|
||||
# ENV variables are necessary: SECURITY_CA_PEM, SECURITY_CA_KEY, SECURITY_CA_TRUST, SECURITY_OPENSSL_CONF
|
||||
# Example:
|
||||
# ARG SECURITY_CA_PEM_LOCATION="config/server.TEST-CA-signed.pem"
|
||||
# ARG SECURITY_CA_KEY_LOCATION="config/server.TEST-CA.key"
# ARG SECURITY_OPENSSL_CONF_LOCATION="config/TEST_openssl.cnf"
# ARG SECURITY_CA_TRUST_LOCATION="config/server.TEST-CA-signed.pem"
ARG SECURITY_CA_PEM_LOCATION=""
ARG SECURITY_CA_KEY_LOCATION=""
ARG SECURITY_OPENSSL_CONF_LOCATION=""
ARG SECURITY_CA_TRUST_LOCATION=""

# Elasticsearch cluster configuration environment variables.
# If ELASTIC_CLUSTER is set to "true", the following variables are added to the Elasticsearch configuration.
# CLUSTER_INITIAL_MASTER_NODES is set to the node's own name by default.
ENV ELASTIC_CLUSTER="false" \
    CLUSTER_NAME="wazuh" \
    CLUSTER_NODE_MASTER="false" \
    CLUSTER_NODE_DATA="true" \
    CLUSTER_NODE_INGEST="true" \
    CLUSTER_MEMORY_LOCK="true" \
    CLUSTER_DISCOVERY_SERVICE="wazuh-elasticsearch" \
    CLUSTER_NUMBER_OF_MASTERS="2" \
    CLUSTER_MAX_NODES="1" \
    CLUSTER_DELAYED_TIMEOUT="1m" \
    CLUSTER_INITIAL_MASTER_NODES="wazuh-elasticsearch" \
    CLUSTER_DISCOVERY_SEED="elasticsearch"

# CA cert for Transport SSL
ADD $SECURITY_CA_PEM_LOCATION /usr/share/elasticsearch/config
ADD $SECURITY_CA_KEY_LOCATION /usr/share/elasticsearch/config
ADD $SECURITY_OPENSSL_CONF_LOCATION /usr/share/elasticsearch/config
ADD $SECURITY_CA_TRUST_LOCATION /usr/share/elasticsearch/config

RUN mkdir /entrypoint-scripts

COPY config/entrypoint.sh /entrypoint.sh

RUN chmod 755 /entrypoint.sh

RUN bin/elasticsearch-plugin install repository-s3 -b

COPY --chown=elasticsearch:elasticsearch ./config/10-config_cluster.sh /entrypoint-scripts/10-config_cluster.sh
COPY --chown=elasticsearch:elasticsearch ./config/15-get_CA_key.sh /entrypoint-scripts/15-get_CA_key.sh
COPY --chown=elasticsearch:elasticsearch ./config/20-security_instances.sh /entrypoint-scripts/20-security_instances.sh
COPY --chown=elasticsearch:elasticsearch ./config/22-security_certs.sh /entrypoint-scripts/22-security_certs.sh
COPY --chown=elasticsearch:elasticsearch ./config/24-security_configuration.sh /entrypoint-scripts/24-security_configuration.sh
COPY --chown=elasticsearch:elasticsearch ./config/26-security_keystore.sh /entrypoint-scripts/26-security_keystore.sh
COPY --chown=elasticsearch:elasticsearch ./config/30-decrypt_credentials.sh /entrypoint-scripts/30-decrypt_credentials.sh
COPY --chown=elasticsearch:elasticsearch ./config/35-entrypoint.sh /entrypoint-scripts/35-entrypoint.sh
COPY --chown=elasticsearch:elasticsearch ./config/35-entrypoint_load_settings.sh ./
COPY config/35-load_settings_configure_s3.sh ./config/35-load_settings_configure_s3.sh
COPY --chown=elasticsearch:elasticsearch ./config/35-load_settings_users_management.sh ./
COPY --chown=elasticsearch:elasticsearch ./config/35-load_settings_policies.sh ./
COPY --chown=elasticsearch:elasticsearch ./config/35-load_settings_templates.sh ./
COPY --chown=elasticsearch:elasticsearch ./config/35-load_settings_aliases.sh ./
RUN chmod +x /entrypoint-scripts/10-config_cluster.sh && \
    chmod +x /entrypoint-scripts/15-get_CA_key.sh && \
    chmod +x /entrypoint-scripts/20-security_instances.sh && \
    chmod +x /entrypoint-scripts/22-security_certs.sh && \
    chmod +x /entrypoint-scripts/24-security_configuration.sh && \
    chmod +x /entrypoint-scripts/26-security_keystore.sh && \
    chmod +x /entrypoint-scripts/30-decrypt_credentials.sh && \
    chmod +x /entrypoint-scripts/35-entrypoint.sh && \
    chmod +x ./35-entrypoint_load_settings.sh && \
    chmod 755 ./config/35-load_settings_configure_s3.sh && \
    chmod +x ./35-load_settings_users_management.sh && \
    chmod +x ./35-load_settings_policies.sh && \
    chmod +x ./35-load_settings_templates.sh && \
    chmod +x ./35-load_settings_aliases.sh

ENTRYPOINT ["/entrypoint.sh"]
CMD ["elasticsearch"]
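As a hedged usage sketch (the image tag, container name and Docker network below are illustrative assumptions, not values taken from this repository), the cluster variables declared above would normally be overridden when the container is started:

# Minimal sketch: start a data-only node that joins an existing cluster.
docker run -d --name wazuh-elasticsearch-data \
  --network wazuh-net \
  -e ELASTIC_CLUSTER="true" \
  -e CLUSTER_NAME="wazuh" \
  -e CLUSTER_NODE_MASTER="false" \
  -e CLUSTER_NODE_DATA="true" \
  -e CLUSTER_NODE_INGEST="true" \
  -e CLUSTER_DISCOVERY_SEED="wazuh-elasticsearch" \
  wazuh/wazuh-elasticsearch:7.4.2   # illustrative tag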
elasticsearch/config/10-config_cluster.sh (new file, 93 lines)
@@ -0,0 +1,93 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

elastic_config_file="/usr/share/elasticsearch/config/elasticsearch.yml"
original_file="/usr/share/elasticsearch/config/original-elasticsearch.yml"
ELASTIC_HOSTAME=`hostname`

echo "CLUSTER: - Prepare Configuration"
echo "CLUSTER: - Hostname"
echo $ELASTIC_HOSTAME
echo "CLUSTER: - Security main node"
echo $SECURITY_MAIN_NODE
echo "CLUSTER: - Discovery seed"
echo $CLUSTER_DISCOVERY_SEED
echo "CLUSTER: - Elastic cluster flag"
echo $ELASTIC_CLUSTER
echo "CLUSTER: - Node Master"
echo $CLUSTER_NODE_MASTER
echo "CLUSTER: - Node Data"
echo $CLUSTER_NODE_DATA
echo "CLUSTER: - Node Ingest"
echo $CLUSTER_NODE_INGEST

cp $elastic_config_file $original_file

remove_single_node_conf(){
  if grep -Fq "discovery.type" $1; then
    sed -i '/discovery.type\: /d' $1
  fi
}

remove_cluster_config(){
  sed -i '/# cluster node/,/# end cluster config/d' $1
}

# If the Elasticsearch cluster is enabled, then set up elasticsearch.yml
if [[ $ELASTIC_CLUSTER == "true" && $CLUSTER_NODE_MASTER != "" && $CLUSTER_NODE_DATA != "" && $CLUSTER_NODE_INGEST != "" && $ELASTIC_HOSTAME != "" ]]; then
  # Remove the old configuration
  remove_single_node_conf $elastic_config_file
  remove_cluster_config $elastic_config_file
  echo "CLUSTER: - Remove old configuration"

  if [[ $ELASTIC_HOSTAME == $SECURITY_MAIN_NODE ]]; then
    # Add the master configuration.
    # cluster.initial_master_nodes is used to bootstrap the cluster.
    echo "CLUSTER: - Add the master configuration"

    cat > $elastic_config_file << EOF
# cluster node
cluster.name: $CLUSTER_NAME
bootstrap.memory_lock: $CLUSTER_MEMORY_LOCK
network.host: 0.0.0.0
node.name: $ELASTIC_HOSTAME
node.master: $CLUSTER_NODE_MASTER
node.data: $CLUSTER_NODE_DATA
node.ingest: $CLUSTER_NODE_INGEST
node.max_local_storage_nodes: $CLUSTER_MAX_NODES
cluster.initial_master_nodes:
  - $ELASTIC_HOSTAME
# end cluster config
EOF

  elif [[ $CLUSTER_DISCOVERY_SEED != "" ]]; then
    # Remove the old configuration
    remove_single_node_conf $elastic_config_file
    remove_cluster_config $elastic_config_file
    echo "CLUSTER: - Add standard cluster configuration."

    cat > $elastic_config_file << EOF
# cluster node
cluster.name: $CLUSTER_NAME
bootstrap.memory_lock: $CLUSTER_MEMORY_LOCK
network.host: 0.0.0.0
node.name: $ELASTIC_HOSTAME
node.master: $CLUSTER_NODE_MASTER
node.data: $CLUSTER_NODE_DATA
node.ingest: $CLUSTER_NODE_INGEST
node.max_local_storage_nodes: $CLUSTER_MAX_NODES
discovery.seed_hosts:
  - $CLUSTER_DISCOVERY_SEED
# end cluster config
EOF
  fi
# If the cluster is disabled, then set a single-node configuration
else
  # Remove the old configuration
  remove_single_node_conf $elastic_config_file
  remove_cluster_config $elastic_config_file
  echo "discovery.type: single-node" >> $elastic_config_file
  echo "CLUSTER: - Discovery type: single-node"
fi

echo "CLUSTER: - Configured"
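A hedged illustration of the effect, run inside the Elasticsearch container: once the cluster branch is taken, the generated configuration sits between the two markers that remove_cluster_config strips on the next run, so it can be inspected directly.

# Show the block written by 10-config_cluster.sh between its delimiters.
sed -n '/# cluster node/,/# end cluster config/p' /usr/share/elasticsearch/config/elasticsearch.yml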
elasticsearch/config/15-get_CA_key.sh (new file, 11 lines)
@@ -0,0 +1,11 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

##############################################################################
# Decrypt credentials.
# If the CA key is encrypted, it must be decrypted for later use.
##############################################################################

echo "TO DO"

# TO DO
elasticsearch/config/20-security_instances.sh (new file, 21 lines)
@@ -0,0 +1,21 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

##############################################################################
# instances.yml
# This file is necessary for the creation of the Elasticsearch certificate.
##############################################################################

if [[ $SECURITY_ENABLED == "yes" ]]; then
  echo "SECURITY - Setting Elasticsearch security."

  # instances.yml to be added by the user.
  # Example:
  # echo "
  # instances:
  #   - name: \"elasticsearch\"
  #     dns:
  #       - \"elasticsearch\"
  # " > /usr/share/elasticsearch/instances.yml

fi
elasticsearch/config/22-security_certs.sh (new file, 16 lines)
@@ -0,0 +1,16 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

##############################################################################
# Creation and management of certificates.
##############################################################################

if [[ $SECURITY_ENABLED == "yes" ]]; then
  echo "SECURITY - Elasticsearch security certificates."

  # Creation of the certificate for Elasticsearch.
  # After this script runs, the Elasticsearch certificate and the related
  # keys and passphrase will have been generated.
  # Example: TO DO

fi
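A hedged sketch of what the certificate-creation step left as "TO DO" could look like; the elasticsearch-certutil invocation, the CA file names and the availability of unzip inside the image are assumptions chosen for illustration, not part of this repository.

# Assumes instances.yml was written by 20-security_instances.sh and a CA pair is present.
/usr/share/elasticsearch/bin/elasticsearch-certutil cert --pem --silent \
  --in /usr/share/elasticsearch/instances.yml \
  --ca-cert /usr/share/elasticsearch/config/ca.cert.pem \
  --ca-key /usr/share/elasticsearch/config/ca.key.pem \
  --out /usr/share/elasticsearch/config/certs.zip
# Unpack the generated certificate and key next to elasticsearch.yml (unzip availability assumed).
unzip -o /usr/share/elasticsearch/config/certs.zip -d /usr/share/elasticsearch/config/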
elasticsearch/config/24-security_configuration.sh (new file, 32 lines)
@@ -0,0 +1,32 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

##############################################################################
# Adapt the elasticsearch.yml configuration file.
##############################################################################

if [[ $SECURITY_ENABLED == "yes" ]]; then
  echo "SECURITY - Elasticsearch security configuration."

  echo "SECURITY - Setting configuration options."

  # Settings for elasticsearch.yml to be added by the user.
  # Example:
  # echo "
  # # Required to set the passwords and TLS options
  # xpack.security.enabled: true
  # xpack.security.transport.ssl.enabled: true
  # xpack.security.transport.ssl.verification_mode: certificate
  # xpack.security.transport.ssl.key: /usr/share/elasticsearch/config/elasticsearch/elasticsearch.key
  # xpack.security.transport.ssl.certificate: /usr/share/elasticsearch/config/elasticsearch.cert.pem
  # xpack.security.transport.ssl.certificate_authorities: [\"/usr/share/elasticsearch/config/ca.cert.pem\"]

  # # HTTP layer
  # xpack.security.http.ssl.enabled: true
  # xpack.security.http.ssl.verification_mode: certificate
  # xpack.security.http.ssl.key: /usr/share/elasticsearch/config/elasticsearch/elasticsearch.key
  # xpack.security.http.ssl.certificate: /usr/share/elasticsearch/config/elasticsearch.cert.pem
  # xpack.security.http.ssl.certificate_authorities: [\"/usr/share/elasticsearch/config/ca.cert.pem\"]
  # " >> /usr/share/elasticsearch/config/elasticsearch.yml

fi
elasticsearch/config/26-security_keystore.sh (new file, 21 lines)
@@ -0,0 +1,21 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

##############################################################################
# Elasticsearch keystore management.
##############################################################################

if [[ $SECURITY_ENABLED == "yes" ]]; then
  echo "SECURITY - Elasticsearch keystore management."

  # Create the keystore
  # /usr/share/elasticsearch/bin/elasticsearch-keystore create

  # Keys to be added to the keystore by the user.
  # Example:
  # echo -e "$abcd_1234" | /usr/share/elasticsearch/bin/elasticsearch-keystore add xpack.security.transport.ssl.secure_key_passphrase --stdin
  # echo -e "$abcd_1234" | /usr/share/elasticsearch/bin/elasticsearch-keystore add xpack.security.http.ssl.secure_key_passphrase --stdin

else
  echo "SECURITY - Elasticsearch security not established."
fi
elasticsearch/config/30-decrypt_credentials.sh (new file, 15 lines)
@@ -0,0 +1,15 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

##############################################################################
# Decrypt credentials.
# If the credentials of the users to be created are encrypted,
# they must be decrypted for later use.
##############################################################################

if [[ "x${SECURITY_CREDENTIALS_FILE}" == "x" ]]; then
  echo "Security credentials file not used. Nothing to do."
else
  echo "TO DO"
fi
# TO DO
elasticsearch/config/35-entrypoint.sh (new file, 70 lines)
@@ -0,0 +1,70 @@
#!/bin/bash
# Wazuh App Copyright (C) 2019 Wazuh Inc. (License GPLv2)

# For more information see https://github.com/elastic/elasticsearch-docker/blob/6.8.1/build/elasticsearch/bin/docker-entrypoint.sh

set -e

# Files created by Elasticsearch should always be group writable too
umask 0002

run_as_other_user_if_needed() {
  if [[ "$(id -u)" == "0" ]]; then
    # If running as root, drop to the specified UID and run the command
    exec chroot --userspec=1000 / "${@}"
  else
    # Either we are running in OpenShift with a random UID and are a member of the root group,
    # or with a custom --user
    exec "${@}"
  fi
}

# Disabling X-Pack features

elasticsearch_config_file="/usr/share/elasticsearch/config/elasticsearch.yml"
if grep -Fq "#xpack features" "$elasticsearch_config_file"; then
  declare -A CONFIG_MAP=(
    [xpack.ml.enabled]=$XPACK_ML
  )
  for i in "${!CONFIG_MAP[@]}"
  do
    if [ "${CONFIG_MAP[$i]}" != "" ]; then
      sed -i 's/.'"$i"'.*/'"$i"': '"${CONFIG_MAP[$i]}"'/' $elasticsearch_config_file
    fi
  done
else
  echo "
#xpack features
xpack.ml.enabled: $XPACK_ML
" >> $elasticsearch_config_file
fi

# Run the load settings script.

bash /usr/share/elasticsearch/35-entrypoint_load_settings.sh &

# Execute Elasticsearch

if [[ $SECURITY_ENABLED == "yes" ]]; then
  echo "Change Elastic password"
  if [[ "x${SECURITY_CREDENTIALS_FILE}" == "x" ]]; then
    run_as_other_user_if_needed echo "$SECURITY_ELASTIC_PASSWORD" | elasticsearch-keystore add -xf 'bootstrap.password'
  else
    input=${SECURITY_CREDENTIALS_FILE}
    ELASTIC_PASSWORD_FROM_FILE=""
    while IFS= read -r line
    do
      if [[ $line == *"ELASTIC_PASSWORD"* ]]; then
        arrIN=(${line//:/ })
        ELASTIC_PASSWORD_FROM_FILE=${arrIN[1]}
      fi
    done < "$input"
    run_as_other_user_if_needed echo "$ELASTIC_PASSWORD_FROM_FILE" | elasticsearch-keystore add -xf 'bootstrap.password'
  fi
  echo "Elastic password changed"
fi

run_as_other_user_if_needed /usr/share/elasticsearch/bin/elasticsearch
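A hedged usage note (image tag is an illustrative assumption): since the entrypoint only rewrites xpack.ml.enabled when XPACK_ML is non-empty, the flag is typically passed at run time rather than baked into the image.

# Illustrative run; the tag is an assumption.
docker run -d --name wazuh-elasticsearch \
  -e SECURITY_ENABLED="no" \
  -e XPACK_ML="false" \
  wazuh/wazuh-elasticsearch:7.4.2
# Afterwards elasticsearch.yml contains the line: xpack.ml.enabled: false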
elasticsearch/config/35-entrypoint_load_settings.sh (new file, 172 lines)
@@ -0,0 +1,172 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

set -e

##############################################################################
# Set the Elasticsearch API url and the Wazuh API url.
##############################################################################

if [[ "x${ELASTICSEARCH_PROTOCOL}" = "x" || "x${ELASTICSEARCH_IP}" = "x" || "x${ELASTICSEARCH_PORT}" = "x" ]]; then
  el_url="http://elasticsearch:9200"
else
  el_url="${ELASTICSEARCH_PROTOCOL}://${ELASTICSEARCH_IP}:${ELASTICSEARCH_PORT}"
fi

if [[ "x${WAZUH_API_URL}" = "x" ]]; then
  wazuh_url="https://wazuh"
else
  wazuh_url="${WAZUH_API_URL}"
fi

echo "LOAD SETTINGS - Elasticsearch url: $el_url"

##############################################################################
# If Elasticsearch security is enabled, get the elastic user password and
# the Wazuh API credentials.
##############################################################################

ELASTIC_PASS=""

if [[ "x${SECURITY_CREDENTIALS_FILE}" == "x" ]]; then
  ELASTIC_PASS=${SECURITY_ELASTIC_PASSWORD}
else
  input=${SECURITY_CREDENTIALS_FILE}
  while IFS= read -r line
  do
    if [[ $line == *"ELASTIC_PASSWORD"* ]]; then
      arrIN=(${line//:/ })
      ELASTIC_PASS=${arrIN[1]}
    fi
  done < "$input"
fi

##############################################################################
# Set authentication for curl if Elasticsearch security is enabled.
##############################################################################

if [ ${SECURITY_ENABLED} != "no" ]; then
  auth="-uelastic:${ELASTIC_PASS} -k"
  echo "LOAD SETTINGS - authentication for curl established."
elif [[ ${ENABLED_XPACK} != "true" || "x${ELASTICSEARCH_USERNAME}" = "x" || "x${ELASTICSEARCH_PASSWORD}" = "x" ]]; then
  auth=""
  echo "LOAD SETTINGS - authentication for curl not established."
else
  auth="--user ${ELASTICSEARCH_USERNAME}:${ELASTICSEARCH_PASSWORD}"
  echo "LOAD SETTINGS - authentication for curl established."
fi

##############################################################################
# Wait until Elasticsearch is active.
##############################################################################

until curl ${auth} -XGET $el_url; do
  >&2 echo "LOAD SETTINGS - Elastic is unavailable - sleeping"
  sleep 5
done

>&2 echo "LOAD SETTINGS - Elastic is up - executing command"

##############################################################################
# Configure the S3 repository for Elasticsearch snapshots.
##############################################################################

if [ $ENABLE_CONFIGURE_S3 ]; then
  # Wait for Elasticsearch to be ready to create the repository
  sleep 10
  >&2 echo "S3 - Configure S3"
  if [ "x$S3_PATH" != "x" ]; then
    >&2 echo "S3 - Path: $S3_PATH"
    if [ "x$S3_ELASTIC_MAJOR" != "x" ]; then
      >&2 echo "S3 - Elasticsearch major version: $S3_ELASTIC_MAJOR"
      echo "LOAD SETTINGS - Run 35-load_settings_configure_s3.sh."
      bash /usr/share/elasticsearch/config/35-load_settings_configure_s3.sh $el_url $S3_BUCKET_NAME $S3_PATH $S3_REPOSITORY_NAME $S3_ELASTIC_MAJOR
    else
      >&2 echo "S3 - Elasticsearch major version not given."
      echo "LOAD SETTINGS - Run 35-load_settings_configure_s3.sh."
      bash /usr/share/elasticsearch/config/35-load_settings_configure_s3.sh $el_url $S3_BUCKET_NAME $S3_PATH $S3_REPOSITORY_NAME
    fi
  fi
fi

##############################################################################
# Load custom policies.
##############################################################################

echo "LOAD SETTINGS - Loading custom Elasticsearch policies."
bash /usr/share/elasticsearch/35-load_settings_policies.sh

##############################################################################
# Modify the wazuh-alerts template shards and replicas.
##############################################################################

echo "LOAD SETTINGS - Change shards and replicas of the wazuh-alerts template."
sed -i 's:"index.number_of_shards"\: "3":"index.number_of_shards"\: "'$WAZUH_ALERTS_SHARDS'":g' /usr/share/elasticsearch/config/wazuh-template.json
sed -i 's:"index.number_of_replicas"\: "0":"index.number_of_replicas"\: "'$WAZUH_ALERTS_REPLICAS'":g' /usr/share/elasticsearch/config/wazuh-template.json

##############################################################################
# Load default templates.
##############################################################################

echo "LOAD SETTINGS - Loading wazuh-alerts template"
bash /usr/share/elasticsearch/35-load_settings_templates.sh

##############################################################################
# Load custom aliases.
##############################################################################

echo "LOAD SETTINGS - Loading custom Elasticsearch aliases."
bash /usr/share/elasticsearch/35-load_settings_aliases.sh

##############################################################################
# Elastic Stack users creation.
# Only the security main node can manage users.
##############################################################################

echo "LOAD SETTINGS - Run users_management.sh."
MY_HOSTNAME=`hostname`
echo "LOAD SETTINGS - Hostname: $MY_HOSTNAME"
if [[ $SECURITY_MAIN_NODE == $MY_HOSTNAME ]]; then
  bash /usr/share/elasticsearch/35-load_settings_users_management.sh &
fi

##############################################################################
# Enable xpack.monitoring.collection.
##############################################################################

curl -XPUT "$el_url/_cluster/settings" ${auth} -H 'Content-Type: application/json' -d'
{
  "persistent": {
    "xpack.monitoring.collection.enabled": true
  }
}
'

##############################################################################
# Set the cluster delayed timeout used when a node goes down.
##############################################################################

curl -X PUT "$el_url/_all/_settings" ${auth} -H 'Content-Type: application/json' -d'
{
  "settings": {
    "index.unassigned.node_left.delayed_timeout": "'"$CLUSTER_DELAYED_TIMEOUT"'"
  }
}
'
echo "LOAD SETTINGS - cluster delayed timeout changed."

echo "LOAD SETTINGS - Elasticsearch is ready."
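A hedged verification sketch: the endpoints below are standard Elasticsearch 7.x APIs and the auth variable follows the convention already used in the script above.

# Confirm the settings pushed by the load-settings script took effect.
curl ${auth} -s "$el_url/_cluster/settings?pretty"                    # expects xpack.monitoring.collection.enabled: true
curl ${auth} -s "$el_url/_all/_settings?pretty" | grep delayed_timeout
curl ${auth} -s "$el_url/_template/wazuh?filter_path=wazuh.settings"  # wazuh-alerts shards/replicas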
elasticsearch/config/35-load_settings_aliases.sh (new file, 86 lines)
@@ -0,0 +1,86 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

set -e

##############################################################################
# Set the Elasticsearch API url.
##############################################################################

if [[ "x${ELASTICSEARCH_PROTOCOL}" = "x" || "x${ELASTICSEARCH_IP}" = "x" || "x${ELASTICSEARCH_PORT}" = "x" ]]; then
  el_url="http://elasticsearch:9200"
else
  el_url="${ELASTICSEARCH_PROTOCOL}://${ELASTICSEARCH_IP}:${ELASTICSEARCH_PORT}"
fi

echo "ALIASES - Elasticsearch url: $el_url"

##############################################################################
# If Elasticsearch security is enabled, get the elastic user password.
##############################################################################

ELASTIC_PASS=""

if [[ "x${SECURITY_CREDENTIALS_FILE}" == "x" ]]; then
  ELASTIC_PASS=${SECURITY_ELASTIC_PASSWORD}
else
  input=${SECURITY_CREDENTIALS_FILE}
  while IFS= read -r line
  do
    if [[ $line == *"ELASTIC_PASSWORD"* ]]; then
      arrIN=(${line//:/ })
      ELASTIC_PASS=${arrIN[1]}
    fi
  done < "$input"
fi

##############################################################################
# If Elasticsearch security is enabled, get the users' credentials.
##############################################################################

# The user must get the credentials of the users.
# TO DO.

##############################################################################
# Set authentication for curl if Elasticsearch security is enabled.
##############################################################################

if [ ${SECURITY_ENABLED} != "no" ]; then
  auth="-uelastic:${ELASTIC_PASS} -k"
  echo "ALIASES - authentication for curl established."
elif [[ ${ENABLED_XPACK} != "true" || "x${ELASTICSEARCH_USERNAME}" = "x" || "x${ELASTICSEARCH_PASSWORD}" = "x" ]]; then
  auth=""
  echo "ALIASES - authentication for curl not established."
else
  auth="--user ${ELASTICSEARCH_USERNAME}:${ELASTICSEARCH_PASSWORD}"
  echo "ALIASES - authentication for curl established."
fi

##############################################################################
# Wait until Elasticsearch is active.
##############################################################################

until curl ${auth} -XGET $el_url; do
  >&2 echo "ALIASES - Elastic is unavailable - sleeping"
  sleep 5
done

>&2 echo "ALIASES - Elastic is up - executing command"

##############################################################################
# Add custom aliases.
##############################################################################

# The custom aliases must be added by the user.
# TO DO.
# Example
# echo "ALIASES - Add a custom alias:"
# curl ${auth} -k -XPOST -H 'Content-Type: application/json' 'https://localhost:9200/_aliases?pretty' -d'
# { "actions": [ { "add": { "index": "wazuh-alerts-3.x-*", "alias": "wazuh-alerts" } } ] }'
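A hedged check using the standard _cat API (any alias names shown are whatever the user created above):

# List aliases after the script has run.
curl ${auth} -s "$el_url/_cat/aliases?v"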
elasticsearch/config/35-load_settings_configure_s3.sh (new file, 107 lines)
@@ -0,0 +1,107 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

set -e

##############################################################################
# If secure access to Kibana is enabled, we must set the credentials for
# monitoring.
##############################################################################

ELASTIC_PASS=""

if [[ "x${SECURITY_CREDENTIALS_FILE}" == "x" ]]; then
  ELASTIC_PASS=${SECURITY_ELASTIC_PASSWORD}
else
  input=${SECURITY_CREDENTIALS_FILE}
  while IFS= read -r line
  do
    if [[ $line == *"ELASTIC_PASSWORD"* ]]; then
      arrIN=(${line//:/ })
      ELASTIC_PASS=${arrIN[1]}
    fi
  done < "$input"
fi

if [ ${SECURITY_ENABLED} != "no" ]; then
  auth="-u elastic:${ELASTIC_PASS} -k"
else
  auth=""
fi

# Check the number of arguments passed to configure_s3.sh. If it is different from 4 or 5, the process finishes with an error.
# param 1: number of arguments passed to configure_s3.sh

function CheckArgs()
{
  if [ $1 != 4 ] && [ $1 != 5 ];then
    echo "Use: configure_s3.sh <Elastic_Server_IP:Port> <Bucket> <Path> <RepositoryName> (by default <current_elasticsearch_major_version> is added to the path and the repository name)"
    echo "or use: configure_s3.sh <Elastic_Server_IP:Port> <Bucket> <Path> <RepositoryName> <Elasticsearch major version>"
    exit 1
  fi
}

# Create the S3 repository with base_path <Path>/<elasticsearch_major_version>
# (if there is no <Elasticsearch major version> argument, the current version is used).
# The repository name is <RepositoryName>-<elasticsearch_major_version>.
# param 1: number of arguments passed to configure_s3.sh
# param 2: <Elastic_Server_IP:Port>
# param 3: <Bucket>
# param 4: <Path>
# param 5: <RepositoryName>
# param 6: optional <Elasticsearch major version>
# output: it shows "acknowledged" if the repository has been successfully created

function CreateRepo()
{
  elastic_ip_port="$2"
  bucket_name="$3"
  path="$4"
  repository_name="$5"

  if [ $1 == 5 ];then
    version="$6"
  else
    version=`curl ${auth} -s $elastic_ip_port | grep number | cut -d"\"" -f4 | cut -c1`
  fi

  if ! [[ "$version" =~ ^[0-9]+$ ]];then
    echo "Elasticsearch major version must be an integer"
    exit 1
  fi

  repository="$repository_name-$version"
  s3_path="$path/$version"

  >&2 echo "Create S3 repository"

  until curl ${auth} -X PUT "$elastic_ip_port/_snapshot/$repository" -H 'Content-Type: application/json' -d' {"type": "s3", "settings": { "bucket": "'$bucket_name'", "base_path": "'$s3_path'"} }'; do
    >&2 echo "Elastic is unavailable, S3 repository not created - sleeping"
    sleep 5
  done

  >&2 echo "S3 repository created"
}

# Run the CheckArgs and CreateRepo functions.
# param 1: number of arguments passed to configure_s3.sh
# param 2: <Elastic_Server_IP:Port>
# param 3: <Bucket>
# param 4: <Path>
# param 5: <RepositoryName>
# param 6: optional <Elasticsearch major version>

function Main()
{
  CheckArgs $1

  CreateRepo $1 $2 $3 $4 $5 $6
}

Main $# $1 $2 $3 $4 $5
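A hedged usage sketch: the bucket, path and repository name below are illustrative, and it is assumed that the repository-s3 plugin installed in the Dockerfile above has valid AWS credentials available to the node.

# Create a snapshot repository "wazuh-backups-7" under s3://my-wazuh-bucket/snapshots/7
bash 35-load_settings_configure_s3.sh http://elasticsearch:9200 my-wazuh-bucket snapshots wazuh-backups 7
# Verify it:
curl -s "http://elasticsearch:9200/_snapshot/wazuh-backups-7?pretty"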
elasticsearch/config/35-load_settings_policies.sh (new file, 86 lines)
@@ -0,0 +1,86 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

set -e

##############################################################################
# Set the Elasticsearch API url.
##############################################################################

if [[ "x${ELASTICSEARCH_PROTOCOL}" = "x" || "x${ELASTICSEARCH_IP}" = "x" || "x${ELASTICSEARCH_PORT}" = "x" ]]; then
  el_url="http://elasticsearch:9200"
else
  el_url="${ELASTICSEARCH_PROTOCOL}://${ELASTICSEARCH_IP}:${ELASTICSEARCH_PORT}"
fi

echo "POLICIES - Elasticsearch url: $el_url"

##############################################################################
# If Elasticsearch security is enabled, get the elastic user password.
##############################################################################

ELASTIC_PASS=""

if [[ "x${SECURITY_CREDENTIALS_FILE}" == "x" ]]; then
  ELASTIC_PASS=${SECURITY_ELASTIC_PASSWORD}
else
  input=${SECURITY_CREDENTIALS_FILE}
  while IFS= read -r line
  do
    if [[ $line == *"ELASTIC_PASSWORD"* ]]; then
      arrIN=(${line//:/ })
      ELASTIC_PASS=${arrIN[1]}
    fi
  done < "$input"
fi

##############################################################################
# If Elasticsearch security is enabled, get the users' credentials.
##############################################################################

# The user must get the credentials of the users.
# TO DO.

##############################################################################
# Set authentication for curl if Elasticsearch security is enabled.
##############################################################################

if [ ${SECURITY_ENABLED} != "no" ]; then
  auth="-uelastic:${ELASTIC_PASS} -k"
  echo "POLICIES - authentication for curl established."
elif [[ ${ENABLED_XPACK} != "true" || "x${ELASTICSEARCH_USERNAME}" = "x" || "x${ELASTICSEARCH_PASSWORD}" = "x" ]]; then
  auth=""
  echo "POLICIES - authentication for curl not established."
else
  auth="--user ${ELASTICSEARCH_USERNAME}:${ELASTICSEARCH_PASSWORD}"
  echo "POLICIES - authentication for curl established."
fi

##############################################################################
# Wait until Elasticsearch is active.
##############################################################################

until curl ${auth} -XGET $el_url; do
  >&2 echo "POLICIES - Elastic is unavailable - sleeping"
  sleep 5
done

>&2 echo "POLICIES - Elastic is up - executing command"

##############################################################################
# Add custom policies.
##############################################################################

# The custom policies must be added by the user.
# TO DO.
# Example
# echo "POLICIES - Add a custom ILM policy:"
# curl ${auth} -k -XPOST -H 'Content-Type: application/json' 'https://localhost:9200/_ilm/policy/my_policy?pretty' -d'
# { "policy": { "phases": { "hot": { "actions": { "rollover": {"max_size": "50GB", "max_age": "5m"}}}}}}'
elasticsearch/config/35-load_settings_templates.sh (new file, 81 lines)
@@ -0,0 +1,81 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

set -e

##############################################################################
# Set the Elasticsearch API url.
##############################################################################

if [[ "x${ELASTICSEARCH_PROTOCOL}" = "x" || "x${ELASTICSEARCH_IP}" = "x" || "x${ELASTICSEARCH_PORT}" = "x" ]]; then
  el_url="http://elasticsearch:9200"
else
  el_url="${ELASTICSEARCH_PROTOCOL}://${ELASTICSEARCH_IP}:${ELASTICSEARCH_PORT}"
fi

echo "TEMPLATES - Elasticsearch url: $el_url"

##############################################################################
# If Elasticsearch security is enabled, get the elastic user password.
##############################################################################

ELASTIC_PASS=""

if [[ "x${SECURITY_CREDENTIALS_FILE}" == "x" ]]; then
  ELASTIC_PASS=${SECURITY_ELASTIC_PASSWORD}
else
  input=${SECURITY_CREDENTIALS_FILE}
  while IFS= read -r line
  do
    if [[ $line == *"ELASTIC_PASSWORD"* ]]; then
      arrIN=(${line//:/ })
      ELASTIC_PASS=${arrIN[1]}
    fi
  done < "$input"
fi

##############################################################################
# If Elasticsearch security is enabled, get the users' credentials.
##############################################################################

# The user must get the credentials of the users.
# TO DO.

##############################################################################
# Set authentication for curl if Elasticsearch security is enabled.
##############################################################################

if [ ${SECURITY_ENABLED} != "no" ]; then
  auth="-uelastic:${ELASTIC_PASS} -k"
  echo "TEMPLATES - authentication for curl established."
elif [[ ${ENABLED_XPACK} != "true" || "x${ELASTICSEARCH_USERNAME}" = "x" || "x${ELASTICSEARCH_PASSWORD}" = "x" ]]; then
  auth=""
  echo "TEMPLATES - authentication for curl not established."
else
  auth="--user ${ELASTICSEARCH_USERNAME}:${ELASTICSEARCH_PASSWORD}"
  echo "TEMPLATES - authentication for curl established."
fi

##############################################################################
# Wait until Elasticsearch is active.
##############################################################################

until curl ${auth} -XGET $el_url; do
  >&2 echo "TEMPLATES - Elastic is unavailable - sleeping"
  sleep 5
done

>&2 echo "TEMPLATES - Elastic is up - executing command"

##############################################################################
# Add the wazuh-alerts template.
##############################################################################

echo "TEMPLATES - Loading default wazuh-alerts template."
cat /usr/share/elasticsearch/config/wazuh-template.json | curl -XPUT "$el_url/_template/wazuh" ${auth} -H 'Content-Type: application/json' -d @-
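A hedged verification using the standard _cat API:

# Confirm the wazuh template was accepted.
curl ${auth} -s "$el_url/_cat/templates/wazuh?v"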
elasticsearch/config/35-load_settings_users_management.sh (new file, 100 lines)
@@ -0,0 +1,100 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

set -e

##############################################################################
# Set the Elasticsearch API url.
##############################################################################

if [[ "x${ELASTICSEARCH_PROTOCOL}" = "x" || "x${ELASTICSEARCH_IP}" = "x" || "x${ELASTICSEARCH_PORT}" = "x" ]]; then
  el_url="http://elasticsearch:9200"
else
  el_url="${ELASTICSEARCH_PROTOCOL}://${ELASTICSEARCH_IP}:${ELASTICSEARCH_PORT}"
fi

echo "USERS - Elasticsearch url: $el_url"

##############################################################################
# If Elasticsearch security is enabled, get the elastic user password.
##############################################################################

ELASTIC_PASS=""

if [[ "x${SECURITY_CREDENTIALS_FILE}" == "x" ]]; then
  ELASTIC_PASS=${SECURITY_ELASTIC_PASSWORD}
else
  input=${SECURITY_CREDENTIALS_FILE}
  while IFS= read -r line
  do
    if [[ $line == *"ELASTIC_PASSWORD"* ]]; then
      arrIN=(${line//:/ })
      ELASTIC_PASS=${arrIN[1]}
    fi
  done < "$input"
fi

##############################################################################
# If Elasticsearch security is enabled, get the users' credentials.
##############################################################################

# The user must get the credentials of the users.
# TO DO.

##############################################################################
# Set authentication for curl if Elasticsearch security is enabled.
##############################################################################

if [ ${SECURITY_ENABLED} != "no" ]; then
  auth="-uelastic:${ELASTIC_PASS} -k"
  echo "USERS - authentication for curl established."
elif [[ ${ENABLED_XPACK} != "true" || "x${ELASTICSEARCH_USERNAME}" = "x" || "x${ELASTICSEARCH_PASSWORD}" = "x" ]]; then
  auth=""
  echo "USERS - authentication for curl not established."
else
  auth="--user ${ELASTICSEARCH_USERNAME}:${ELASTICSEARCH_PASSWORD}"
  echo "USERS - authentication for curl established."
fi

##############################################################################
# Wait until Elasticsearch is active.
##############################################################################

until curl ${auth} -XGET $el_url; do
  >&2 echo "USERS - Elastic is unavailable - sleeping"
  sleep 5
done

>&2 echo "USERS - Elastic is up - executing command"

##############################################################################
# Set up passwords for the Elastic Stack users.
##############################################################################

# The user must add the credentials of the users.
# TO DO.
# Example
# echo "USERS - Add custom_user password and role:"
# curl ${auth} -k -XPOST -H 'Content-Type: application/json' 'https://localhost:9200/_xpack/security/role/custom_user_role' -d '
# { "indices": [ { "names": [ ".kibana*" ], "privileges": ["read"] }, { "names": [ "wazuh-monitoring*"], "privileges": ["all"] }] }'
# curl ${auth} -k -XPOST -H 'Content-Type: application/json' 'https://localhost:9200/_xpack/security/user/custom_user' -d '
# { "password":"'$CUSTOM_USER_PASSWORD'", "roles" : [ "kibana_system", "custom_user_role"], "full_name" : "Custom User" }'

##############################################################################
# Remove the credentials file.
##############################################################################

if [[ "x${SECURITY_CREDENTIALS_FILE}" == "x" ]]; then
  echo "USERS - Security credentials file not used. Nothing to do."
else
  shred -zvu ${SECURITY_CREDENTIALS_FILE}
  echo "USERS - Security credentials file removed."
fi
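A hedged sketch of a related step that a user might add here, in the same commented style and using the same _xpack/security endpoint family as the example above (the user name and the KIBANA_PASS variable are assumptions, not values defined in this script):

# Set the password of the built-in kibana user once the elastic password works.
# curl ${auth} -k -XPOST -H 'Content-Type: application/json' \
#   "$el_url/_xpack/security/user/kibana/_password" -d '{ "password": "'$KIBANA_PASS'" }'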
elasticsearch/config/TEST_openssl.cnf (new file, 132 lines)
@@ -0,0 +1,132 @@
# OpenSSL intermediate CA configuration file.
# Copy to `/root/ca/intermediate/openssl.cnf`.

[ ca ]
# `man ca`
default_ca = CA_default

[ CA_default ]
# Directory and file locations.
dir               = /root/ca/intermediate
certs             = $dir/certs
crl_dir           = $dir/crl
new_certs_dir     = $dir/newcerts
database          = $dir/index.txt
serial            = $dir/serial
RANDFILE          = $dir/private/.rand

# The root key and root certificate.
private_key       = $dir/private/intermediate.key.pem
certificate       = $dir/certs/intermediate.cert.pem

# For certificate revocation lists.
crlnumber         = $dir/crlnumber
crl               = $dir/crl/intermediate.crl.pem
crl_extensions    = crl_ext
default_crl_days  = 30

# SHA-1 is deprecated, so use SHA-2 instead.
default_md        = sha256

name_opt          = ca_default
cert_opt          = ca_default
default_days      = 375
preserve          = no
policy            = policy_loose

[ policy_strict ]
# The root CA should only sign intermediate certificates that match.
# See the POLICY FORMAT section of `man ca`.
countryName             = match
stateOrProvinceName     = match
organizationName        = match
organizationalUnitName  = optional
commonName              = supplied
emailAddress            = optional

[ policy_loose ]
# Allow the intermediate CA to sign a more diverse range of certificates.
# See the POLICY FORMAT section of the `ca` man page.
countryName             = optional
stateOrProvinceName     = optional
localityName            = optional
organizationName        = optional
organizationalUnitName  = optional
commonName              = supplied
emailAddress            = optional

[ req ]
# Options for the `req` tool (`man req`).
default_bits        = 2048
distinguished_name  = req_distinguished_name
string_mask         = utf8only

# SHA-1 is deprecated, so use SHA-2 instead.
default_md          = sha256

# Extension to add when the -x509 option is used.
x509_extensions     = v3_ca

[ req_distinguished_name ]
# See <https://en.wikipedia.org/wiki/Certificate_signing_request>.
countryName                     = Country Name (2 letter code)
stateOrProvinceName             = State or Province Name
localityName                    = Locality Name
0.organizationName              = Organization Name
organizationalUnitName          = Organizational Unit Name
commonName                      = Common Name
emailAddress                    = Email Address

# Optionally, specify some defaults.
countryName_default             = GB
stateOrProvinceName_default     = England
localityName_default            =
0.organizationName_default      = Alice Ltd
organizationalUnitName_default  =
emailAddress_default            =

[ v3_ca ]
# Extensions for a typical CA (`man x509v3_config`).
subjectKeyIdentifier   = hash
authorityKeyIdentifier = keyid:always,issuer
basicConstraints       = critical, CA:true
keyUsage               = critical, digitalSignature, cRLSign, keyCertSign

[ v3_intermediate_ca ]
# Extensions for a typical intermediate CA (`man x509v3_config`).
subjectKeyIdentifier   = hash
authorityKeyIdentifier = keyid:always,issuer
basicConstraints       = critical, CA:true, pathlen:0
keyUsage               = critical, digitalSignature, cRLSign, keyCertSign

[ usr_cert ]
# Extensions for client certificates (`man x509v3_config`).
basicConstraints       = CA:FALSE
nsCertType             = client, email
nsComment              = "OpenSSL Generated Client Certificate"
subjectKeyIdentifier   = hash
authorityKeyIdentifier = keyid,issuer
keyUsage               = critical, nonRepudiation, digitalSignature, keyEncipherment
extendedKeyUsage       = clientAuth, emailProtection

[ server_cert ]
# Extensions for server certificates (`man x509v3_config`).
basicConstraints       = CA:FALSE
nsCertType             = server
nsComment              = "OpenSSL Generated Server Certificate"
subjectKeyIdentifier   = hash
authorityKeyIdentifier = keyid,issuer:always
keyUsage               = critical, digitalSignature, keyEncipherment
extendedKeyUsage       = serverAuth

[ crl_ext ]
# Extension for CRLs (`man x509v3_config`).
authorityKeyIdentifier = keyid:always

[ ocsp ]
# Extension for OCSP signing certificates (`man ocsp`).
basicConstraints       = CA:FALSE
subjectKeyIdentifier   = hash
authorityKeyIdentifier = keyid,issuer
keyUsage               = critical, digitalSignature
extendedKeyUsage       = critical, OCSPSigning
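A hedged sketch of how this test configuration could be exercised to issue a server certificate for the transport layer; the directory layout under /root/ca/intermediate and the file names are assumptions taken from the comments in the file, not commands shipped in this repository.

# Generate a key and CSR, then sign the CSR with the intermediate CA using the server_cert extensions.
openssl genrsa -out /root/ca/intermediate/private/elasticsearch.key.pem 2048
openssl req -config TEST_openssl.cnf -new -sha256 \
  -key /root/ca/intermediate/private/elasticsearch.key.pem \
  -out /root/ca/intermediate/csr/elasticsearch.csr.pem
openssl ca -config TEST_openssl.cnf -extensions server_cert -days 375 -notext -md sha256 \
  -in /root/ca/intermediate/csr/elasticsearch.csr.pem \
  -out /root/ca/intermediate/certs/elasticsearch.cert.pem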
elasticsearch/config/entrypoint.sh (new file, 8 lines)
@@ -0,0 +1,8 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

# Run every .sh script located in the entrypoint-scripts folder, in lexicographical order.
for script in `ls /entrypoint-scripts/*.sh | sort -n`; do
  bash "$script"
done
Binary image files changed (81 KiB and 86 KiB); content not shown.
kibana/Dockerfile (modified)
@@ -1,7 +1,101 @@
FROM kibana:5.4.2
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)
FROM docker.elastic.co/kibana/kibana:7.4.2
ARG ELASTIC_VERSION=7.4.2
ARG WAZUH_VERSION=3.11.5
ARG WAZUH_APP_VERSION="${WAZUH_VERSION}_${ELASTIC_VERSION}"

RUN apt-get update && apt-get install -y curl
USER root

COPY ./config/kibana.yml /opt/kibana/config/kibana.yml
COPY config/wazuhapp-${WAZUH_APP_VERSION}.zip /tmp
USER kibana
#RUN /usr/share/kibana/bin/kibana-plugin install --allow-root https://packages.wazuh.com/wazuhapp/wazuhapp-${WAZUH_APP_VERSION}.zip
RUN /usr/share/kibana/bin/kibana-plugin install --allow-root file:///tmp/wazuhapp-${WAZUH_APP_VERSION}.zip
USER root
RUN rm -rf /tmp/wazuhapp-${WAZUH_APP_VERSION}.zip

COPY config/wait-for-it.sh /
COPY config/entrypoint.sh ./entrypoint.sh
RUN chmod 755 ./entrypoint.sh
RUN mkdir /entrypoint-scripts

USER kibana

ENV CONFIGURATION_FROM_FILE="false"

ENV PATTERN="" \
    CHECKS_PATTERN="" \
    CHECKS_TEMPLATE="" \
    CHECKS_API="" \
    CHECKS_SETUP="" \
    EXTENSIONS_PCI="" \
    EXTENSIONS_GDPR="" \
    EXTENSIONS_AUDIT="" \
    EXTENSIONS_OSCAP="" \
    EXTENSIONS_CISCAT="" \
    EXTENSIONS_AWS="" \
    EXTENSIONS_VIRUSTOTAL="" \
    EXTENSIONS_OSQUERY="" \
    APP_TIMEOUT="" \
    WAZUH_SHARDS="" \
    WAZUH_REPLICAS="" \
    WAZUH_VERSION_SHARDS="" \
    WAZUH_VERSION_REPLICAS="" \
    IP_SELECTOR="" \
    IP_IGNORE="" \
    XPACK_RBAC_ENABLED="" \
    WAZUH_MONITORING_ENABLED="" \
    WAZUH_MONITORING_FREQUENCY="" \
    WAZUH_MONITORING_SHARDS="" \
    WAZUH_MONITORING_REPLICAS="" \
    ADMIN_PRIVILEGES="" \
    API_SELECTOR=""

ARG XPACK_CANVAS="false"
ARG XPACK_LOGS="false"
ARG XPACK_INFRA="false"
ARG XPACK_ML="false"
ARG XPACK_DEVTOOLS="false"
ARG XPACK_MONITORING="false"
ARG XPACK_APM="false"
ARG XPACK_MAPS="false"
ARG XPACK_UPTIME="false"
ARG XPACK_SIEM="false"

ARG CHANGE_WELCOME="true"

COPY --chown=kibana:kibana ./config/05-decrypt_credentials.sh /entrypoint-scripts/05-decrypt_credentials.sh
COPY --chown=kibana:kibana ./config/10-wazuh_app_config.sh /entrypoint-scripts/10-wazuh_app_config.sh
COPY --chown=kibana:kibana ./config/12-custom_logos.sh /entrypoint-scripts/12-custom_logos.sh
COPY --chown=kibana:kibana ./config/20-entrypoint.sh /entrypoint-scripts/20-entrypoint.sh
COPY --chown=kibana:kibana ./config/20-entrypoint_kibana_settings.sh ./
COPY --chown=kibana:kibana ./config/20-entrypoint_certs_management.sh ./
RUN chmod +x /entrypoint-scripts/05-decrypt_credentials.sh && \
    chmod +x /entrypoint-scripts/10-wazuh_app_config.sh && \
    chmod +x /entrypoint-scripts/12-custom_logos.sh && \
    chmod +x /entrypoint-scripts/20-entrypoint.sh && \
    chmod +x ./20-entrypoint_kibana_settings.sh && \
    chmod +x ./20-entrypoint_certs_management.sh

COPY --chown=kibana:kibana ./config/xpack_config.sh ./

RUN chmod +x ./xpack_config.sh

RUN ./xpack_config.sh

COPY --chown=kibana:kibana ./config/welcome_wazuh.sh ./

RUN chmod +x ./welcome_wazuh.sh

RUN ./welcome_wazuh.sh

RUN /usr/local/bin/kibana-docker --optimize

USER root

RUN chmod 660 /usr/share/kibana/plugins/wazuh/wazuh.yml && \
    chmod 775 /usr/share/kibana/plugins/wazuh && \
    chown root:kibana /usr/share/kibana/plugins/wazuh/wazuh.yml && \
    chown root:kibana /usr/share/kibana/plugins/wazuh

USER kibana

ENTRYPOINT ./entrypoint.sh
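A hedged build sketch: the Dockerfile above expects the Wazuh app package to already exist under kibana/config/, and the image tag below is an illustrative assumption (the download URL matches the commented kibana-plugin line in the Dockerfile).

# Fetch the app package and build the Kibana image with a couple of the build arguments declared above.
curl -o kibana/config/wazuhapp-3.11.5_7.4.2.zip \
  https://packages.wazuh.com/wazuhapp/wazuhapp-3.11.5_7.4.2.zip
docker build \
  --build-arg ELASTIC_VERSION=7.4.2 \
  --build-arg WAZUH_VERSION=3.11.5 \
  --build-arg XPACK_ML=false \
  -t wazuh/kibana:3.11.5_7.4.2 \
  kibana/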
kibana/config/05-decrypt_credentials.sh (new file, 15 lines)
@@ -0,0 +1,15 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

##############################################################################
# Decrypt credentials.
# If the credentials of the users to be created are encrypted,
# they must be decrypted for later use.
##############################################################################

if [[ "x${SECURITY_CREDENTIALS_FILE}" == "x" ]]; then
  echo "Security credentials file not used. Nothing to do."
else
  echo "TO DO"
fi
# TO DO
kibana/config/10-wazuh_app_config.sh (new file, 115 lines)
@@ -0,0 +1,115 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2020 Wazuh Inc. (License GPLv2)

##############################################################################
# If Elasticsearch security is enabled, get the Kibana user, the Kibana
# password and the Wazuh API credentials.
##############################################################################

KIBANA_USER=""
KIBANA_PASS=""
WAZH_API_USER=""
WAZH_API_PASS=""

if [[ "x${SECURITY_CREDENTIALS_FILE}" == "x" ]]; then
  KIBANA_USER=${SECURITY_KIBANA_USER}
  KIBANA_PASS=${SECURITY_KIBANA_PASS}
  WAZH_API_USER=${API_USER}
  WAZH_API_PASS=${API_PASS}
  echo "USERS - Credentials obtained from environment variables."
else
  input=${SECURITY_CREDENTIALS_FILE}
  while IFS= read -r line
  do
    if [[ $line == *"KIBANA_USER"* ]]; then
      arrIN=(${line//:/ })
      KIBANA_USER=${arrIN[1]}
    elif [[ $line == *"KIBANA_PASSWORD"* ]]; then
      arrIN=(${line//:/ })
      KIBANA_PASS=${arrIN[1]}
    elif [[ $line == *"WAZUH_API_USER"* ]]; then
      arrIN=(${line//:/ })
      WAZH_API_USER=${arrIN[1]}
    elif [[ $line == *"WAZUH_API_PASSWORD"* ]]; then
      arrIN=(${line//:/ })
      WAZH_API_PASS=${arrIN[1]}
    fi
  done < "$input"
  echo "USERS - Credentials obtained from file."
fi

##############################################################################
# Establish the way to run the curl command, with or without authentication.
##############################################################################

if [ ${SECURITY_ENABLED} != "no" ]; then
  auth="-u ${KIBANA_USER}:${KIBANA_PASS} -k"
elif [[ ${ENABLED_XPACK} != "true" || "x${ELASTICSEARCH_USERNAME}" = "x" || "x${ELASTICSEARCH_PASSWORD}" = "x" ]]; then
  auth=""
else
  auth="--user ${ELASTICSEARCH_USERNAME}:${ELASTICSEARCH_PASSWORD}"
fi

##############################################################################
# Set custom wazuh.yml config.
##############################################################################

kibana_config_file="/usr/share/kibana/plugins/wazuh/wazuh.yml"

declare -A CONFIG_MAP=(
  [pattern]=$PATTERN
  [checks.pattern]=$CHECKS_PATTERN
  [checks.template]=$CHECKS_TEMPLATE
  [checks.api]=$CHECKS_API
  [checks.setup]=$CHECKS_SETUP
  [extensions.pci]=$EXTENSIONS_PCI
  [extensions.gdpr]=$EXTENSIONS_GDPR
  [extensions.audit]=$EXTENSIONS_AUDIT
  [extensions.oscap]=$EXTENSIONS_OSCAP
  [extensions.ciscat]=$EXTENSIONS_CISCAT
  [extensions.aws]=$EXTENSIONS_AWS
  [extensions.virustotal]=$EXTENSIONS_VIRUSTOTAL
  [extensions.osquery]=$EXTENSIONS_OSQUERY
  [timeout]=$APP_TIMEOUT
  [wazuh.shards]=$WAZUH_SHARDS
  [wazuh.replicas]=$WAZUH_REPLICAS
  [wazuh-version.shards]=$WAZUH_VERSION_SHARDS
  [wazuh-version.replicas]=$WAZUH_VERSION_REPLICAS
  [ip.selector]=$IP_SELECTOR
  [ip.ignore]=$IP_IGNORE
  [xpack.rbac.enabled]=$XPACK_RBAC_ENABLED
  [wazuh.monitoring.enabled]=$WAZUH_MONITORING_ENABLED
  [wazuh.monitoring.frequency]=$WAZUH_MONITORING_FREQUENCY
  [wazuh.monitoring.shards]=$WAZUH_MONITORING_SHARDS
  [wazuh.monitoring.replicas]=$WAZUH_MONITORING_REPLICAS
  [admin]=$ADMIN_PRIVILEGES
  [api.selector]=$API_SELECTOR
)

for i in "${!CONFIG_MAP[@]}"
do
  if [ "${CONFIG_MAP[$i]}" != "" ]; then
    sed -i 's/.*#'"$i"'.*/'"$i"': '"${CONFIG_MAP[$i]}"'/' $kibana_config_file
  fi
done

# Remove the default API entry (new in 3.11.0_7.5.1).
sed -ie '/- default:/,+4d' $kibana_config_file

# If this is an update to 3.11
CONFIG_CODE=$(curl -s -o /dev/null -w "%{http_code}" -XGET $ELASTICSEARCH_URL/.wazuh/_doc/1513629884013 ${auth})

grep -q 1513629884013 $kibana_config_file
_config_exists=$?

if [[ "x$CONFIG_CODE" != "x200" && $_config_exists -ne 0 ]]; then
  cat << EOF >> $kibana_config_file
- 1:
   url: https://wazuh
   port: 55000
   user: $WAZH_API_USER
   password: $WAZH_API_PASS
EOF
else
  echo "Wazuh APP already configured"
fi
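A hedged run-time sketch (the values and image tag are illustrative): any non-empty variable from the CONFIG_MAP above replaces the matching commented key in wazuh.yml.

# Starting the container with overrides...
docker run -d -e PATTERN="wazuh-alerts-3.x-*" -e WAZUH_MONITORING_ENABLED="true" wazuh/kibana:3.11.5_7.4.2
# ...turns the commented defaults in /usr/share/kibana/plugins/wazuh/wazuh.yml into:
#   pattern: wazuh-alerts-3.x-*
#   wazuh.monitoring.enabled: true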
kibana/config/12-custom_logos.sh (new file, 14 lines)
@@ -0,0 +1,14 @@
#!/bin/bash
# Wazuh App Copyright (C) 2019 Wazuh Inc. (License GPLv2)

##############################################################################
# Kibana logos
##############################################################################

if [[ $CUSTOM_LOGO == "true" ]]; then

  echo "CUSTOM LOGO - Change Kibana logos."
  # TO DO

fi
126
kibana/config/20-entrypoint.sh
Normal file
126
kibana/config/20-entrypoint.sh
Normal file
@@ -0,0 +1,126 @@
|
||||
#!/bin/bash
|
||||
# Wazuh App Copyright (C) 2019 Wazuh Inc. (License GPLv2)
|
||||
|
||||
set -e
|
||||
|
||||
##############################################################################
|
||||
# Set Elasticsearch API url.
|
||||
##############################################################################
|
||||
|
||||
if [ "x${ELASTICSEARCH_URL}" = "x" ]; then
|
||||
el_url="http://elasticsearch:9200"
|
||||
else
|
||||
el_url="${ELASTICSEARCH_URL}"
|
||||
fi
|
||||
|
||||
echo "ENTRYPOINT - Set Elasticsearc url:${ELASTICSEARCH_URL}"
|
||||
|
||||
|
||||
##############################################################################
|
||||
# If there are credentials for Kibana they are obtained.
|
||||
##############################################################################
|
||||
|
||||
KIBANA_USER=""
|
||||
KIBANA_PASS=""
|
||||
|
||||
if [[ "x${SECURITY_CREDENTIALS_FILE}" == "x" ]]; then
|
||||
KIBANA_USER=${SECURITY_KIBANA_USER}
|
||||
KIBANA_PASS=${SECURITY_KIBANA_PASS}
|
||||
else
|
||||
input=${SECURITY_CREDENTIALS_FILE}
|
||||
while IFS= read -r line
|
||||
do
|
||||
if [[ $line == *"KIBANA_PASSWORD"* ]]; then
|
||||
arrIN=(${line//:/ })
|
||||
KIBANA_PASS=${arrIN[1]}
|
||||
elif [[ $line == *"KIBANA_USER"* ]]; then
|
||||
arrIN=(${line//:/ })
|
||||
KIBANA_USER=${arrIN[1]}
|
||||
fi
|
||||
done < "$input"
|
||||
|
||||
fi
|
||||
|
||||
echo "ENTRYPOINT - Kibana credentials obtained."
|
||||
|
||||
##############################################################################
|
||||
# Establish the way to run the curl command, with or without authentication.
|
||||
##############################################################################
|
||||
|
||||
if [ ${SECURITY_ENABLED} != "no" ]; then
|
||||
auth="-u ${KIBANA_USER}:${KIBANA_PASS} -k"
|
||||
elif [ ${ENABLED_XPACK} != "true" || "x${ELASTICSEARCH_USERNAME}" = "x" || "x${ELASTICSEARCH_PASSWORD}" = "x" ]; then
|
||||
auth=""
|
||||
else
|
||||
auth="--user ${ELASTICSEARCH_USERNAME}:${ELASTICSEARCH_PASSWORD}"
|
||||
fi
|
||||
|
||||
echo "ENTRYPOINT - Kibana authentication established."
|
||||
|
||||
##############################################################################
|
||||
# Waiting for elasticsearch.
|
||||
##############################################################################
|
||||
|
||||
until curl -XGET $el_url ${auth}; do
|
||||
>&2 echo "ENTRYPOINT - Elastic is unavailable: sleeping"
|
||||
sleep 5
|
||||
done
|
||||
|
||||
sleep 2
|
||||
|
||||
>&2 echo "ENTRYPOINT - Elasticsearch is up."
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Waiting for wazuh alerts template.
|
||||
##############################################################################
|
||||
|
||||
strlen=0
|
||||
|
||||
while [[ $strlen -eq 0 ]]
|
||||
do
|
||||
template=$(curl $auth $el_url/_cat/templates/wazuh -s)
|
||||
strlen=${#template}
|
||||
>&2 echo "ENTRYPOINT - Wazuh alerts template not loaded - sleeping."
|
||||
sleep 2
|
||||
done
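# Illustrative note (not part of the original script): _cat/templates/wazuh returns an
# empty body while no matching index template exists, so the loop exits only once the
# Wazuh alerts template has been loaded into Elasticsearch.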
|
||||
|
||||
sleep 2
|
||||
|
||||
>&2 echo "ENTRYPOINT - Wazuh alerts template is loaded."
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Create keystore if security is enabled.
|
||||
##############################################################################
|
||||
|
||||
if [[ $SECURITY_ENABLED == "yes" ]]; then
|
||||
|
||||
echo "ENTRYPOINT - Create Keystore."
|
||||
/usr/share/kibana/bin/kibana-keystore create
|
||||
# Add keys to keystore
|
||||
echo -e "$KIBANA_PASS" | /usr/share/kibana/bin/kibana-keystore add elasticsearch.password --stdin
|
||||
echo -e "$KIBANA_USER" | /usr/share/kibana/bin/kibana-keystore add elasticsearch.username --stdin
|
||||
|
||||
echo "ENTRYPOINT - Keystore created."
|
||||
fi
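# Illustrative check (not part of the original script): the stored entries can be
# listed with the same tool, e.g.:
#   /usr/share/kibana/bin/kibana-keystore list
# which should print elasticsearch.username and elasticsearch.password.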
|
||||
|
||||
##############################################################################
|
||||
# If security is enabled set Kibana configuration.
|
||||
# Create the ssl certificate.
|
||||
##############################################################################
|
||||
|
||||
if [[ $SECURITY_ENABLED == "yes" ]]; then
|
||||
|
||||
bash /usr/share/kibana/20-entrypoint_certs_management.sh
|
||||
|
||||
fi
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Run kibana_settings.sh script.
|
||||
##############################################################################
|
||||
|
||||
bash /usr/share/kibana/20-entrypoint_kibana_settings.sh &
|
||||
|
||||
/usr/local/bin/kibana-docker
|
14
kibana/config/20-entrypoint_certs_management.sh
Normal file
@@ -0,0 +1,14 @@
#!/bin/bash
# Wazuh App Copyright (C) 2019 Wazuh Inc. (License GPLv2)

##############################################################################
# Kibana certs and keystore management
##############################################################################

if [[ $SECURITY_ENABLED == "yes" ]]; then


echo "CERTS_MANAGEMENT - Create certificates. TO DO."
# TO DO

fi
183
kibana/config/20-entrypoint_kibana_settings.sh
Normal file
@@ -0,0 +1,183 @@
|
||||
#!/bin/bash
|
||||
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)
|
||||
|
||||
WAZUH_MAJOR=3
|
||||
|
||||
##############################################################################
|
||||
# Wait for the Kibana API to start. It is necessary to do it in this container
|
||||
# because the others are running Elastic Stack and we can not interrupt them.
|
||||
#
|
||||
# The following actions are performed:
|
||||
#
|
||||
# Add the wazuh alerts index as default.
|
||||
# Set the Discover time interval to 24 hours instead of 15 minutes.
|
||||
# Do not ask user to help providing usage statistics to Elastic.
|
||||
##############################################################################
|
||||
|
||||
##############################################################################
|
||||
# Customize elasticsearch ip
|
||||
##############################################################################
|
||||
if [[ "$ELASTICSEARCH_KIBANA_IP" != "" && "$CONFIGURATION_FROM_FILE" == "false" ]]; then
|
||||
sed -i "s:#elasticsearch.hosts:elasticsearch.hosts:g" /usr/share/kibana/config/kibana.yml
|
||||
sed -i 's|http://elasticsearch:9200|'$ELASTICSEARCH_KIBANA_IP'|g' /usr/share/kibana/config/kibana.yml
|
||||
fi
|
||||
|
||||
echo "SETTINGS - Update Elasticsearch host."
|
||||
|
||||
# If KIBANA_INDEX was set, then change the default index in kibana.yml configuration file. If there was an index, then delete it and recreate.
|
||||
if [[ "$KIBANA_INDEX" != "" && "$CONFIGURATION_FROM_FILE" == "false" ]]; then
|
||||
if grep -q 'kibana.index' /usr/share/kibana/config/kibana.yml; then
|
||||
sed -i '/kibana.index/d' /usr/share/kibana/config/kibana.yml
|
||||
fi
|
||||
echo "kibana.index: $KIBANA_INDEX" >> /usr/share/kibana/config/kibana.yml
|
||||
fi
|
||||
|
||||
# If XPACK_SECURITY_ENABLED was set, then change the xpack.security.enabled option from true (default) to false.
|
||||
if [[ "$XPACK_SECURITY_ENABLED" != "" && "$CONFIGURATION_FROM_FILE" == "false" ]]; then
|
||||
if grep -q 'xpack.security.enabled' /usr/share/kibana/config/kibana.yml; then
|
||||
sed -i '/xpack.security.enabled/d' /usr/share/kibana/config/kibana.yml
|
||||
fi
|
||||
echo "xpack.security.enabled: $XPACK_SECURITY_ENABLED" >> /usr/share/kibana/config/kibana.yml
|
||||
fi
|
||||
|
||||
##############################################################################
|
||||
# Get Kibana credentials
|
||||
##############################################################################
|
||||
|
||||
if [ "$KIBANA_IP" != "" ]; then
|
||||
kibana_ip="$KIBANA_IP"
|
||||
else
|
||||
kibana_ip="kibana"
|
||||
fi
|
||||
|
||||
KIBANA_USER=""
|
||||
KIBANA_PASS=""
|
||||
|
||||
if [[ "x${SECURITY_CREDENTIALS_FILE}" == "x" ]]; then
|
||||
KIBANA_USER=${SECURITY_KIBANA_USER}
|
||||
KIBANA_PASS=${SECURITY_KIBANA_PASS}
|
||||
else
|
||||
input=${SECURITY_CREDENTIALS_FILE}
|
||||
while IFS= read -r line
|
||||
do
|
||||
if [[ $line == *"KIBANA_PASSWORD"* ]]; then
|
||||
arrIN=(${line//:/ })
|
||||
KIBANA_PASS=${arrIN[1]}
|
||||
elif [[ $line == *"KIBANA_USER"* ]]; then
|
||||
arrIN=(${line//:/ })
|
||||
KIBANA_USER=${arrIN[1]}
|
||||
fi
|
||||
done < "$input"
|
||||
|
||||
fi
|
||||
|
||||
echo "SETTINGS - Kibana credentials obtained."
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Set url authentication.
|
||||
##############################################################################
|
||||
|
||||
if [[ "${SECURITY_ENABLED}" != "no" ]]; then
|
||||
auth="-k -u $KIBANA_USER:${KIBANA_PASS}"
|
||||
kibana_secure_ip="https://$kibana_ip"
|
||||
else
|
||||
auth=""
|
||||
kibana_secure_ip="http://$kibana_ip"
|
||||
fi
|
||||
|
||||
echo "SETTINGS - Kibana authentication established."
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Waiting for Kibana.
|
||||
##############################################################################
|
||||
|
||||
while [[ "$(curl $auth -XGET -I -s -o /dev/null -w ''%{http_code}'' $kibana_secure_ip:5601/status)" != "200" ]]; do
|
||||
echo "SETTINGS - Waiting for Kibana API. Sleeping 5 seconds"
|
||||
sleep 5
|
||||
done
|
||||
|
||||
echo "SETTINGS - Kibana API is running"
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Prepare index selection.
|
||||
##############################################################################
|
||||
|
||||
echo "SETTINGS - Prepare index selection."
|
||||
|
||||
default_index="/tmp/default_index.json"
|
||||
|
||||
if [[ $PATTERN == "" ]]; then
|
||||
|
||||
cat > ${default_index} << EOF
|
||||
{
|
||||
"changes": {
|
||||
"defaultIndex": "wazuh-alerts-${WAZUH_MAJOR}.x-*"
|
||||
}
|
||||
}
|
||||
EOF
|
||||
|
||||
else
|
||||
|
||||
cat > ${default_index} << EOF
|
||||
{
|
||||
"changes": {
|
||||
"defaultIndex": "$PATTERN"
|
||||
}
|
||||
}
|
||||
EOF
|
||||
|
||||
fi
|
||||
|
||||
|
||||
sleep 5
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Add the wazuh alerts index as default.
|
||||
##############################################################################
|
||||
|
||||
echo "SETTINGS - Add the wazuh alerts index as default."
|
||||
|
||||
curl $auth -POST "$kibana_secure_ip:5601/api/kibana/settings" -H "Content-Type: application/json" -H "kbn-xsrf: true" -d@${default_index}
|
||||
rm -f ${default_index}
|
||||
|
||||
sleep 5
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Configuring Kibana TimePicker.
|
||||
##############################################################################
|
||||
|
||||
echo "SETTINGS - Configuring Kibana TimePicker."
|
||||
|
||||
curl $auth -POST "$kibana_secure_ip:5601/api/kibana/settings" -H "Content-Type: application/json" -H "kbn-xsrf: true" -d \
|
||||
'{"changes":{"timepicker:timeDefaults":"{\n \"from\": \"now-24h\",\n \"to\": \"now\",\n \"mode\": \"quick\"}"}}'
|
||||
|
||||
sleep 5
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Do not ask user to help providing usage statistics to Elastic.
|
||||
##############################################################################
|
||||
|
||||
echo "SETTINGS - Do not ask user to help providing usage statistics to Elastic."
|
||||
|
||||
curl $auth -POST "$kibana_secure_ip:5601/api/telemetry/v2/optIn" -H "Content-Type: application/json" -H "kbn-xsrf: true" -d '{"enabled":false}'
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Remove credentials file.
|
||||
##############################################################################
|
||||
|
||||
echo "SETTINGS - Remove credentials file."
|
||||
|
||||
if [[ "x${SECURITY_CREDENTIALS_FILE}" == "x" ]]; then
|
||||
echo "Security credentials file not used. Nothing to do."
|
||||
else
|
||||
shred -zvu ${SECURITY_CREDENTIALS_FILE}
|
||||
fi
|
||||
|
||||
echo "End settings"
|
8
kibana/config/entrypoint.sh
Normal file
@@ -0,0 +1,8 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

# It will run every .sh script located in entrypoint-scripts folder in lexicographical order
for script in `ls /entrypoint-scripts/*.sh | sort -n`; do
bash "$script"

done
@@ -1,92 +0,0 @@
|
||||
# Kibana is served by a back end server. This setting specifies the port to use.
|
||||
server.port: 5601
|
||||
|
||||
# This setting specifies the IP address of the back end server.
|
||||
server.host: "0.0.0.0"
|
||||
|
||||
# Enables you to specify a path to mount Kibana at if you are running behind a proxy. This setting
|
||||
# cannot end in a slash.
|
||||
# server.basePath: ""
|
||||
|
||||
# The maximum payload size in bytes for incoming server requests.
|
||||
# server.maxPayloadBytes: 1048576
|
||||
|
||||
# The Kibana server's name. This is used for display purposes.
|
||||
# server.name: "your-hostname"
|
||||
|
||||
# The URL of the Elasticsearch instance to use for all your queries.
|
||||
elasticsearch.url: "http://elasticsearch:9200"
|
||||
|
||||
# When this setting’s value is true Kibana uses the hostname specified in the server.host
|
||||
# setting. When the value of this setting is false, Kibana uses the hostname of the host
|
||||
# that connects to this Kibana instance.
|
||||
# elasticsearch.preserveHost: true
|
||||
|
||||
# Kibana uses an index in Elasticsearch to store saved searches, visualizations and
|
||||
# dashboards. Kibana creates a new index if the index doesn’t already exist.
|
||||
# kibana.index: ".kibana"
|
||||
|
||||
# The default application to load.
|
||||
# kibana.defaultAppId: "discover"
|
||||
|
||||
# If your Elasticsearch is protected with basic authentication, these settings provide
|
||||
# the username and password that the Kibana server uses to perform maintenance on the Kibana
|
||||
# index at startup. Your Kibana users still need to authenticate with Elasticsearch, which
|
||||
# is proxied through the Kibana server.
|
||||
# elasticsearch.username: "user"
|
||||
# elasticsearch.password: "pass"
|
||||
|
||||
# Paths to the PEM-format SSL certificate and SSL key files, respectively. These
|
||||
# files enable SSL for outgoing requests from the Kibana server to the browser.
|
||||
# server.ssl.cert: /path/to/your/server.crt
|
||||
# server.ssl.key: /path/to/your/server.key
|
||||
|
||||
# Optional settings that provide the paths to the PEM-format SSL certificate and key files.
|
||||
# These files validate that your Elasticsearch backend uses the same key files.
|
||||
# elasticsearch.ssl.cert: /path/to/your/client.crt
|
||||
# elasticsearch.ssl.key: /path/to/your/client.key
|
||||
|
||||
# Optional setting that enables you to specify a path to the PEM file for the certificate
|
||||
# authority for your Elasticsearch instance.
|
||||
# elasticsearch.ssl.ca: /path/to/your/CA.pem
|
||||
|
||||
# To disregard the validity of SSL certificates, change this setting’s value to false.
|
||||
# elasticsearch.ssl.verify: true
|
||||
|
||||
# Time in milliseconds to wait for Elasticsearch to respond to pings. Defaults to the value of
|
||||
# the elasticsearch.requestTimeout setting.
|
||||
# elasticsearch.pingTimeout: 1500
|
||||
|
||||
# Time in milliseconds to wait for responses from the back end or Elasticsearch. This value
|
||||
# must be a positive integer.
|
||||
# elasticsearch.requestTimeout: 30000
|
||||
|
||||
# List of Kibana client-side headers to send to Elasticsearch. To send *no* client-side
|
||||
# headers, set this value to [] (an empty list).
|
||||
# elasticsearch.requestHeadersWhitelist: [ authorization ]
|
||||
|
||||
# Time in milliseconds for Elasticsearch to wait for responses from shards. Set to 0 to disable.
|
||||
# elasticsearch.shardTimeout: 0
|
||||
|
||||
# Time in milliseconds to wait for Elasticsearch at Kibana startup before retrying.
|
||||
# elasticsearch.startupTimeout: 5000
|
||||
|
||||
# Specifies the path where Kibana creates the process ID file.
|
||||
# pid.file: /var/run/kibana.pid
|
||||
|
||||
# Enables you specify a file where Kibana stores log output.
|
||||
# logging.dest: stdout
|
||||
|
||||
# Set the value of this setting to true to suppress all logging output.
|
||||
# logging.silent: false
|
||||
|
||||
# Set the value of this setting to true to suppress all logging output other than error messages.
|
||||
# logging.quiet: false
|
||||
|
||||
# Set the value of this setting to true to log all events, including system usage information
|
||||
# and all requests.
|
||||
# logging.verbose: false
|
||||
|
||||
# Set the interval in milliseconds to sample system and process performance
|
||||
# metrics. Minimum is 100ms. Defaults to 10000.
|
||||
# ops.interval: 10000
|
@@ -1,25 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
host="$1"
|
||||
shift
|
||||
cmd="kibana"
|
||||
WAZUH_KIBANA_PLUGIN_URL=${WAZUH_KIBANA_PLUGIN_URL:-https://packages.wazuh.com/wazuhapp/wazuhapp-2.0_5.4.2.zip}
|
||||
|
||||
until curl -XGET $host:9200; do
|
||||
>&2 echo "Elastic is unavailable - sleeping"
|
||||
sleep 1
|
||||
done
|
||||
|
||||
sleep 30
|
||||
|
||||
>&2 echo "Elastic is up - executing command"
|
||||
|
||||
if /usr/share/kibana/bin/kibana-plugin list | grep wazuh; then
|
||||
echo "Wazuh APP already installed"
|
||||
else
|
||||
/usr/share/kibana/bin/kibana-plugin install ${WAZUH_KIBANA_PLUGIN_URL}
|
||||
fi
|
||||
|
||||
exec $cmd
|
1
kibana/config/wazuhapp-3.11.5_7.4.2.zip.REMOVED.git-id
Normal file
@@ -0,0 +1 @@
d3370881d16407941e250126bd331db13e7c8b63
30
kibana/config/welcome_wazuh.sh
Normal file
@@ -0,0 +1,30 @@
|
||||
#!/bin/bash
|
||||
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)
|
||||
|
||||
if [[ $CHANGE_WELCOME == "true" ]]
|
||||
then
|
||||
|
||||
rm -rf ./optimize/bundles
|
||||
|
||||
kibana_path="/usr/share/kibana"
|
||||
# Set Wazuh app as the default landing page
|
||||
echo "Set Wazuh app as the default landing page"
|
||||
echo "server.defaultRoute: /app/wazuh" >> $kibana_path/config/kibana.yml
|
||||
|
||||
# Redirect Kibana welcome screen to Discover
|
||||
echo "Redirect Kibana welcome screen to Discover"
|
||||
sed -i "s:'/app/kibana#/home':'/app/wazuh':g" $kibana_path/src/core/public/chrome/chrome_service.js
|
||||
|
||||
# Hide management undesired links
|
||||
echo "Hide management undesired links"
|
||||
sed -i 's#visible: true#visible: false#g' $kibana_path/x-pack/legacy/plugins/rollup/public/crud_app/index.js
|
||||
sed -i 's#visible: true#visible: false#g' $kibana_path/x-pack/legacy/plugins/license_management/public/management_section.js
|
||||
sed -i 's#visible: true#visible: false#g' $kibana_path/x-pack/legacy/plugins/index_lifecycle_management/public/register_management_section.js
|
||||
sed -i 's#visible: true#visible: false#g' $kibana_path/x-pack/legacy/plugins/cross_cluster_replication/public/register_routes.js
|
||||
sed -i 's#visible: true#visible: false#g' $kibana_path/x-pack/legacy/plugins/remote_clusters/public/index.js
|
||||
sed -i 's#visible: true#visible: false#g' $kibana_path/x-pack/legacy/plugins/upgrade_assistant/public/index.js
|
||||
sed -i 's#visible: true#visible: false#g' $kibana_path/x-pack/legacy/plugins/snapshot_restore/public/plugin.js
|
||||
sed -i 's#visible: true#visible: false#g' $kibana_path/x-pack/legacy/plugins/remote_clusters/public/plugin.js
|
||||
sed -i 's#visible: true#visible: false#g' $kibana_path/x-pack/legacy/plugins/index_management/public/register_management_section.js
|
||||
fi
|
||||
|
43
kibana/config/xpack_config.sh
Normal file
@@ -0,0 +1,43 @@
|
||||
#!/bin/bash
|
||||
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)
|
||||
|
||||
kibana_config_file="/usr/share/kibana/config/kibana.yml"
|
||||
if grep -Fq "#xpack features" "$kibana_config_file";
|
||||
then
|
||||
declare -A CONFIG_MAP=(
|
||||
[xpack.apm.ui.enabled]=$XPACK_APM
|
||||
[xpack.grokdebugger.enabled]=$XPACK_DEVTOOLS
|
||||
[xpack.searchprofiler.enabled]=$XPACK_DEVTOOLS
|
||||
[xpack.ml.enabled]=$XPACK_ML
|
||||
[xpack.canvas.enabled]=$XPACK_CANVAS
|
||||
[xpack.logstash.enabled]=$XPACK_LOGS
|
||||
[xpack.infra.enabled]=$XPACK_INFRA
|
||||
[xpack.monitoring.enabled]=$XPACK_MONITORING
|
||||
[xpack.maps.enabled]=$XPACK_MAPS
|
||||
[xpack.uptime.enabled]=$XPACK_UPTIME
|
||||
[xpack.siem.enabled]=$XPACK_SIEM
|
||||
[console.enabled]=$XPACK_DEVTOOLS
|
||||
)
|
||||
for i in "${!CONFIG_MAP[@]}"
|
||||
do
|
||||
if [ "${CONFIG_MAP[$i]}" != "" ]; then
|
||||
sed -i 's/.'"$i"'.*/'"$i"': '"${CONFIG_MAP[$i]}"'/' $kibana_config_file
|
||||
fi
|
||||
done
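# Illustrative example (not part of the original script): with XPACK_ML="false", the
# sed above rewrites the existing "xpack.ml.enabled: ..." line in kibana.yml to
# "xpack.ml.enabled: false"; variables left empty keep the value already in the file.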
|
||||
else
|
||||
echo "
|
||||
#xpack features
|
||||
xpack.apm.ui.enabled: $XPACK_APM
|
||||
xpack.grokdebugger.enabled: $XPACK_DEVTOOLS
|
||||
xpack.searchprofiler.enabled: $XPACK_DEVTOOLS
|
||||
xpack.ml.enabled: $XPACK_ML
|
||||
xpack.canvas.enabled: $XPACK_CANVAS
|
||||
xpack.logstash.enabled: $XPACK_LOGS
|
||||
xpack.infra.enabled: $XPACK_INFRA
|
||||
xpack.monitoring.enabled: $XPACK_MONITORING
|
||||
xpack.maps.enabled: $XPACK_MAPS
|
||||
xpack.uptime.enabled: $XPACK_UPTIME
|
||||
xpack.siem.enabled: $XPACK_SIEM
|
||||
console.enabled: $XPACK_DEVTOOLS
|
||||
" >> $kibana_config_file
|
||||
fi
|
@@ -1,12 +1,46 @@
|
||||
FROM logstash:5.4.2
|
||||
# Wazuh App Copyright (C) 2019 Wazuh Inc. (License GPLv2)
|
||||
|
||||
RUN apt-get update
|
||||
ARG LOGSTASH_VERSION=7.4.2
|
||||
FROM docker.elastic.co/logstash/logstash:${LOGSTASH_VERSION}
|
||||
|
||||
COPY config/logstash.conf /etc/logstash/conf.d/logstash.conf
|
||||
COPY config/wazuh-elastic5-template.json /etc/logstash/wazuh-elastic5-template.json
|
||||
COPY --chown=logstash:logstash config/entrypoint.sh /entrypoint.sh
|
||||
|
||||
RUN chmod 755 /entrypoint.sh
|
||||
|
||||
ADD config/run.sh /tmp/run.sh
|
||||
RUN chmod 755 /tmp/run.sh
|
||||
RUN rm -f /usr/share/logstash/pipeline/logstash.conf
|
||||
|
||||
ENTRYPOINT ["/tmp/run.sh"]
|
||||
ENV PIPELINE_FROM_FILE="false"
|
||||
COPY config/01-wazuh.conf /usr/share/logstash/pipeline/01-wazuh.conf
|
||||
|
||||
# This CA is created for testing. Please set your own CA pem signed certificate.
|
||||
# command: $ docker build <logstash_directory> --build-arg SECURITY_CA_PEM_LOCATION=<CA_PEM_LOCATION> --build-arg SECURITY_CA_PEM_ARG=<CA_PEM_NAME>
|
||||
# ENV variables are necessary: SECURITY_CA_PEM
|
||||
# Sample:
|
||||
# ARG SECURITY_CA_PEM_LOCATION="config/server.TEST-CA-signed.pem"
|
||||
# ARG SECURITY_CA_PEM_ARG="server.TEST-CA-signed.pem"
|
||||
ARG SECURITY_CA_PEM_LOCATION=""
|
||||
ARG SECURITY_CA_PEM_ARG=""
|
||||
|
||||
# CA for secure communication with Elastic
|
||||
ADD $SECURITY_CA_PEM_LOCATION /usr/share/logstash/config
|
||||
|
||||
# Set permissions for CA
|
||||
USER root
|
||||
RUN if [[ "x$SECURITY_CA_PEM_LOCATION" == x ]] ; then echo Nothing to do ; else chown logstash: /usr/share/logstash/config/$SECURITY_CA_PEM_ARG ; fi
|
||||
RUN if [[ "x$SECURITY_CA_PEM_LOCATION" == x ]] ; then echo Nothing to do ; else chmod 400 /usr/share/logstash/config/$SECURITY_CA_PEM_ARG ; fi
|
||||
|
||||
# Add entrypoint scripts
|
||||
RUN mkdir /entrypoint-scripts
|
||||
RUN chmod -R 774 /entrypoint-scripts
|
||||
RUN chown -R logstash:logstash /entrypoint-scripts
|
||||
|
||||
COPY --chown=logstash:logstash ./config/05-decrypt_credentials.sh /entrypoint-scripts/05-decrypt_credentials.sh
|
||||
COPY --chown=logstash:logstash ./config/10-entrypoint.sh /entrypoint-scripts/10-entrypoint.sh
|
||||
COPY --chown=logstash:logstash ./config/10-entrypoint_configuration.sh ./config/10-entrypoint_configuration.sh
|
||||
RUN chmod +x /entrypoint-scripts/05-decrypt_credentials.sh && \
|
||||
chmod +x /entrypoint-scripts/10-entrypoint.sh && \
|
||||
chmod +x ./config/10-entrypoint_configuration.sh
|
||||
|
||||
USER logstash
|
||||
|
||||
ENTRYPOINT /entrypoint.sh
|
||||
|
64
logstash/config/01-wazuh.conf
Normal file
@@ -0,0 +1,64 @@
|
||||
# Wazuh App Copyright (C) 2019 Wazuh Inc. (License GPLv2)
|
||||
# Wazuh - Logstash configuration file
|
||||
## Remote Wazuh Manager - Filebeat input
|
||||
input {
|
||||
beats {
|
||||
port => 5000
|
||||
# ssl => true
|
||||
# ssl_certificate => "/etc/logstash/logstash.crt"
|
||||
# ssl_key => "/etc/logstash/logstash.key"
|
||||
}
|
||||
}
|
||||
filter {
|
||||
json {
|
||||
source => "message"
|
||||
}
|
||||
}
|
||||
filter {
|
||||
if [data][srcip] {
|
||||
mutate {
|
||||
add_field => [ "@src_ip", "%{[data][srcip]}" ]
|
||||
}
|
||||
}
|
||||
if [data][aws][sourceIPAddress] {
|
||||
mutate {
|
||||
add_field => [ "@src_ip", "%{[data][aws][sourceIPAddress]}" ]
|
||||
}
|
||||
}
|
||||
}
|
||||
filter {
|
||||
geoip {
|
||||
source => "@src_ip"
|
||||
target => "GeoLocation"
|
||||
fields => ["city_name", "country_name", "region_name", "location"]
|
||||
}
|
||||
date {
|
||||
match => ["timestamp", "ISO8601"]
|
||||
target => "@timestamp"
|
||||
}
|
||||
mutate {
|
||||
remove_field => [ "beat", "input_type", "tags", "count", "@version", "log", "offset", "type", "@src_ip", "host"]
|
||||
}
|
||||
}
|
||||
filter {
|
||||
# Workarounds for vulnerability-detector
|
||||
if "vulnerability-detector" in [rule][groups] {
|
||||
# Drop vulnerability-detector events from Manager
|
||||
if [agent][id] == "000"{
|
||||
drop { }
|
||||
}
|
||||
|
||||
# if exists, remove data.vulnerability.published field due to conflicts
|
||||
if [data][vulnerability][published] {
|
||||
mutate {
|
||||
remove_field => [ "[data][vulnerability][published]" ]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
output {
|
||||
elasticsearch {
|
||||
hosts => ["elasticsearch:9200"]
|
||||
index => "wazuh-alerts-3.x-%{+YYYY.MM.dd}"
|
||||
}
|
||||
}
|
15
logstash/config/05-decrypt_credentials.sh
Normal file
@@ -0,0 +1,15 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

##############################################################################
# Decrypt credentials.
# If the credentials of the users to be created are encrypted,
# they must be decrypted for later use.
##############################################################################

if [[ "x${SECURITY_CREDENTIALS_FILE}" == "x" ]]; then
echo "Security credentials file not used. Nothing to do."
else
echo "TO DO"
fi
# TO DO
163
logstash/config/10-entrypoint.sh
Normal file
@@ -0,0 +1,163 @@
|
||||
#!/bin/bash
|
||||
# Wazuh App Copyright (C) 2019 Wazuh Inc. (License GPLv2)
|
||||
#
|
||||
# OSSEC container bootstrap. See the README for information of the environment
|
||||
# variables expected by this script.
|
||||
#
|
||||
|
||||
set -e
|
||||
|
||||
##############################################################################
|
||||
# Set elasticsearch url.
|
||||
##############################################################################
|
||||
|
||||
if [ "x${ELASTICSEARCH_URL}" = "x" ]; then
|
||||
el_url="http://elasticsearch:9200"
|
||||
else
|
||||
el_url="${ELASTICSEARCH_URL}"
|
||||
fi
|
||||
|
||||
echo "ENTRYPOINT - Elasticsearch url: $el_url"
|
||||
|
||||
##############################################################################
|
||||
# Get Logstash credentials.
|
||||
##############################################################################
|
||||
|
||||
LOGSTASH_USER=""
|
||||
LOGSTASH_PASS=""
|
||||
|
||||
if [[ "x${SECURITY_CREDENTIALS_FILE}" == "x" ]]; then
|
||||
LOGSTASH_USER=${SECURITY_LOGSTASH_USER}
|
||||
LOGSTASH_PASS=${SECURITY_LOGSTASH_PASS}
|
||||
else
|
||||
input=${SECURITY_CREDENTIALS_FILE}
|
||||
while IFS= read -r line
|
||||
do
|
||||
if [[ $line == *"LOGSTASH_PASSWORD"* ]]; then
|
||||
arrIN=(${line//:/ })
|
||||
LOGSTASH_PASS=${arrIN[1]}
|
||||
elif [[ $line == *"LOGSTASH_USER"* ]]; then
|
||||
arrIN=(${line//:/ })
|
||||
LOGSTASH_USER=${arrIN[1]}
|
||||
fi
|
||||
done < "$input"
|
||||
|
||||
fi
|
||||
|
||||
echo "ENTRYPOINT - Logstash credentials obtained."
|
||||
|
||||
##############################################################################
|
||||
# Set authentication for curl command.
|
||||
##############################################################################
|
||||
|
||||
if [[ "${SECURITY_ENABLED}" != "no" ]]; then
auth="-u ${LOGSTASH_USER}:${LOGSTASH_PASS} -k"
elif [[ "${ENABLED_XPACK}" != "true" || "x${ELASTICSEARCH_USERNAME}" = "x" || "x${ELASTICSEARCH_PASSWORD}" = "x" ]]; then
|
||||
auth=""
|
||||
else
|
||||
auth="--user ${ELASTICSEARCH_USERNAME}:${ELASTICSEARCH_PASSWORD}"
|
||||
fi
|
||||
|
||||
echo "ENTRYPOINT - curl authentication established"
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Customize logstash output ip.
|
||||
##############################################################################
|
||||
|
||||
if [ "$LOGSTASH_OUTPUT" != "" ]; then
|
||||
>&2 echo "ENTRYPOINT - Customize Logstash ouput ip."
|
||||
sed -i 's|http://elasticsearch:9200|'$LOGSTASH_OUTPUT'|g' /usr/share/logstash/config/logstash.yml
|
||||
|
||||
if [[ "$PIPELINE_FROM_FILE" == "false" ]]; then
|
||||
sed -i 's|elasticsearch:9200|'$LOGSTASH_OUTPUT'|g' /usr/share/logstash/pipeline/01-wazuh.conf
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Waiting for elasticsearch.
|
||||
##############################################################################
|
||||
|
||||
until curl $auth -XGET $el_url; do
|
||||
>&2 echo "ENTRYPOINT - Elastic is unavailable - sleeping."
|
||||
sleep 5
|
||||
done
|
||||
|
||||
sleep 2
|
||||
|
||||
>&2 echo "ENTRYPOINT - Elasticsearch is up."
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Create keystore if security is enabled.
|
||||
##############################################################################
|
||||
|
||||
if [[ $SECURITY_ENABLED == "yes" ]]; then
|
||||
|
||||
echo "ENTRYPOINT - Create Keystore."
|
||||
|
||||
## Create secure keystore
|
||||
SECURITY_RANDOM_PASS=`date +%s | sha256sum | base64 | head -c 32 ; echo`
|
||||
export LOGSTASH_KEYSTORE_PASS=$SECURITY_RANDOM_PASS
|
||||
/usr/share/logstash/bin/logstash-keystore --path.settings /usr/share/logstash/config create
|
||||
|
||||
## Settings for logstash.yml
|
||||
bash /usr/share/logstash/config/10-entrypoint_configuration.sh
|
||||
|
||||
## Add keys to the keystore
|
||||
echo -e "$LOGSTASH_USER" | /usr/share/logstash/bin/logstash-keystore --path.settings /usr/share/logstash/config add LOGSTASH_KS_USER
|
||||
echo -e "$LOGSTASH_PASS" | /usr/share/logstash/bin/logstash-keystore --path.settings /usr/share/logstash/config add LOGSTASH_KS_PASS
|
||||
|
||||
fi
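# Illustrative note (not part of the original script): keys added to the keystore can
# be referenced from logstash.yml or pipeline settings with the ${VAR} syntax, e.g.:
#   user => "${LOGSTASH_KS_USER}"
#   password => "${LOGSTASH_KS_PASS}"
# assuming the Elasticsearch output is configured to use them.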
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Waiting for wazuh alerts template
|
||||
##############################################################################
|
||||
|
||||
strlen=0
|
||||
|
||||
while [[ $strlen -eq 0 ]]
|
||||
do
|
||||
template=$(curl $auth $el_url/_cat/templates/wazuh -s)
|
||||
strlen=${#template}
|
||||
>&2 echo "ENTRYPOINT - Wazuh alerts template not loaded - sleeping."
|
||||
sleep 2
|
||||
done
|
||||
|
||||
sleep 2
|
||||
|
||||
>&2 echo "ENTRYPOINT - Wazuh alerts template is loaded."
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Remove credentials file
|
||||
##############################################################################
|
||||
|
||||
>&2 echo "ENTRYPOINT - Removing unnecessary files."
|
||||
|
||||
if [[ "x${SECURITY_CREDENTIALS_FILE}" == "x" ]]; then
|
||||
echo "ENTRYPOINT - Security credentials file not used. Nothing to do."
|
||||
else
|
||||
shred -zvu ${SECURITY_CREDENTIALS_FILE}
|
||||
fi
|
||||
|
||||
>&2 echo "ENTRYPOINT - Unnecessary files removed."
|
||||
|
||||
##############################################################################
|
||||
# Map environment variables to entries in logstash.yml.
|
||||
# Note that this will mutate logstash.yml in place if any such settings are found.
|
||||
# This may be undesirable, especially if logstash.yml is bind-mounted from the
|
||||
# host system.
|
||||
##############################################################################
|
||||
|
||||
env2yaml /usr/share/logstash/config/logstash.yml
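# Illustrative example (not part of the original script): env2yaml is the helper
# shipped with the official Logstash image; e.g. exporting PIPELINE_WORKERS=4 before
# start-up would be written to logstash.yml as "pipeline.workers: 4".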
|
||||
|
||||
export LS_JAVA_OPTS="-Dls.cgroup.cpuacct.path.override=/ -Dls.cgroup.cpu.path.override=/ $LS_JAVA_OPTS"
|
||||
|
||||
if [[ -z $1 ]] || [[ ${1:0:1} == '-' ]] ; then
|
||||
exec logstash "$@"
|
||||
else
|
||||
exec "$@"
|
||||
fi
|
27
logstash/config/10-entrypoint_configuration.sh
Normal file
@@ -0,0 +1,27 @@
#!/bin/bash
# Wazuh App Copyright (C) 2019 Wazuh Inc. (License GPLv2)
#
# OSSEC container bootstrap. See the README for information of the environment
# variables expected by this script.
#

set -e

##############################################################################
# Adapt logstash.yml configuration.
##############################################################################

if [[ $SECURITY_ENABLED == "yes" ]]; then

echo "CONFIGURATION - TO DO"

# Settings for logstash.yml
# Example:
# echo "
# xpack.monitoring.enabled: true
# xpack.monitoring.elasticsearch.username: LOGSTASH_USER
# xpack.monitoring.elasticsearch.password: LOGSTASH_PASS
# xpack.monitoring.elasticsearch.ssl.certificate_authority: /usr/share/logstash/config/CA.pem
# " >> /usr/share/logstash/config/logstash.yml

fi
8
logstash/config/entrypoint.sh
Normal file
@@ -0,0 +1,8 @@
#!/bin/bash
# Wazuh App Copyright (C) 2019 Wazuh Inc. (License GPLv2)

# It will run every .sh script located in entrypoint-scripts folder in lexicographical order
for script in `ls /entrypoint-scripts/*.sh | sort -n`; do
bash "$script"

done
@@ -1,43 +0,0 @@
|
||||
# Wazuh - Logstash configuration file
|
||||
## Remote Wazuh Manager - Filebeat input
|
||||
input {
|
||||
beats {
|
||||
port => 5000
|
||||
codec => "json_lines"
|
||||
# ssl => true
|
||||
# ssl_certificate => "/etc/logstash/logstash.crt"
|
||||
# ssl_key => "/etc/logstash/logstash.key"
|
||||
}
|
||||
}
|
||||
## Local Wazuh Manager - JSON file input
|
||||
#input {
|
||||
# file {
|
||||
# type => "wazuh-alerts"
|
||||
# path => "/var/ossec/logs/alerts/alerts.json"
|
||||
# codec => "json"
|
||||
# }
|
||||
#}
|
||||
filter {
|
||||
geoip {
|
||||
source => "srcip"
|
||||
target => "GeoLocation"
|
||||
fields => ["city_name", "continent_code", "country_code2", "country_name", "region_name", "location"]
|
||||
}
|
||||
date {
|
||||
match => ["timestamp", "ISO8601"]
|
||||
target => "@timestamp"
|
||||
}
|
||||
mutate {
|
||||
remove_field => [ "timestamp", "beat", "fields", "input_type", "tags", "count", "@version", "log", "offset", "type"]
|
||||
}
|
||||
}
|
||||
output {
|
||||
elasticsearch {
|
||||
hosts => ["elasticsearch:9200"]
|
||||
index => "wazuh-alerts-%{+YYYY.MM.dd}"
|
||||
document_type => "wazuh"
|
||||
template => "/etc/logstash/wazuh-elastic5-template.json"
|
||||
template_name => "wazuh"
|
||||
template_overwrite => true
|
||||
}
|
||||
}
|
@@ -1,31 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
#
|
||||
# OSSEC container bootstrap. See the README for information of the environment
|
||||
# variables expected by this script.
|
||||
#
|
||||
|
||||
#
|
||||
|
||||
#
|
||||
# Apply Templates
|
||||
#
|
||||
|
||||
set -e
|
||||
host="elasticsearch"
|
||||
until curl -XGET $host:9200; do
|
||||
>&2 echo "Elastic is unavailable - sleeping"
|
||||
sleep 1
|
||||
done
|
||||
|
||||
# Add logstash as command if needed
|
||||
if [ "${1:0:1}" = '-' ]; then
|
||||
set -- logstash "$@"
|
||||
fi
|
||||
|
||||
# Run as user "logstash" if the command is "logstash"
|
||||
if [ "$1" = 'logstash' ]; then
|
||||
set -- gosu logstash "$@"
|
||||
fi
|
||||
|
||||
exec "$@"
|
@@ -1,620 +0,0 @@
|
||||
{
|
||||
"order": 0,
|
||||
"template": "wazuh*",
|
||||
"settings": {
|
||||
"index.refresh_interval": "5s"
|
||||
},
|
||||
"mappings": {
|
||||
"wazuh": {
|
||||
"dynamic_templates": [
|
||||
{
|
||||
"string_as_keyword": {
|
||||
"match_mapping_type": "string",
|
||||
"mapping": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"properties": {
|
||||
"@timestamp": {
|
||||
"type": "date",
|
||||
"format": "dateOptionalTime"
|
||||
},
|
||||
"@version": {
|
||||
"type": "text"
|
||||
},
|
||||
"agent": {
|
||||
"properties": {
|
||||
"ip": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"id": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"name": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
}
|
||||
}
|
||||
},
|
||||
"manager": {
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
}
|
||||
}
|
||||
},
|
||||
"dstuser": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"AlertsFile": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"full_log": {
|
||||
"type": "text"
|
||||
},
|
||||
"previous_log": {
|
||||
"type": "text"
|
||||
},
|
||||
"GeoLocation": {
|
||||
"properties": {
|
||||
"area_code": {
|
||||
"type": "long"
|
||||
},
|
||||
"city_name": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"continent_code": {
|
||||
"type": "text"
|
||||
},
|
||||
"coordinates": {
|
||||
"type": "double"
|
||||
},
|
||||
"country_code2": {
|
||||
"type": "text"
|
||||
},
|
||||
"country_code3": {
|
||||
"type": "text"
|
||||
},
|
||||
"country_name": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"dma_code": {
|
||||
"type": "long"
|
||||
},
|
||||
"ip": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"latitude": {
|
||||
"type": "double"
|
||||
},
|
||||
"location": {
|
||||
"type": "geo_point"
|
||||
},
|
||||
"longitude": {
|
||||
"type": "double"
|
||||
},
|
||||
"postal_code": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"real_region_name": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"region_name": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"timezone": {
|
||||
"type": "text"
|
||||
}
|
||||
}
|
||||
},
|
||||
"host": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"syscheck": {
|
||||
"properties": {
|
||||
"path": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"sha1_before": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"sha1_after": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"uid_before": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"uid_after": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"gid_before": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"gid_after": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"perm_before": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"perm_after": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"md5_after": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"md5_before": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"gname_after": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"gname_before": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"inode_after": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"inode_before": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"mtime_after": {
|
||||
"type": "date",
|
||||
"format": "dateOptionalTime",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"mtime_before": {
|
||||
"type": "date",
|
||||
"format": "dateOptionalTime",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"uname_after": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"uname_before": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"size_before": {
|
||||
"type": "long",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"size_after": {
|
||||
"type": "long",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"diff": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"event": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
}
|
||||
}
|
||||
},
|
||||
"location": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"message": {
|
||||
"type": "text"
|
||||
},
|
||||
"offset": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"rule": {
|
||||
"properties": {
|
||||
"description": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"groups": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"level": {
|
||||
"type": "long",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"id": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"cve": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"info": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"frequency": {
|
||||
"type": "long",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"firedtimes": {
|
||||
"type": "long",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"cis": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"pci_dss": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
}
|
||||
}
|
||||
},
|
||||
"decoder": {
|
||||
"properties": {
|
||||
"parent": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"name": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"ftscomment": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"fts": {
|
||||
"type": "long",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"accumulate": {
|
||||
"type": "long",
|
||||
"doc_values": "true"
|
||||
}
|
||||
}
|
||||
},
|
||||
"srcip": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"protocol": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"action": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"dstip": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"dstport": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"srcuser": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"program_name": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"id": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"status": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"command": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"url": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"data": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"system_name": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"type": {
|
||||
"type": "text"
|
||||
},
|
||||
"title": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"oscap": {
|
||||
"properties": {
|
||||
"check.title": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"check.id": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"check.result": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"check.severity": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"check.description": {
|
||||
"type": "text"
|
||||
},
|
||||
"check.rationale": {
|
||||
"type": "text"
|
||||
},
|
||||
"check.references": {
|
||||
"type": "text"
|
||||
},
|
||||
"check.identifiers": {
|
||||
"type": "text"
|
||||
},
|
||||
"check.oval.id": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"scan.id": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"scan.content": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"scan.benchmark.id": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"scan.profile.title": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"scan.profile.id": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"scan.score": {
|
||||
"type": "double",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"scan.return_code": {
|
||||
"type": "long",
|
||||
"doc_values": "true"
|
||||
}
|
||||
}
|
||||
},
|
||||
"audit": {
|
||||
"properties": {
|
||||
"type": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"id": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"syscall": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"exit": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"ppid": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"pid": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"auid": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"uid": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"gid": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"euid": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"suid": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"fsuid": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"egid": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"sgid": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"fsgid": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"tty": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"session": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"command": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"exe": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"key": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"cwd": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"directory.name": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"directory.inode": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"directory.mode": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"file.name": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"file.inode": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"file.mode": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"acct": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"dev": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"enforcing": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"list": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"old-auid": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"old-ses": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"old_enforcing": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"old_prom": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"op": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"prom": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"res": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"srcip": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"subj": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
},
|
||||
"success": {
|
||||
"type": "keyword",
|
||||
"doc_values": "true"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"agent": {
|
||||
"properties": {
|
||||
"@timestamp": {
|
||||
"type": "date",
|
||||
"format": "dateOptionalTime"
|
||||
},
|
||||
"status": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"ip": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"host": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"name": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"id": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
19
nginx/Dockerfile
Normal file
@@ -0,0 +1,19 @@
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)
FROM nginx:latest

ENV DEBIAN_FRONTEND noninteractive

RUN apt-get update && apt-get install -y openssl apache2-utils

COPY config/entrypoint.sh /entrypoint.sh

RUN chmod 755 /entrypoint.sh

RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

VOLUME ["/etc/nginx/conf.d"]

ENV NGINX_NAME="foo" \
    NGINX_PWD="bar"

ENTRYPOINT /entrypoint.sh
79
nginx/config/entrypoint.sh
Normal file
@@ -0,0 +1,79 @@
|
||||
#!/bin/bash
|
||||
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)
|
||||
|
||||
set -e
|
||||
|
||||
# Generating certificates.
|
||||
if [ ! -d /etc/nginx/conf.d/ssl ]; then
|
||||
echo "Generating SSL certificates"
|
||||
mkdir -p /etc/nginx/conf.d/ssl/certs /etc/nginx/conf.d/ssl/private
|
||||
openssl req -x509 -batch -nodes -days 365 -newkey rsa:2048 -keyout /etc/nginx/conf.d/ssl/private/kibana-access.key -out /etc/nginx/conf.d/ssl/certs/kibana-access.pem >/dev/null
|
||||
else
|
||||
echo "SSL certificates already present"
|
||||
fi
|
||||
|
||||
# Setting users credentials.
|
||||
# In order to set NGINX_CREDENTIALS, before "docker-compose up -d" run (a or b):
|
||||
#
|
||||
# a) export NGINX_CREDENTIALS="user1:pass1;user2:pass2;" or
|
||||
# export NGINX_CREDENTIALS="user1:pass1;user2:pass2"
|
||||
#
|
||||
# b) Set NGINX_CREDENTIALS in docker-compose.yml:
|
||||
# NGINX_CREDENTIALS=user1:pass1;user2:pass2; or
|
||||
# NGINX_CREDENTIALS=user1:pass1;user2:pass2
|
||||
#
|
||||
if [ ! -f /etc/nginx/conf.d/kibana.htpasswd ]; then
|
||||
echo "Setting users credentials"
|
||||
if [ ! -z "$NGINX_CREDENTIALS" ]; then
|
||||
IFS=';' read -r -a users <<< "$NGINX_CREDENTIALS"
|
||||
for index in "${!users[@]}"
|
||||
do
|
||||
IFS=':' read -r -a credentials <<< "${users[index]}"
|
||||
if [ $index -eq 0 ]; then
|
||||
echo ${credentials[1]}|htpasswd -i -c /etc/nginx/conf.d/kibana.htpasswd ${credentials[0]} >/dev/null
|
||||
else
|
||||
echo ${credentials[1]}|htpasswd -i /etc/nginx/conf.d/kibana.htpasswd ${credentials[0]} >/dev/null
|
||||
fi
|
||||
done
|
||||
else
|
||||
# NGINX_PWD and NGINX_NAME are declared in nginx/Dockerfile
|
||||
echo $NGINX_PWD|htpasswd -i -c /etc/nginx/conf.d/kibana.htpasswd $NGINX_NAME >/dev/null
|
||||
fi
|
||||
else
|
||||
echo "Kibana credentials already configured"
|
||||
fi
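# Illustrative check (not part of the original script): an entry written above can be
# verified with the same tool, e.g.:
#   htpasswd -vb /etc/nginx/conf.d/kibana.htpasswd user1 pass1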
|
||||
|
||||
if [ "x${NGINX_PORT}" = "x" ]; then
|
||||
NGINX_PORT=443
|
||||
fi
|
||||
|
||||
if [ "x${KIBANA_HOST}" = "x" ]; then
|
||||
KIBANA_HOST="kibana:5601"
|
||||
fi
|
||||
|
||||
echo "Configuring NGINX"
|
||||
cat > /etc/nginx/conf.d/default.conf <<EOF
|
||||
server {
|
||||
listen 80;
|
||||
listen [::]:80;
|
||||
return 301 https://\$host:${NGINX_PORT}\$request_uri;
|
||||
}
|
||||
|
||||
server {
|
||||
listen ${NGINX_PORT} default_server;
|
||||
listen [::]:${NGINX_PORT};
|
||||
ssl on;
|
||||
ssl_certificate /etc/nginx/conf.d/ssl/certs/kibana-access.pem;
|
||||
ssl_certificate_key /etc/nginx/conf.d/ssl/private/kibana-access.key;
|
||||
location / {
|
||||
auth_basic "Restricted";
|
||||
auth_basic_user_file /etc/nginx/conf.d/kibana.htpasswd;
|
||||
proxy_pass http://${KIBANA_HOST}/;
|
||||
proxy_buffer_size 128k;
|
||||
proxy_buffers 4 256k;
|
||||
proxy_busy_buffers_size 256k;
|
||||
}
|
||||
}
|
||||
EOF
|
||||
|
||||
nginx -g 'daemon off;'
|
145
wazuh/Dockerfile
@@ -1,35 +1,134 @@
|
||||
FROM centos:latest
|
||||
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)
|
||||
FROM phusion/baseimage:latest
|
||||
|
||||
COPY config/*.repo /etc/yum.repos.d/
|
||||
# Arguments
|
||||
ARG FILEBEAT_VERSION=7.4.2
|
||||
ARG WAZUH_VERSION=3.11.5-1
|
||||
|
||||
RUN yum -y update; yum clean all;
|
||||
RUN yum -y install epel-release openssl useradd; yum clean all
|
||||
RUN yum -y install postfix mailx cyrus-sasl cyrus-sasl-plain; yum clean all
|
||||
RUN groupadd -g 1000 ossec
|
||||
RUN useradd -u 1000 -g 1000 ossec
|
||||
RUN yum install -y wazuh-manager wazuh-api
|
||||
# Environment variables
|
||||
ENV API_USER="foo" \
|
||||
API_PASS="bar"
|
||||
|
||||
ARG TEMPLATE_VERSION="v3.11.5"
|
||||
ENV FILEBEAT_DESTINATION="elasticsearch"
|
||||
|
||||
ADD config/data_dirs.env /data_dirs.env
|
||||
ADD config/init.bash /init.bash
|
||||
COPY config/wazuh-manager_3.11.5-1_amd64.deb /wazuh-manager_3.11.5-1_amd64.deb
|
||||
COPY config/wazuh-api_3.11.5-1_amd64.deb /wazuh-api_3.11.5-1_amd64.deb
|
||||
|
||||
# Install packages
|
||||
RUN set -x && \
|
||||
echo "deb https://packages.wazuh.com/3.x/apt/ stable main" | tee /etc/apt/sources.list.d/wazuh.list && \
|
||||
curl -s https://packages.wazuh.com/key/GPG-KEY-WAZUH | apt-key add - && \
|
||||
curl --silent --location https://deb.nodesource.com/setup_8.x | bash - && \
|
||||
echo "postfix postfix/mailname string wazuh-manager" | debconf-set-selections && \
|
||||
echo "postfix postfix/main_mailer_type string 'Internet Site'" | debconf-set-selections && \
|
||||
groupadd -g 1000 ossec && \
|
||||
useradd -u 1000 -g 1000 -d /var/ossec ossec && \
|
||||
add-apt-repository universe && \
|
||||
apt-get update && \
|
||||
apt-get upgrade -y -o Dpkg::Options::="--force-confold" && \
|
||||
apt-get --no-install-recommends --no-install-suggests -y install openssl apt-transport-https vim expect python-boto python-pip python-cryptography && \
|
||||
apt-get --no-install-recommends --no-install-suggests -y install postfix bsd-mailx mailutils libsasl2-2 ca-certificates libsasl2-modules && \
|
||||
# apt-get --no-install-recommends --no-install-suggests -y install wazuh-manager=${WAZUH_VERSION} && \
|
||||
dpkg -i /wazuh-manager_3.11.5-1_amd64.deb && apt-get install -f && \
|
||||
# apt-get --no-install-recommends --no-install-suggests -y install nodejs wazuh-api=${WAZUH_VERSION} && \
|
||||
apt-get --no-install-recommends --no-install-suggests -y install nodejs && \
|
||||
dpkg -i /wazuh-api_3.11.5-1_amd64.deb && apt-get install -f && \
|
||||
# Disable updates to this package
|
||||
echo "wazuh-manager hold" | dpkg --set-selections && \
|
||||
echo "wazuh-api hold" | dpkg --set-selections && \
|
||||
apt-get clean && \
|
||||
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* && \
|
||||
rm -f /var/ossec/logs/alerts/*/*/* && \
|
||||
rm -f /var/ossec/logs/archives/*/*/* && \
|
||||
rm -f /var/ossec/logs/firewall/*/*/* && \
|
||||
rm -f /var/ossec/logs/api/*/*/* && \
|
||||
rm -f /var/ossec/logs/cluster/*/*/* && \
|
||||
rm -f /var/ossec/logs/ossec/*/*/* && \
|
||||
rm /var/ossec/var/run/* && \
|
||||
rm /wazuh-manager_3.11.5-1_amd64.deb && \
|
||||
curl -L -O https://artifacts.elastic.co/downloads/beats/filebeat/filebeat-${FILEBEAT_VERSION}-amd64.deb && \
|
||||
dpkg -i filebeat-${FILEBEAT_VERSION}-amd64.deb && rm -f filebeat-${FILEBEAT_VERSION}-amd64.deb
|
||||
|
||||
# Services
|
||||
RUN mkdir /etc/service/wazuh && \
|
||||
mkdir /etc/service/wazuh-api && \
|
||||
mkdir /etc/service/postfix && \
|
||||
mkdir /etc/service/filebeat
|
||||
|
||||
COPY config/wazuh.runit.service /etc/service/wazuh/run
|
||||
COPY config/wazuh-api.runit.service /etc/service/wazuh-api/run
|
||||
COPY config/postfix.runit.service /etc/service/postfix/run
|
||||
COPY config/filebeat.runit.service /etc/service/filebeat/run
|
||||
|
||||
RUN chmod +x /etc/service/wazuh-api/run && \
|
||||
chmod +x /etc/service/wazuh/run && \
|
||||
chmod +x /etc/service/postfix/run && \
|
||||
chmod +x /etc/service/filebeat/run
|
||||
|
||||
# Copy configuration files from repository
|
||||
COPY config/filebeat_to_elasticsearch.yml ./
|
||||
COPY config/filebeat_to_logstash.yml ./
|
||||
|
||||
# Prepare permanent data
|
||||
# Sync calls are due to https://github.com/docker/docker/issues/9547
|
||||
RUN chmod 755 /init.bash &&\
|
||||
sync && /init.bash &&\
|
||||
sync && rm /init.bash
|
||||
COPY config/permanent_data.env /permanent_data.env
|
||||
COPY config/permanent_data.sh /permanent_data.sh
|
||||
RUN chmod 755 /permanent_data.sh && \
|
||||
sync && \
|
||||
/permanent_data.sh && \
|
||||
sync && \
|
||||
rm /permanent_data.sh
|
||||
|
||||
# Expose ports
|
||||
EXPOSE 55000/tcp 1514/udp 1515/tcp 514/udp 1516/tcp
|
||||
|
||||
RUN curl -L -O https://artifacts.elastic.co/downloads/beats/filebeat/filebeat-5.4.2-x86_64.rpm &&\
|
||||
rpm -vi filebeat-5.4.2-x86_64.rpm && rm filebeat-5.4.2-x86_64.rpm
|
||||
# Setting volumes
|
||||
# Once we declared a volume in the Dockerfile, changes made to that path will have no effect. In other words, any changes made
|
||||
# to these paths from here to the end of the Dockerfile will not be taken into account when mounting the volume.
|
||||
VOLUME ["/var/ossec/api/configuration"]
|
||||
VOLUME ["/var/ossec/etc"]
|
||||
VOLUME ["/var/ossec/logs"]
|
||||
VOLUME ["/var/ossec/queue"]
|
||||
VOLUME ["/var/ossec/var/multigroups"]
|
||||
VOLUME ["/var/ossec/integrations"]
|
||||
VOLUME ["/var/ossec/active-response/bin"]
|
||||
VOLUME ["/var/ossec/wodles"]
|
||||
VOLUME ["/etc/filebeat"]
|
||||
VOLUME ["/etc/postfix"]
|
||||
VOLUME ["/var/lib/filebeat"]
|
||||
|
||||
COPY config/filebeat.yml /etc/filebeat/
# Prepare entrypoint scripts
# Entrypoint scripts must be added to the entrypoint-scripts directory
RUN mkdir /entrypoint-scripts

ADD config/run.sh /tmp/run.sh
RUN chmod 755 /tmp/run.sh
COPY config/entrypoint.sh /entrypoint.sh
COPY config/00-decrypt_credentials.sh /entrypoint-scripts/00-decrypt_credentials.sh
COPY config/01-wazuh.sh /entrypoint-scripts/01-wazuh.sh
COPY config/02-set_filebeat_destination.sh /entrypoint-scripts/02-set_filebeat_destination.sh
COPY config/03-config_filebeat.sh /entrypoint-scripts/03-config_filebeat.sh
COPY config/20-ossec-configuration.sh /entrypoint-scripts/20-ossec-configuration.sh
COPY config/25-backups.sh /entrypoint-scripts/25-backups.sh
COPY config/35-remove_credentials_file.sh /entrypoint-scripts/35-remove_credentials_file.sh
RUN chmod 755 /entrypoint.sh && \
    chmod 755 /entrypoint-scripts/00-decrypt_credentials.sh && \
    chmod 755 /entrypoint-scripts/01-wazuh.sh && \
    chmod 755 /entrypoint-scripts/02-set_filebeat_destination.sh && \
    chmod 755 /entrypoint-scripts/03-config_filebeat.sh && \
    chmod 755 /entrypoint-scripts/20-ossec-configuration.sh && \
    chmod 755 /entrypoint-scripts/25-backups.sh && \
    chmod 755 /entrypoint-scripts/35-remove_credentials_file.sh
VOLUME ["/var/ossec/data"]
|
||||
# Workaround.
|
||||
# Issues: Wazuh-api
|
||||
# https://github.com/wazuh/wazuh-api/issues/440
|
||||
# https://github.com/wazuh/wazuh-api/issues/443
|
||||
COPY --chown=root:ossec config/agents.js /var/ossec/api/controllers/agents.js
|
||||
RUN chmod 770 /var/ossec/api/controllers/agents.js
|
||||
|
||||
EXPOSE 55000/tcp 1514/udp 1515/tcp 514/udp
|
||||
# Load wazuh alerts template.
|
||||
ADD https://raw.githubusercontent.com/wazuh/wazuh/$TEMPLATE_VERSION/extensions/elasticsearch/7.x/wazuh-template.json /etc/filebeat
|
||||
RUN chmod go-w /etc/filebeat/wazuh-template.json
|
||||
|
||||
# Run supervisord so that the container will stay alive
|
||||
|
||||
ENTRYPOINT ["/tmp/run.sh"]
|
||||
# Run all services
|
||||
ENTRYPOINT ["/entrypoint.sh"]
|
15
wazuh/config/00-decrypt_credentials.sh
Normal file
@@ -0,0 +1,15 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

##############################################################################
# Decrypt credentials.
# If the credentials of the API user to be created are encrypted,
# they must be decrypted for later use.
##############################################################################

if [[ "x${SECURITY_CREDENTIALS_FILE}" == "x" ]]; then
  echo "CREDENTIALS - Security credentials file not used. Nothing to do."
else
  echo "CREDENTIALS - TO DO"
fi
# TO DO
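The decryption itself is still marked TO DO above. As a hedged sketch only (not the project's implementation; the cipher, the *.enc naming and the SECURITY_CREDENTIALS_PASSPHRASE variable are assumptions), the else branch could decrypt the file in place with openssl:

# Hypothetical sketch: decrypt a credentials file that was encrypted with
# "openssl enc -aes-256-cbc -salt", using a passphrase passed in the
# SECURITY_CREDENTIALS_PASSPHRASE environment variable.
openssl enc -d -aes-256-cbc -salt \
  -in "${SECURITY_CREDENTIALS_FILE}" \
  -out "${SECURITY_CREDENTIALS_FILE%.enc}" \
  -pass "env:SECURITY_CREDENTIALS_PASSPHRASE"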
249
wazuh/config/01-wazuh.sh
Normal file
@@ -0,0 +1,249 @@
#!/bin/bash
# Wazuh App Copyright (C) 2019 Wazuh Inc. (License GPLv2)

# Variables
source /permanent_data.env

WAZUH_INSTALL_PATH=/var/ossec
WAZUH_CONFIG_MOUNT=/wazuh-config-mount
AUTO_ENROLLMENT_ENABLED=${AUTO_ENROLLMENT_ENABLED:-true}
API_GENERATE_CERTS=${API_GENERATE_CERTS:-true}

##############################################################################
# Aux functions
##############################################################################
print() {
  echo -e $1
}

error_and_exit() {
  echo "Error executing command: '$1'."
  echo 'Exiting.'
  exit 1
}

exec_cmd() {
  eval $1 > /dev/null 2>&1 || error_and_exit "$1"
}

exec_cmd_stdout() {
  eval $1 2>&1 || error_and_exit "$1"
}


##############################################################################
# Edit configuration
##############################################################################

edit_configuration() { # $1 -> setting, $2 -> value
  sed -i "s/^config.$1\s=.*/config.$1 = \"$2\";/g" "${WAZUH_INSTALL_PATH}/api/configuration/config.js" || error_and_exit "sed (editing configuration)"
}

##############################################################################
# This function will attempt to mount every directory in PERMANENT_DATA
# into the respective path.
# If the path is empty, the permanent data volume is also empty, so a backup
# will be copied into it. Otherwise it will not be copied, because there is
# already data inside the volume for the specified path.
##############################################################################

mount_permanent_data() {
  for permanent_dir in "${PERMANENT_DATA[@]}"; do
    # Check if the path is not empty
    if find ${permanent_dir} -mindepth 1 | read; then
      print "The path ${permanent_dir} is already mounted"
    else
      print "Installing ${permanent_dir}"
      exec_cmd "cp -a ${WAZUH_INSTALL_PATH}/data_tmp/permanent${permanent_dir}/. ${permanent_dir}"
    fi
  done
}

##############################################################################
# This function will replace, from the permanent data volume, every file
# contained in PERMANENT_DATA_EXCP.
# Some files, such as 'internal_options.conf', are saved as permanent data,
# but they must be updated to work properly if the Wazuh version is changed.
##############################################################################

apply_exclusion_data() {
  for exclusion_file in "${PERMANENT_DATA_EXCP[@]}"; do
    if [ -e ${WAZUH_INSTALL_PATH}/data_tmp/exclusion/${exclusion_file} ]
    then
      DIR=$(dirname "${exclusion_file}")
      if [ ! -e ${DIR} ]
      then
        mkdir -p ${DIR}
      fi

      print "Updating ${exclusion_file}"
      exec_cmd "cp -p ${WAZUH_INSTALL_PATH}/data_tmp/exclusion/${exclusion_file} ${exclusion_file}"
    fi
  done
}

##############################################################################
# This function will delete from the permanent data volume every file
# contained in PERMANENT_DATA_DEL.
##############################################################################

remove_data_files() {
  for del_file in "${PERMANENT_DATA_DEL[@]}"; do
    if [ -e ${del_file} ]
    then
      print "Removing ${del_file}"
      exec_cmd "rm ${del_file}"
    fi
  done
}

##############################################################################
# Create certificates: Manager
##############################################################################

create_ossec_key_cert() {
  print "Creating ossec-authd key and cert"
  exec_cmd "openssl genrsa -out ${WAZUH_INSTALL_PATH}/etc/sslmanager.key 4096"
  exec_cmd "openssl req -new -x509 -key ${WAZUH_INSTALL_PATH}/etc/sslmanager.key -out ${WAZUH_INSTALL_PATH}/etc/sslmanager.cert -days 3650 -subj /CN=${HOSTNAME}/"
}

##############################################################################
# Create certificates: API
##############################################################################

create_api_key_cert() {
  print "Enabling Wazuh API HTTPS"
  edit_configuration "https" "yes"
  print "Create Wazuh API key and cert"
  exec_cmd "openssl genrsa -out ${WAZUH_INSTALL_PATH}/api/configuration/ssl/server.key 4096"
  exec_cmd "openssl req -new -x509 -key ${WAZUH_INSTALL_PATH}/api/configuration/ssl/server.key -out ${WAZUH_INSTALL_PATH}/api/configuration/ssl/server.crt -days 3650 -subj /CN=${HOSTNAME}/"

  # Granting proper permissions
  chmod 400 ${WAZUH_INSTALL_PATH}/api/configuration/ssl/server.key
  chmod 400 ${WAZUH_INSTALL_PATH}/api/configuration/ssl/server.crt
}

##############################################################################
# Copy all files from $WAZUH_CONFIG_MOUNT to $WAZUH_INSTALL_PATH and respect
# destination file permissions.
#
# For example, to mount the file /var/ossec/data/etc/ossec.conf, mount it at
# $WAZUH_CONFIG_MOUNT/etc/ossec.conf in your container and this code will
# replace the ossec.conf file in /var/ossec/data/etc with yours.
##############################################################################

mount_files() {
  if [ -e "$WAZUH_CONFIG_MOUNT" ]
  then
    print "Identified Wazuh configuration files to mount..."
    exec_cmd_stdout "cp --verbose -r $WAZUH_CONFIG_MOUNT/* $WAZUH_INSTALL_PATH"
  else
    print "No Wazuh configuration files to mount..."
  fi
}

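To make the mount_files() mechanism concrete, here is an illustrative run line (not part of this diff; the host path and image tag are assumptions) that overrides ossec.conf through the /wazuh-config-mount path:

# Hypothetical example: the host file is copied over /var/ossec/etc/ossec.conf at startup.
docker run -d --name wazuh-manager \
  -v "$(pwd)/custom/etc/ossec.conf:/wazuh-config-mount/etc/ossec.conf:ro" \
  wazuh/wazuh:3.11.5_7.6.1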
##############################################################################
# Stop OSSEC
##############################################################################

function ossec_shutdown(){
  ${WAZUH_INSTALL_PATH}/bin/ossec-control stop;
}

##############################################################################
# Interpret any passed arguments (via docker command to this entrypoint) as
# paths or commands, and execute them.
#
# This can be useful for actions that need to be run before the services are
# started, such as "/var/ossec/bin/ossec-control enable agentless".
##############################################################################

docker_custom_args() {
  for CUSTOM_COMMAND in "$@"
  do
    echo "Executing command \`${CUSTOM_COMMAND}\`"
    exec_cmd_stdout "${CUSTOM_COMMAND}"
  done
}

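docker_custom_args() is written to treat each of its arguments as a command to run before the services start. Assuming the entrypoint forwards the container's command-line arguments to it (the image tag and command below are illustrative assumptions, not taken from this diff), a run line could look like:

# Hypothetical example: enable agentless monitoring before the manager starts.
docker run -d --name wazuh-manager \
  wazuh/wazuh:3.11.5_7.6.1 \
  "/var/ossec/bin/ossec-control enable agentless"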
##############################################################################
# Change Wazuh API user credentials.
##############################################################################

change_api_user_credentials() {
  pushd /var/ossec/api/configuration/auth/
  if [[ "x${SECURITY_CREDENTIALS_FILE}" == "x" ]]; then
    WAZUH_API_USER=${API_USER}
    WAZUH_API_PASS=${API_PASS}
  else
    input=${SECURITY_CREDENTIALS_FILE}
    while IFS= read -r line
    do
      if [[ $line == *"WAZUH_API_USER"* ]]; then
        arrIN=(${line//:/ })
        WAZUH_API_USER=${arrIN[1]}
      elif [[ $line == *"WAZUH_API_PASS"* ]]; then
        arrIN=(${line//:/ })
        WAZUH_API_PASS=${arrIN[1]}
      fi
    done < "$input"
  fi

  echo "Change Wazuh API user credentials"
  change_user="node htpasswd -b -c user $WAZUH_API_USER $WAZUH_API_PASS"
  eval $change_user
  popd
}

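The loop above splits each matching line on ':' and keeps the second field, so values must not contain colons or spaces. A credentials file of the following shape would satisfy the parser (the contents are illustrative only, not taken from this diff):

# Hypothetical contents of the file referenced by SECURITY_CREDENTIALS_FILE.
WAZUH_API_USER:wazuh-api-user
WAZUH_API_PASS:a-long-random-password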
##############################################################################
# Main function
##############################################################################

main() {
  # Mount permanent data (e.g. ossec.conf)
  mount_permanent_data

  # Restore files stored in permanent data that are not permanent (e.g. internal_options.conf)
  apply_exclusion_data

  # Remove some files in permanent_data (e.g. .template.db)
  remove_data_files

  # Generate ossec-authd certs if AUTO_ENROLLMENT_ENABLED is true and they do not exist
  if [ $AUTO_ENROLLMENT_ENABLED == true ]
  then
    if [ ! -e ${WAZUH_INSTALL_PATH}/etc/sslmanager.key ]
    then
      create_ossec_key_cert
    fi
  fi

  # Generate API certs if API_GENERATE_CERTS is true and they do not exist
  if [ $API_GENERATE_CERTS == true ]
  then
    if [ ! -e ${WAZUH_INSTALL_PATH}/api/configuration/ssl/server.crt ]
    then
      create_api_key_cert
    fi
  fi

  # Mount selected files (WAZUH_CONFIG_MOUNT) to container
  mount_files

  # Trap exit signals and do a proper shutdown
  trap "ossec_shutdown; exit" SIGINT SIGTERM

  # Execute custom args
  docker_custom_args

  # Change API user credentials
  change_api_user_credentials

  # Delete temporary data folder
  rm -rf ${WAZUH_INSTALL_PATH}/data_tmp

}

main
30
wazuh/config/02-set_filebeat_destination.sh
Normal file
@@ -0,0 +1,30 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

##############################################################################
# Set Filebeat destination.
##############################################################################

if [[ $FILEBEAT_DESTINATION == "elasticsearch" ]]; then

  echo "FILEBEAT - Set destination to Elasticsearch"
  cp filebeat_to_elasticsearch.yml /etc/filebeat/filebeat.yml
  if [[ $FILEBEAT_OUTPUT != "" ]]; then
    sed -i "s/elasticsearch:9200/$FILEBEAT_OUTPUT:9200/" /etc/filebeat/filebeat.yml
  fi

elif [[ $FILEBEAT_DESTINATION == "logstash" ]]; then

  echo "FILEBEAT - Set destination to Logstash"
  cp filebeat_to_logstash.yml /etc/filebeat/filebeat.yml
  if [[ $FILEBEAT_OUTPUT != "" ]]; then
    sed -i "s/logstash:5000/$FILEBEAT_OUTPUT:5000/" /etc/filebeat/filebeat.yml
  fi

else
  echo "FILEBEAT - Error choosing destination. Using the default filebeat.yml"
fi

echo "FILEBEAT - Set permissions"

chmod go-w /etc/filebeat/filebeat.yml
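For illustration only (the hostname and image tag are assumptions, not taken from this diff), the two environment variables used above select the Filebeat output and patch its host at container start:

# Hypothetical example: ship alerts to an external Elasticsearch node named es01.
docker run -d --name wazuh-manager \
  -e FILEBEAT_DESTINATION=elasticsearch \
  -e FILEBEAT_OUTPUT=es01 \
  wazuh/wazuh:3.11.5_7.6.1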
23
wazuh/config/03-config_filebeat.sh
Normal file
@@ -0,0 +1,23 @@
#!/bin/bash
# Wazuh App Copyright (C) 2019 Wazuh Inc. (License GPLv2)

set -e

if [[ $FILEBEAT_DESTINATION == "elasticsearch" ]]; then

  WAZUH_FILEBEAT_MODULE=wazuh-filebeat-0.1.tar.gz

  # Modify the output to Elasticsearch if the ELASTICSEARCH_URL is set
  if [ "$ELASTICSEARCH_URL" != "" ]; then
    >&2 echo "FILEBEAT - Customize Elasticsearch output IP."
    sed -i 's|http://elasticsearch:9200|'$ELASTICSEARCH_URL'|g' /etc/filebeat/filebeat.yml
  fi

  # Install Wazuh Filebeat Module

  >&2 echo "FILEBEAT - Install Wazuh Filebeat Module."
  curl -s "https://packages.wazuh.com/3.x/filebeat/${WAZUH_FILEBEAT_MODULE}" | tar -xvz -C /usr/share/filebeat/module
  mkdir -p /usr/share/filebeat/module/wazuh
  chmod 755 -R /usr/share/filebeat/module/wazuh

fi
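Similarly, ELASTICSEARCH_URL in the script above rewrites the full output URL rather than only the host. An illustrative run line (the URL and image tag are assumptions):

# Hypothetical example: point Filebeat at an external Elasticsearch on a non-default port.
docker run -d --name wazuh-manager \
  -e FILEBEAT_DESTINATION=elasticsearch \
  -e ELASTICSEARCH_URL=http://elastic.example.com:9201 \
  wazuh/wazuh:3.11.5_7.6.1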
13
wazuh/config/20-ossec-configuration.sh
Normal file
@@ -0,0 +1,13 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

##############################################################################
# Change Wazuh manager configuration.
##############################################################################

# # Example:
# # Change remote protocol from udp to tcp
# PROTOCOL="tcp"
# sed -i -e '/<remote>/,/<\/remote>/ s|<protocol>udp</protocol>|<protocol>'$PROTOCOL'</protocol>|g' /var/ossec/etc/ossec.conf
# # It is necessary to restart the service in order to apply the new configuration.
# service wazuh-manager restart
10
wazuh/config/25-backups.sh
Normal file
@@ -0,0 +1,10 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

##############################################################################
# Enable Wazuh backups and store them in a repository.
##############################################################################


# TO DO
echo "BACKUPS - TO DO"
14
wazuh/config/35-remove_credentials_file.sh
Normal file
@@ -0,0 +1,14 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

##############################################################################
# Decrypt credentials.
# Remove the credentials file for security reasons.
##############################################################################

if [[ "x${SECURITY_CREDENTIALS_FILE}" == "x" ]]; then
  echo "CREDENTIALS - Security credentials file not used. Nothing to do."
else
  echo "CREDENTIALS - Remove credentials file."
  shred -zvu ${SECURITY_CREDENTIALS_FILE}
fi
1258
wazuh/config/agents.js
Normal file
File diff suppressed because it is too large
@@ -1,9 +0,0 @@
i=0
DATA_DIRS[((i++))]="etc"
DATA_DIRS[((i++))]="ruleset"
DATA_DIRS[((i++))]="logs"
DATA_DIRS[((i++))]="stats"
DATA_DIRS[((i++))]="queue"
DATA_DIRS[((i++))]="var/db"
DATA_DIRS[((i++))]="api"
export DATA_DIRS
15
wazuh/config/entrypoint.sh
Normal file
@@ -0,0 +1,15 @@
#!/bin/bash
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

# Trap to kill container if it is necessary.
trap "exit" SIGINT SIGTERM
# Run every .sh script located in the entrypoint-scripts folder, in lexicographical order
for script in `ls /entrypoint-scripts/*.sh | sort -n`; do
  bash "$script"
done

##############################################################################
# Start Wazuh Server.
##############################################################################

/sbin/my_init
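Because the loop above executes every script in /entrypoint-scripts in lexicographical order, a custom step can be slotted between the existing scripts by choosing a suitable numeric prefix. A sketch under that assumption (the file name and contents are hypothetical, not part of this diff):

# Hypothetical custom step placed between 25-backups.sh and 35-remove_credentials_file.sh.
cat > config/30-custom_tuning.sh <<'EOF'
#!/bin/bash
echo "CUSTOM - extra tuning before credentials cleanup"
EOF
chmod 755 config/30-custom_tuning.sh
# In a derived image, this file would then be copied into /entrypoint-scripts and made executable.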
4
wazuh/config/filebeat.runit.service
Normal file
@@ -0,0 +1,4 @@
#!/bin/sh
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)
service filebeat start
tail -f /var/log/filebeat/filebeat
@@ -1,16 +0,0 @@
filebeat:
  prospectors:
    - input_type: log
      paths:
        - "/var/ossec/data/logs/alerts/alerts.json"
      document_type: wazuh-alerts
      json.message_key: log
      json.keys_under_root: true
      json.overwrite_keys: true

output:
  logstash:
    # The Logstash hosts
    hosts: ["logstash:5000"]
    # ssl:
      # certificate_authorities: ["/etc/filebeat/logstash.crt"]
55
wazuh/config/filebeat_to_elasticsearch.yml
Normal file
@@ -0,0 +1,55 @@
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

# Wazuh - Filebeat configuration file
filebeat.inputs:
  - type: log
    paths:
      - '/var/ossec/logs/alerts/alerts.json'

setup.template.json.enabled: true
setup.template.json.path: "/etc/filebeat/wazuh-template.json"
setup.template.json.name: "wazuh"
setup.template.overwrite: true

processors:
  - decode_json_fields:
      fields: ['message']
      process_array: true
      max_depth: 200
      target: ''
      overwrite_keys: true
  - drop_fields:
      fields: ['message', 'ecs', 'beat', 'input_type', 'tags', 'count', '@version', 'log', 'offset', 'type', 'host']
  - rename:
      fields:
        - from: "data.aws.sourceIPAddress"
          to: "@src_ip"
      ignore_missing: true
      fail_on_error: false
      when:
        regexp:
          data.aws.sourceIPAddress: \b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b
  - rename:
      fields:
        - from: "data.srcip"
          to: "@src_ip"
      ignore_missing: true
      fail_on_error: false
      when:
        regexp:
          data.srcip: \b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b
  - rename:
      fields:
        - from: "data.win.eventdata.ipAddress"
          to: "@src_ip"
      ignore_missing: true
      fail_on_error: false
      when:
        regexp:
          data.win.eventdata.ipAddress: \b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b

output.elasticsearch:
  hosts: ['http://elasticsearch:9200']
  #pipeline: geoip
  indices:
    - index: 'wazuh-alerts-3.x-%{+yyyy.MM.dd}'
15
wazuh/config/filebeat_to_logstash.yml
Normal file
@@ -0,0 +1,15 @@
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)

# Wazuh - Filebeat configuration file
filebeat:
  inputs:
    - type: log
      paths:
        - "/var/ossec/logs/alerts/alerts.json"

output:
  logstash:
    # The Logstash hosts
    hosts: ["logstash:5000"]
    # ssl:
      # certificate_authorities: ["/etc/filebeat/logstash.crt"]
@@ -1,12 +0,0 @@
#!/bin/bash

#
# Initialize the custom data directory layout
#
source /data_dirs.env

cd /var/ossec
for ossecdir in "${DATA_DIRS[@]}"; do
  mv ${ossecdir} ${ossecdir}-template
  ln -s $(realpath --relative-to=$(dirname ${ossecdir}) data)/${ossecdir} ${ossecdir}
done
61
wazuh/config/permanent_data.env
Normal file
@@ -0,0 +1,61 @@
# Permanent data mounted in volumes
i=0
PERMANENT_DATA[((i++))]="/var/ossec/api/configuration"
PERMANENT_DATA[((i++))]="/var/ossec/etc"
PERMANENT_DATA[((i++))]="/var/ossec/logs"
PERMANENT_DATA[((i++))]="/var/ossec/queue"
PERMANENT_DATA[((i++))]="/var/ossec/var/multigroups"
PERMANENT_DATA[((i++))]="/var/ossec/integrations"
PERMANENT_DATA[((i++))]="/var/ossec/active-response/bin"
PERMANENT_DATA[((i++))]="/var/ossec/wodles"
PERMANENT_DATA[((i++))]="/etc/filebeat"
PERMANENT_DATA[((i++))]="/etc/postfix"
export PERMANENT_DATA

# Files mounted in a volume that should not be permanent
i=0
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/etc/internal_options.conf"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/integrations/pagerduty"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/integrations/slack"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/integrations/slack.py"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/integrations/virustotal"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/integrations/virustotal.py"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/active-response/bin/default-firewall-drop.sh"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/active-response/bin/disable-account.sh"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/active-response/bin/firewalld-drop.sh"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/active-response/bin/firewall-drop.sh"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/active-response/bin/host-deny.sh"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/active-response/bin/ip-customblock.sh"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/active-response/bin/ipfw_mac.sh"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/active-response/bin/ipfw.sh"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/active-response/bin/kaspersky.py"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/active-response/bin/kaspersky.sh"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/active-response/bin/npf.sh"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/active-response/bin/ossec-slack.sh"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/active-response/bin/ossec-tweeter.sh"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/active-response/bin/pf.sh"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/active-response/bin/restart-ossec.sh"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/active-response/bin/restart.sh"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/active-response/bin/route-null.sh"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/wodles/aws/aws-s3"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/wodles/aws/aws-s3.py"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/wodles/azure/azure-logs"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/wodles/azure/azure-logs.py"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/wodles/docker/DockerListener"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/wodles/docker/DockerListener.py"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/wodles/oscap/oscap"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/wodles/oscap/oscap.py"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/wodles/oscap/template_oval.xsl"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/wodles/oscap/template_xccdf.xsl"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/wodles/oscap/content/cve-debian-8-oval.xml"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/wodles/oscap/content/cve-debian-9-oval.xml"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/wodles/oscap/content/cve-ubuntu-xenial-oval.xml"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/wodles/oscap/content/ssg-debian-8-ds.xml"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/wodles/oscap/content/ssg-ubuntu-1404-ds.xml"
PERMANENT_DATA_EXCP[((i++))]="/var/ossec/wodles/oscap/content/ssg-ubuntu-1604-ds.xml"
export PERMANENT_DATA_EXCP

# Files mounted in a volume that should be deleted
i=0
PERMANENT_DATA_DEL[((i++))]="/var/ossec/queue/db/.template.db"
export PERMANENT_DATA_DEL
40
wazuh/config/permanent_data.sh
Normal file
@@ -0,0 +1,40 @@
#!/bin/bash
# Wazuh App Copyright (C) 2019 Wazuh Inc. (License GPLv2)

# Variables
source /permanent_data.env

WAZUH_INSTALL_PATH=/var/ossec
DATA_TMP_PATH=${WAZUH_INSTALL_PATH}/data_tmp
mkdir ${DATA_TMP_PATH}

# Move exclusion files to EXCLUSION_PATH
EXCLUSION_PATH=${DATA_TMP_PATH}/exclusion
mkdir ${EXCLUSION_PATH}

for exclusion_file in "${PERMANENT_DATA_EXCP[@]}"; do
  # Create the directory for the exclusion file if it does not exist
  DIR=$(dirname "${exclusion_file}")
  if [ ! -e ${EXCLUSION_PATH}/${DIR} ]
  then
    mkdir -p ${EXCLUSION_PATH}/${DIR}
  fi

  mv ${exclusion_file} ${EXCLUSION_PATH}/${exclusion_file}
done

# Move permanent files to PERMANENT_PATH
PERMANENT_PATH=${DATA_TMP_PATH}/permanent
mkdir ${PERMANENT_PATH}

for permanent_dir in "${PERMANENT_DATA[@]}"; do
  # Create the directory for the permanent file if it does not exist
  DIR=$(dirname "${permanent_dir}")
  if [ ! -e ${PERMANENT_PATH}${DIR} ]
  then
    mkdir -p ${PERMANENT_PATH}${DIR}
  fi

  mv ${permanent_dir} ${PERMANENT_PATH}${permanent_dir}

done
4
wazuh/config/postfix.runit.service
Normal file
@@ -0,0 +1,4 @@
#!/bin/sh
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)
service postfix start
tail -f /var/log/mail.log
@@ -1,79 +0,0 @@
#!/bin/bash

#
# OSSEC container bootstrap. See the README for information of the environment
# variables expected by this script.
#

#

#
# Startup the services
#

source /data_dirs.env
FIRST_TIME_INSTALLATION=false
DATA_PATH=/var/ossec/data

for ossecdir in "${DATA_DIRS[@]}"; do
  if [ ! -e "${DATA_PATH}/${ossecdir}" ]
  then
    echo "Installing ${ossecdir}"
    mkdir -p $(dirname ${DATA_PATH}/${ossecdir})
    cp -pr /var/ossec/${ossecdir}-template ${DATA_PATH}/${ossecdir}
    FIRST_TIME_INSTALLATION=true
  fi
done

touch ${DATA_PATH}/process_list
chgrp ossec ${DATA_PATH}/process_list
chmod g+rw ${DATA_PATH}/process_list

AUTO_ENROLLMENT_ENABLED=${AUTO_ENROLLMENT_ENABLED:-true}

if [ $FIRST_TIME_INSTALLATION == true ]
then

  if [ $AUTO_ENROLLMENT_ENABLED == true ]
  then
    if [ ! -e ${DATA_PATH}/etc/sslmanager.key ]
    then
      echo "Creating ossec-authd key and cert"
      openssl genrsa -out ${DATA_PATH}/etc/sslmanager.key 4096
      openssl req -new -x509 -key ${DATA_PATH}/etc/sslmanager.key \
        -out ${DATA_PATH}/etc/sslmanager.cert -days 3650 \
        -subj /CN=${HOSTNAME}/
    fi
  fi
fi

function ossec_shutdown(){
  /var/ossec/bin/ossec-control stop;
  if [ $AUTO_ENROLLMENT_ENABLED == true ]
  then
    kill $AUTHD_PID
  fi
}

# Trap exit signals and do a proper shutdown
trap "ossec_shutdown; exit" SIGINT SIGTERM

chmod -R g+rw ${DATA_PATH}

if [ $AUTO_ENROLLMENT_ENABLED == true ]
then
  echo "Starting ossec-authd..."
  /var/ossec/bin/ossec-authd -p 1515 -g ossec $AUTHD_OPTIONS >/dev/null 2>&1 &
  AUTHD_PID=$!
fi
sleep 15 # give ossec a reasonable amount of time to start before checking status
LAST_OK_DATE=`date +%s`

## Start services
/usr/sbin/postfix start
/bin/node /var/ossec/api/app.js &
/usr/bin/filebeat.sh &
/var/ossec/bin/ossec-control restart


tail -f /var/ossec/logs/ossec.log
5
wazuh/config/wazuh-api.runit.service
Normal file
@@ -0,0 +1,5 @@
#!/bin/sh
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)
service wazuh-api start
tail -f /var/ossec/logs/api.log
BIN
wazuh/config/wazuh-api_3.11.5-1_amd64.deb
Normal file
Binary file not shown.
@@ -0,0 +1 @@
b4bbb79aca532ca4f5321a89f9dffae1f934bc6f
@@ -1,7 +0,0 @@
[wazuh_repo]
gpgcheck=1
gpgkey=https://packages.wazuh.com/key/GPG-KEY-WAZUH
enabled=1
name=CENTOS-$releasever - Wazuh
baseurl=https://packages.wazuh.com/yum/el/$releasever/$basearch
protect=1
5
wazuh/config/wazuh.runit.service
Normal file
@@ -0,0 +1,5 @@
#!/bin/sh
# Wazuh Docker Copyright (C) 2019 Wazuh Inc. (License GPLv2)
service wazuh-manager start
tail -f /var/ossec/logs/ossec.log