Initial commit

root committed on 2023-08-09 14:01:28 +02:00 (commit f4efbc7a63)
199 changed files with 13338 additions and 0 deletions

erpnext/build Submodule

Submodule erpnext/build added at eb88625f93

erpnext/docker-compose.yml Normal file

@@ -0,0 +1,308 @@
name: frappe_docker
services:
backend:
depends_on:
configurator:
condition: service_completed_successfully
image: erpnexthp:1.0.0
networks:
erpnext_network: null
volumes:
- type: volume
source: erpnext_data-sites
target: /home/frappe/frappe-bench/sites
volume: {}
configurator:
command:
- |
ls -1 apps > sites/apps.txt; bench set-config -g db_host $$DB_HOST; bench set-config -gp db_port $$DB_PORT; bench set-config -g redis_cache "redis://$$REDIS_CACHE"; bench set-config -g redis_queue "redis://$$REDIS_QUEUE"; bench set-config -g redis_socketio "redis://$$REDIS_SOCKETIO"; bench set-config -gp socketio_port $$SOCKETIO_PORT;
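# Note: the doubled "$$" escapes "$" for docker compose, so the shell inside the
# configurator container sees $DB_HOST, $REDIS_CACHE, etc. at runtime instead of
# compose trying to interpolate them from the host environment.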
depends_on:
db:
condition: service_healthy
redis-cache:
condition: service_started
redis-queue:
condition: service_started
redis-socketio:
condition: service_started
entrypoint:
- bash
- -c
environment:
DB_HOST: db
DB_PORT: "3306"
REDIS_CACHE: redis-cache:6379
REDIS_QUEUE: redis-queue:6379
REDIS_SOCKETIO: redis-socketio:6379
SOCKETIO_PORT: "9000"
image: erpnexthp:1.0.0
networks:
erpnext_network: null
volumes:
- type: volume
source: erpnext_data-sites
target: /home/frappe/frappe-bench/sites
volume: {}
db:
command:
- --character-set-server=utf8mb4
- --collation-server=utf8mb4_unicode_ci
- --skip-character-set-client-handshake
- --skip-innodb-read-only-compressed
environment:
MYSQL_ROOT_PASSWORD: EoxXraNDDYSdY5Hq
healthcheck:
test:
- CMD-SHELL
- mysqladmin ping -h localhost --password=EoxXraNDDYSdY5Hq
interval: 1s
retries: 15
image: mariadb:10.6
networks:
erpnext_network: null
volumes:
- type: volume
source: db-data
target: /var/lib/mysql
volume: {}
frontend-1:
command:
- nginx-entrypoint.sh
depends_on:
backend:
condition: service_started
websocket:
condition: service_started
environment:
BACKEND: backend:8000
CLIENT_MAX_BODY_SIZE: 50m
FRAPPE_SITE_NAME_HEADER: erp.prothmann.com
PROXY_READ_TIMOUT: "120"
SOCKETIO: websocket:9000
UPSTREAM_REAL_IP_ADDRESS: 127.0.0.1
UPSTREAM_REAL_IP_HEADER: X-Forwarded-For
UPSTREAM_REAL_IP_RECURSIVE: "off"
image: erpnexthp:1.0.0
networks:
erpnext_network: null
proxy: null
ports:
- mode: ingress
target: 8080
published: "8082"
protocol: tcp
volumes:
- type: volume
source: erpnext_data-sites
target: /home/frappe/frappe-bench/sites
volume: {}
labels:
traefik.docker.network: proxy
traefik.enable: "true"
traefik.http.routers.erpnext-secure.entrypoints: websecure
traefik.http.routers.erpnext-secure.middlewares: default@file
traefik.http.routers.erpnext-secure.rule: Host(`erp.prothmann.com`)
traefik.http.routers.erpnext-secure.service: erpnext
traefik.http.routers.erpnext-secure.tls: "true"
traefik.http.routers.erpnext-secure.tls.certresolver: http_resolver
traefik.http.routers.erpnext.entrypoints: web
traefik.http.routers.erpnext.rule: Host(`erp.prothmann.com`)
traefik.http.services.erpnext.loadbalancer.server.port: "8080"
frontend-2:
command:
- nginx-entrypoint.sh
depends_on:
backend:
condition: service_started
websocket:
condition: service_started
environment:
BACKEND: backend:8000
CLIENT_MAX_BODY_SIZE: 50m
FRAPPE_SITE_NAME_HEADER: erp.ppa.prothmann.com
PROXY_READ_TIMOUT: "120"
SOCKETIO: websocket:9000
UPSTREAM_REAL_IP_ADDRESS: 127.0.0.1
UPSTREAM_REAL_IP_HEADER: X-Forwarded-For
UPSTREAM_REAL_IP_RECURSIVE: "off"
image: erpnexthp:1.0.0
networks:
erpnext_network: null
proxy: null
ports:
- mode: ingress
target: 8080
published: "8080"
protocol: tcp
volumes:
- type: volume
source: erpnext_data-sites
target: /home/frappe/frappe-bench/sites
volume: {}
labels:
traefik.docker.network: proxy
traefik.enable: "true"
traefik.http.routers.erpnext-ppa-secure.entrypoints: websecure
traefik.http.routers.erpnext-ppa-secure.middlewares: default@file
traefik.http.routers.erpnext-ppa-secure.rule: Host(`erp.ppa.prothmann.com`)
traefik.http.routers.erpnext-ppa-secure.service: erpnext-ppa
traefik.http.routers.erpnext-ppa-secure.tls: "true"
traefik.http.routers.erpnext-ppa-secure.tls.certresolver: http_resolver
traefik.http.routers.erpnext-ppa.entrypoints: web
traefik.http.routers.erpnext-ppa.rule: Host(`erp.ppa.prothmann.com`)
traefik.http.services.erpnext-ppa.loadbalancer.server.port: "8080"
queue-default:
command:
- bench
- worker
- --queue
- default
depends_on:
configurator:
condition: service_completed_successfully
image: erpnexthp:1.0.0
networks:
erpnext_network: null
volumes:
- type: volume
source: erpnext_data-sites
target: /home/frappe/frappe-bench/sites
volume: {}
queue-long:
command:
- bench
- worker
- --queue
- long
depends_on:
configurator:
condition: service_completed_successfully
image: erpnexthp:1.0.0
networks:
erpnext_network: null
volumes:
- type: volume
source: erpnext_data-sites
target: /home/frappe/frappe-bench/sites
volume: {}
queue-short:
command:
- bench
- worker
- --queue
- short
depends_on:
configurator:
condition: service_completed_successfully
image: erpnexthp:1.0.0
networks:
erpnext_network: null
volumes:
- type: volume
source: erpnext_data-sites
target: /home/frappe/frappe-bench/sites
volume: {}
redis-cache:
image: redis:6.2-alpine
networks:
erpnext_network: null
volumes:
- type: volume
source: redis-cache-data
target: /data
volume: {}
redis-queue:
image: redis:6.2-alpine
networks:
erpnext_network: null
volumes:
- type: volume
source: redis-queue-data
target: /data
volume: {}
redis-socketio:
image: redis:6.2-alpine
networks:
erpnext_network: null
volumes:
- type: volume
source: redis-socketio-data
target: /data
volume: {}
scheduler:
command:
- bench
- schedule
depends_on:
configurator:
condition: service_completed_successfully
image: erpnexthp:1.0.0
networks:
erpnext_network: null
volumes:
- type: volume
source: erpnext_data-sites
target: /home/frappe/frappe-bench/sites
volume: {}
websocket:
command:
- node
- /home/frappe/frappe-bench/apps/frappe/socketio.js
depends_on:
configurator:
condition: service_completed_successfully
image: erpnexthp:1.0.0
networks:
erpnext_network: null
volumes:
- type: volume
source: erpnext_data-sites
target: /home/frappe/frappe-bench/sites
volume: {}
networks:
erpnext_network:
name: erpnext_network
driver: bridge
ipam:
config:
- subnet: 172.33.0.0/16
attachable: true
proxy:
external: true
volumes:
db-data:
name: frappe_docker_db-data
redis-cache-data:
name: frappe_docker_redis-cache-data
redis-queue-data:
name: frappe_docker_redis-queue-data
redis-socketio-data:
name: frappe_docker_redis-socketio-data
erpnext_data-sites:
driver: local
driver_opts:
type: nfs
o: "addr=192.168.102.14,rw,nolock,nfsvers=4,async"
device: ":/data/docker-volumes/erpnext_data/sites"
x-backend-defaults:
depends_on:
configurator:
condition: service_completed_successfully
image: frappe/erpnext:v14.29.1
volumes:
- sites:/home/frappe/frappe-bench/sites
x-customizable-image:
image: erpnexthp:1.0.0
pull_policy: never
x-depends-on-configurator:
depends_on:
configurator:
condition: service_completed_successfully
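# Note: top-level keys prefixed with "x-" are Compose extension fields; docker compose
# ignores them unless they are pulled in via YAML anchors, and no anchors reference the
# three x-* blocks above in this rendered file.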
# Setting up a site:
# Change to the ERPNext docker directory (cd /srv/docker/erpnext), then run:
#
# docker compose exec backend bench new-site erp.prothmann.com --mariadb-root-password EoxXraNDDYSdY5Hq --install-app erpnext --admin-password veiph1ahrieM
#
# Building a new image: in /opt/frappe_docker, run:
# ./build.sh
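The same workflow, consolidated as a small shell sketch (site name, paths, and credentials are taken from the comments and compose file above; restoring the dumps under erpnext/transfer is not covered):

#!/usr/bin/env bash
# Sketch: create the ERPNext site, then rebuild the custom image.
set -euo pipefail

cd /srv/docker/erpnext   # compose project directory for this stack

# Create the site inside the running backend container.
docker compose exec backend bench new-site erp.prothmann.com \
  --mariadb-root-password EoxXraNDDYSdY5Hq \
  --install-app erpnext \
  --admin-password veiph1ahrieM

# Rebuild the custom erpnexthp image when apps change.
cd /opt/frappe_docker
./build.sh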


@@ -0,0 +1,12 @@
{
"db_name": "_85af828d36290e1b",
"db_password": "DHjYmLB3tRnTwcC9",
"db_type": "mariadb",
"developer_mode": 1,
"domains": [
"erp.prothmann.com",
"10.1.0.26"
],
"encryption_key": "7EGko4BbInt2DuC6mPVAQQ_dik6S1uzdSMluzy-y5r8=",
"maintenance_mode": 1
}

erpnext/transfer/db.sql.gz Normal file

Binary file not shown.

Binary file not shown.

erpnext/transfer/public.tar Normal file

Binary file not shown.

getmail Submodule

Submodule getmail added at 33facd3fb9

gitea/README.md Normal file

@@ -0,0 +1 @@
https://goneuland.de/gitea-code-hosting-mittels-docker-compose-und-traefik-installieren/

gitea/docker-compose.yml Normal file

@@ -0,0 +1,65 @@
version: "3"
services:
server:
image: gitea/gitea:latest
container_name: gitea
environment:
- USER_UID=1000
- USER_GID=1000
- GITEA__database__DB_TYPE=postgres
- GITEA__database__HOST=db:5432
- GITEA__database__NAME=gitea
- GITEA__database__USER=gitea
- GITEA__database__PASSWD=yOicC59zRKYMixZC
restart: unless-stopped
networks:
- proxy
- gitea_network
volumes:
- gitea_data:/data
- /etc/timezone:/etc/timezone:ro
- /etc/localtime:/etc/localtime:ro
ports:
- "222:22"
depends_on:
- db
labels:
- "traefik.enable=true"
- "traefik.http.routers.gitea.entrypoints=websecure"
- "traefik.http.routers.gitea.rule=(Host(`git.prothmann.com`))"
- "traefik.http.routers.gitea.tls=true"
- "traefik.http.routers.gitea.tls.certresolver=http_resolver"
- "traefik.http.routers.gitea.service=gitea"
- "traefik.http.services.gitea.loadbalancer.server.port=3000"
- "traefik.docker.network=proxy"
- "traefik.http.routers.gitea.middlewares=default@file"
db:
image: postgres:14
restart: always
container_name: gitea-db
environment:
- POSTGRES_USER=gitea
- POSTGRES_PASSWORD=yOicC59zRKYMixZC
- POSTGRES_DB=gitea
volumes:
- gitea_postgres:/var/lib/postgresql/data
networks:
- gitea_network
volumes:
gitea_data:
driver: local
driver_opts:
type: nfs
o: "addr=192.168.102.14,rw,nolock,nfsvers=4,async"
device: ":/data/docker-volumes/gitea_data"
gitea_postgres:
driver: local
networks:
proxy:
external: true
gitea_network:
name: gitea_network

mailcow Submodule

Submodule mailcow added at d6c3c58f42

nextcloud-aio/README.md Normal file

@@ -0,0 +1 @@
https://goneuland.de/nextcloud-all-in-one-mit-docker-compose-und-traefik-installieren/


@@ -0,0 +1,62 @@
version: "3.8"
volumes:
nextcloud_aio_mastercontainer:
name: nextcloud_aio_mastercontainer # This line is not allowed to be changed
nextcloud_aio_nextcloud_datadir:
driver: local
driver_opts:
type: nfs
o: "addr=10.4.0.14,rw,nolock,nfsvers=4,async"
device: ":/data/docker-volumes/nextcloud_data"
services:
nextcloud:
image: nextcloud/all-in-one:latest # Must be changed to 'nextcloud/all-in-one:latest-arm64' when used with an arm64 CPU
restart: unless-stopped
container_name: nextcloud-aio-mastercontainer # This line is not allowed to be changed
volumes:
- nextcloud_aio_mastercontainer:/mnt/docker-aio-config # This line is not allowed to be changed
- /var/run/docker.sock:/var/run/docker.sock:ro # May be changed on macOS, Windows or docker rootless. See the applicable documentation. If adjusting, don't forget to also set 'DOCKER_SOCKET_PATH'!
ports:
- 8081:8080
environment:
# Is needed when using any of the options below
- APACHE_PORT=11000 # Is needed when running behind a reverse proxy. See https://github.com/nextcloud/all-in-one/blob/main/reverse-proxy.md
# - APACHE_IP_BINDING=0.0.0.0 # Should be set when running behind a reverse proxy that is running on the same host. See https://github.com/nextcloud/all-in-one/blob/main/reverse-proxy.md
# - COLLABORA_SECCOMP_DISABLED=false # Setting this to true allows to disable Collabora's Seccomp feature. See https://github.com/nextcloud/all-in-one#how-to-disable-collaboras-seccomp-feature
# - DOCKER_SOCKET_PATH=/var/run/docker.sock # Needs to be specified if the docker socket on the host is not located in the default '/var/run/docker.sock'. Otherwise mastercontainer updates will fail. For macos it needs to be '/var>
# - DISABLE_BACKUP_SECTION=false # Setting this to true allows to hide the backup section in the AIO interface.
- NEXTCLOUD_DATADIR=nextcloud_aio_nextcloud_datadir # Allows to set the host directory for Nextcloud's datadir. See https://github.com/nextcloud/all-in-one#how-to-change-the-default-location-of-nextclouds-datadir
# - NEXTCLOUD_MOUNT=/mnt/ # Allows the Nextcloud container to access the chosen directory on the host. See https://github.com/nextcloud/all-in-one#how-to-allow-the-nextcloud-container-to-access-directories-on-the-host
# - NEXTCLOUD_UPLOAD_LIMIT=10G # Can be adjusted if you need more. See https://github.com/nextcloud/all-in-one#how-to-adjust-the-upload-limit-for-nextcloud
# - NEXTCLOUD_MAX_TIME=3600 # Can be adjusted if you need more. See https://github.com/nextcloud/all-in-one#how-to-adjust-the-max-execution-time-for-nextcloud
# - NEXTCLOUD_MEMORY_LIMIT=4096M # Can be adjusted if you need more. See https://github.com/nextcloud/all-in-one#how-to-adjust-the-php-memory-limit-for-nextcloud
# - NEXTCLOUD_TRUSTED_CACERTS_DIR=/path/to/my/cacerts # CA certificates in this directory will be trusted by the OS of the nextcloud container (Useful e.g. for LDAPS) See https://github.com/nextcloud/all-in-one#how-to-trust-use>
# - NEXTCLOUD_STARTUP_APPS=deck twofactor_totp tasks calendar contacts # Allows to modify the Nextcloud apps that are installed on starting AIO the first time. See https://github.com/nextcloud/all-in-one#how-to-change-the-nextclou>
- NEXTCLOUD_ADDITIONAL_APKS=imagemagick libpq-dev # This allows to add additional packages to the Nextcloud container permanently. Default is imagemagick but can be overwritten by modifying this value. See https://github.com/nextc>
- NEXTCLOUD_ADDITIONAL_PHP_EXTENSIONS=imagick pgsql # This allows to add additional php extensions to the Nextcloud container permanently. Default is imagick but can be overwritten by modifying this value. See https://github.com/n>
# - NEXTCLOUD_ENABLE_DRI_DEVICE=true # This allows to enable the /dev/dri device in the Nextcloud container which is needed for hardware-transcoding. See https://github.com/nextcloud/all-in-one#how-to-enable-hardware-transcoding-f>
# - TALK_PORT=3478 # This allows to adjust the port that the talk container is using.
labels:
- "traefik.enable=true"
- "traefik.http.routers.nextcloud.entrypoints=websecure"
- "traefik.http.routers.nextcloud.rule=(Host(`nextcloud.prothmann.com`))"
- "traefik.http.routers.nextcloud.tls=true"
- "traefik.http.routers.nextcloud.tls.certresolver=http_resolver"
- "traefik.http.routers.nextcloud.service=nextcloud@file"
- "traefik.http.services.nextcloud.loadbalancer.server.port=443"
- "traefik.docker.network=proxy"
- "traefik.http.routers.nextcloud.middlewares=default@file"
networks:
- proxy
networks:
proxy:
external: true
# Nextcloud Aio Mastercontainer pw: twilight riveter earflap spyglass cider chaperone paralyze reporter
# Borg passphrase: 6590d290de4c310e51b7d38367c25f475e9a731e6c5b1a62
#'instanceid' => 'ocvifdbk8s5l',
#'passwordsalt' => 'U5+ROpMvCcVNmExY/Q3YocyoT9jm8K',
#'secret' => '3bSChu9b1Xo3KO0HL1O++aQ5IcGCm8YEXDQfpLKInrIdUaYH',
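The router label above points at 'nextcloud@file', i.e. a service that has to be defined in Traefik's file provider, which is not part of this commit. A hypothetical fragment (file path and host IP are placeholders, not taken from this repo) that forwards to the APACHE_PORT=11000 exposed by the AIO Apache could look like this:

# write a dynamic-configuration fragment for Traefik's file provider (hypothetical path)
cat > /path/to/traefik/dynamic/nextcloud.yml <<'EOF'
http:
  services:
    nextcloud:
      loadBalancer:
        servers:
          - url: "http://<docker-host-ip>:11000"  # APACHE_PORT from the compose file above
EOF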


@@ -0,0 +1,31 @@
version: '3.3'
services:
phpmyadmin:
image: phpmyadmin
restart: unless-stopped
environment:
MYSQL_ROOT_PASSWORD: EoxXraNDDYSdY5Hq
PMA_ARBITRARY: 1
PMA_HOST: 172.33.0.4
networks:
- proxy
- erpnext_network
labels:
traefik.docker.network: proxy
traefik.enable: "true"
traefik.http.routers.phpmyadmin-secure.entrypoints: websecure
traefik.http.routers.phpmyadmin-secure.middlewares: default@file
traefik.http.routers.phpmyadmin-secure.rule: Host(`phpmyadmin.prothmann.com`)
traefik.http.routers.phpmyadmin-secure.service: phpmyadmin
traefik.http.routers.phpmyadmin-secure.tls: "true"
traefik.http.routers.phpmyadmin-secure.tls.certresolver: http_resolver
traefik.http.routers.phpmyadmin.entrypoints: web
traefik.http.routers.phpmyadmin.rule: Host(`phpmyadmin.prothmann.com`)
traefik.http.services.phpmyadmin.loadbalancer.server.port: "80"
networks:
proxy:
external: true
erpnext_network:
external: true


@@ -0,0 +1,39 @@
version: '3'
services:
portainer:
image: portainer/portainer:latest
container_name: portainer
restart: unless-stopped
security_opt:
- no-new-privileges:true
networks:
- proxy
volumes:
- /etc/localtime:/etc/localtime:ro
- /var/run/docker.sock:/var/run/docker.sock:ro
- portainer_data:/data
labels:
traefik.docker.network: proxy
traefik.enable: "true"
traefik.http.routers.portainer-secure.entrypoints: websecure
traefik.http.routers.portainer-secure.middlewares: default@file
traefik.http.routers.portainer-secure.rule: Host(`portainer.prothmann.com`)
traefik.http.routers.portainer-secure.service: portainer
traefik.http.routers.portainer-secure.tls: "true"
traefik.http.routers.portainer-secure.tls.certresolver: http_resolver
traefik.http.routers.portainer.entrypoints: web
traefik.http.routers.portainer.rule: Host(`portainer.prothmann.com`)
traefik.http.services.portainer.loadbalancer.server.port: "9000"
volumes:
portainer_data:
driver: local
driver_opts:
type: nfs
o: "addr=192.168.102.14,rw,nolock,nfsvers=4,async"
device: ":/data/docker-volumes/portainer_data"
networks:
proxy:
external: true


@@ -0,0 +1,28 @@
# Service Crowdsec
SERVICES_CROWDSEC_CONTAINER_NAME=crowdsec
SERVICES_CROWDSEC_HOSTNAME=crowdsec
SERVICES_CROWDSEC_IMAGE=crowdsecurity/crowdsec
SERVICES_CROWDSEC_IMAGE_VERSION=latest
SERVICES_CROWDSEC_NETWORKS_CROWDSEC_IPV4=172.31.254.254
# Service Traefik
SERVICES_TRAEFIK_CONTAINER_NAME=traefik
SERVICES_TRAEFIK_HOSTNAME=traefik
SERVICES_TRAEFIK_IMAGE=traefik
SERVICES_TRAEFIK_IMAGE_VERSION=2.10
SERVICES_TRAEFIK_LABELS_TRAEFIK_HOST=`traefik.prothmann.com`
SERVICES_TRAEFIK_NETWORKS_CROWDSEC_IPV4=172.31.254.253
SERVICES_TRAEFIK_NETWORKS_PROXY_IPV4=172.30.255.254
# Service Traefik Crowdsec Bouncer
SERVICES_TRAEFIK_CROWDSEC_BOUNCER_CONTAINER_NAME=traefik_crowdsec_bouncer
SERVICES_TRAEFIK_CROWDSEC_BOUNCER_HOSTNAME=traefik-crowdsec-bouncer
SERVICES_TRAEFIK_CROWDSEC_BOUNCER_IMAGE=fbonalair/traefik-crowdsec-bouncer
SERVICES_TRAEFIK_CROWDSEC_BOUNCER_IMAGE_VERSION=latest
SERVICES_TRAEFIK_CROWDSEC_BOUNCER_NETWORKS_CROWDSEC_IPV4=172.31.254.252
# Network settings
NETWORKS_PROXY_NAME=proxy
NETWORKS_PROXY_SUBNET_IPV4=172.30.0.0/16
NETWORKS_CROWDSEC_NAME=crowdsec
NETWORKS_CROWDSEC_SUBNET_IPV4=172.31.0.0/16
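The compose file that consumes these SERVICES_* and NETWORKS_* variables is not readable in this commit view, so purely as a hypothetical illustration of how such a .env is typically referenced (this is not the actual file):

# render the stack with the .env values substituted, to check the interpolation
docker compose --env-file .env config | head -40

# a fragment like the following would pick the values up:
#   services:
#     traefik:
#       container_name: ${SERVICES_TRAEFIK_CONTAINER_NAME}
#       image: ${SERVICES_TRAEFIK_IMAGE}:${SERVICES_TRAEFIK_IMAGE_VERSION}
#       networks:
#         proxy:
#           ipv4_address: ${SERVICES_TRAEFIK_NETWORKS_PROXY_IPV4}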


@@ -0,0 +1 @@
https://goneuland.de/traefik-v2-3-reverse-proxy-mit-crowdsec-im-stack-einrichten/#32_docker-composeyml_anlegen


@@ -0,0 +1,2 @@
PGID="1000"
COLLECTIONS="crowdsecurity/traefik crowdsecurity/http-cve crowdsecurity/whitelist-good-actors crowdsecurity/postfix crowdsecurity/dovecot crowdsecurity/nginx"


@@ -0,0 +1,4 @@
# Access token so that the bouncer and CrowdSec can communicate
CROWDSEC_BOUNCER_API_KEY=2af497632319f6d35623c43b69cd7c15
# CrowdSec hostname with the correct port
CROWDSEC_AGENT_HOST=${SERVICES_CROWDSEC_HOSTNAME}:8080
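For reference, a bouncer key like the one above is normally issued by the CrowdSec agent itself; a minimal sketch, assuming the service is reachable as 'crowdsec' inside the stack (the exact compose invocation is an assumption):

# register the bouncer with the local API and print a fresh key for CROWDSEC_BOUNCER_API_KEY
docker compose exec crowdsec cscli bouncers add traefik-crowdsec-bouncer
# verify that the bouncer is registered
docker compose exec crowdsec cscli bouncers list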


@@ -0,0 +1,11 @@
filenames:
- /var/log/auth.log
- /var/log/syslog
labels:
type: syslog
---
filenames:
- /var/log/traefik/*.log
labels:
type: traefik
---


@@ -0,0 +1 @@
/etc/crowdsec/hub/collections/crowdsecurity/base-http-scenarios.yaml


@@ -0,0 +1 @@
/etc/crowdsec/hub/collections/crowdsecurity/dovecot.yaml


@@ -0,0 +1 @@
/etc/crowdsec/hub/collections/crowdsecurity/http-cve.yaml


@@ -0,0 +1 @@
/etc/crowdsec/hub/collections/crowdsecurity/nginx.yaml


@@ -0,0 +1 @@
/etc/crowdsec/hub/collections/crowdsecurity/postfix.yaml


@@ -0,0 +1 @@
/etc/crowdsec/hub/collections/crowdsecurity/traefik.yaml


@@ -0,0 +1 @@
/etc/crowdsec/hub/collections/crowdsecurity/whitelist-good-actors.yaml


@@ -0,0 +1,49 @@
common:
daemonize: false
log_media: stdout
log_level: info
log_dir: /var/log/
working_dir: .
config_paths:
config_dir: /etc/crowdsec/
data_dir: /var/lib/crowdsec/data/
simulation_path: /etc/crowdsec/simulation.yaml
hub_dir: /etc/crowdsec/hub/
index_path: /etc/crowdsec/hub/.index.json
notification_dir: /etc/crowdsec/notifications/
plugin_dir: /usr/local/lib/crowdsec/plugins/
crowdsec_service:
acquisition_path: /etc/crowdsec/acquis.yaml
acquisition_dir: /etc/crowdsec/acquis.d
parser_routines: 1
plugin_config:
user: nobody
group: nobody
cscli:
output: human
db_config:
log_level: info
type: sqlite
db_path: /var/lib/crowdsec/data/crowdsec.db
flush:
max_items: 5000
max_age: 7d
use_wal: false
api:
client:
insecure_skip_verify: false
credentials_path: /etc/crowdsec/local_api_credentials.yaml
server:
log_level: info
listen_uri: 0.0.0.0:8080
profiles_path: /etc/crowdsec/profiles.yaml
trusted_ips: # IP ranges, or IPs which can have admin API access
- 127.0.0.1
- ::1
online_client: # Central API credentials (to push signals and receive bad IPs)
credentials_path: /etc/crowdsec//online_api_credentials.yaml
prometheus:
enabled: true
level: full
listen_addr: 0.0.0.0
listen_port: 6060


@@ -0,0 +1,4 @@
share_manual_decisions: false
share_custom: true
share_tainted: true
share_context: false


@@ -0,0 +1,47 @@
common:
daemonize: true
log_media: stdout
log_level: info
working_dir: .
config_paths:
config_dir: ./config
data_dir: ./data/
notification_dir: ./config/notifications/
plugin_dir: ./plugins/
#simulation_path: /etc/crowdsec/config/simulation.yaml
#hub_dir: /etc/crowdsec/hub/
#index_path: ./config/hub/.index.json
crowdsec_service:
acquisition_path: ./config/acquis.yaml
parser_routines: 1
plugin_config:
user: $USER # plugin process would be ran on behalf of this user
group: $USER # plugin process would be ran on behalf of this group
cscli:
output: human
db_config:
type: sqlite
db_path: ./data/crowdsec.db
user: root
password: crowdsec
db_name: crowdsec
host: "172.17.0.2"
port: 3306
flush:
#max_items: 10000
#max_age: 168h
api:
client:
credentials_path: ./config/local_api_credentials.yaml
server:
#insecure_skip_verify: true
listen_uri: 127.0.0.1:8081
profiles_path: ./config/profiles.yaml
tls:
#cert_file: ./cert.pem
#key_file: ./key.pem
online_client: # Central API
credentials_path: ./config/online_api_credentials.yaml
prometheus:
enabled: true
level: full

File diff suppressed because one or more lines are too long


@@ -0,0 +1,25 @@
parsers:
- crowdsecurity/http-logs
scenarios:
- crowdsecurity/http-crawl-non_statics
- crowdsecurity/http-probing
- crowdsecurity/http-bad-user-agent
- crowdsecurity/http-path-traversal-probing
- crowdsecurity/http-sensitive-files
- crowdsecurity/http-sqli-probing
- crowdsecurity/http-xss-probing
- crowdsecurity/http-backdoors-attempts
- ltsich/http-w00tw00t
- crowdsecurity/http-generic-bf
- crowdsecurity/http-open-proxy
collections:
- crowdsecurity/http-cve
description: "http common : scanners detection"
author: crowdsecurity
tags:
- linux
- http
- crawl
- scan


@@ -0,0 +1,10 @@
parsers:
- crowdsecurity/dovecot-logs
scenarios:
- crowdsecurity/dovecot-spam
description: "dovecot support : parser and spammer detection"
author: crowdsecurity
tags:
- linux
- spam
- bruteforce


@@ -0,0 +1,30 @@
scenarios:
- crowdsecurity/http-cve-2021-41773
- crowdsecurity/http-cve-2021-42013
- crowdsecurity/grafana-cve-2021-43798
- crowdsecurity/vmware-vcenter-vmsa-2021-0027
- crowdsecurity/fortinet-cve-2018-13379
- crowdsecurity/pulse-secure-sslvpn-cve-2019-11510
- crowdsecurity/f5-big-ip-cve-2020-5902
- crowdsecurity/thinkphp-cve-2018-20062
- crowdsecurity/apache_log4j2_cve-2021-44228
- crowdsecurity/jira_cve-2021-26086
- crowdsecurity/spring4shell_cve-2022-22965
- crowdsecurity/vmware-cve-2022-22954
- crowdsecurity/CVE-2022-37042
- crowdsecurity/CVE-2022-41082
- crowdsecurity/CVE-2022-35914
- crowdsecurity/CVE-2022-40684
- crowdsecurity/CVE-2022-26134
- crowdsecurity/CVE-2022-42889
- crowdsecurity/CVE-2022-41697
- crowdsecurity/CVE-2022-46169
- crowdsecurity/CVE-2022-44877
- crowdsecurity/CVE-2019-18935
- crowdsecurity/netgear_rce
author: crowdsecurity
tags:
- web
- exploit
- cve
- http


@@ -0,0 +1,11 @@
parsers:
- crowdsecurity/syslog-logs
- crowdsecurity/geoip-enrich
- crowdsecurity/dateparse-enrich
collections:
- crowdsecurity/sshd
description: "core linux support : syslog+geoip+ssh"
author: crowdsecurity
tags:
- linux


@@ -0,0 +1,15 @@
parsers:
#generic post-parsing of http stuff
- crowdsecurity/nginx-logs
collections:
- crowdsecurity/base-http-scenarios
scenarios:
- crowdsecurity/nginx-req-limit-exceeded
description: "nginx support : parser and generic http scenarios"
author: crowdsecurity
tags:
- linux
- nginx
- crawl
- scan


@@ -0,0 +1,11 @@
parsers:
- crowdsecurity/postfix-logs
- crowdsecurity/postscreen-logs
scenarios:
- crowdsecurity/postfix-spam
description: "postfix support : parser and spammer detection"
author: crowdsecurity
tags:
- linux
- spam
- bruteforce


@@ -0,0 +1,12 @@
parsers:
- crowdsecurity/sshd-logs
scenarios:
- crowdsecurity/ssh-bf
- crowdsecurity/ssh-slow-bf
description: "sshd support : parser and brute-force detection"
author: crowdsecurity
tags:
- linux
- ssh
- bruteforce


@@ -0,0 +1,12 @@
# co-authored with gmelodie (https://github.com/gmelodie)
parsers:
- crowdsecurity/traefik-logs
collections:
- crowdsecurity/base-http-scenarios
description: "traefik support: parser and generic http scenarios"
author: crowdsecurity
tags:
- traefik
- http
- bruteforce


@@ -0,0 +1,10 @@
postoverflows:
- crowdsecurity/seo-bots-whitelist
- crowdsecurity/cdn-whitelist
- crowdsecurity/rdns
description: "Good actors whitelists"
author: crowdsecurity
tags:
- whitelist
- bots
- partners


@@ -0,0 +1,19 @@
filter: "evt.Line.Labels.type == 'containerd'"
onsuccess: next_stage
name: crowdsecurity/cri-logs
description: CRI logging format parser
nodes:
- grok:
pattern: "^%{TIMESTAMP_ISO8601:cri_timestamp} %{WORD:stream} %{WORD:logtag} %{GREEDYDATA:message}"
apply_on: Line.Raw
statics:
- parsed: "logsource"
value: "cri"
- target: evt.StrTime
expression: evt.Parsed.cri_timestamp
- parsed: program
expression: evt.Line.Labels.program
- meta: datasource_path
expression: evt.Line.Src
- meta: datasource_type
expression: evt.Line.Module


@@ -0,0 +1,12 @@
#If it's docker, we are going to extract log line from it
filter: "evt.Line.Labels.type == 'docker'"
onsuccess: next_stage
name: crowdsecurity/docker-logs
description: docker json logs parser
statics:
- target: evt.StrTime
expression: JsonExtract(evt.Line.Raw, "time")
- parsed: message
expression: JsonExtractUnescape(evt.Line.Raw, "log")
- parsed: program
expression: evt.Line.Labels.program


@@ -0,0 +1,48 @@
#If it's syslog, we are going to extract progname from it
filter: "evt.Line.Labels.type == 'syslog'"
onsuccess: next_stage
pattern_syntax:
RAW_SYSLOG_PREFIX: '^<%{NUMBER:stuff1}>%{NUMBER:stuff2} %{SYSLOGBASE2} %{DATA:program} %{NUMBER:pid}'
RAW_SYSLOG_META: '\[meta sequenceId="%{NOTDQUOTE:seq_id}"\]'
name: crowdsecurity/syslog-logs
nodes:
- grok:
#this is a named regular expression. grok patterns can be kept in separate files for readability
pattern: "^%{SYSLOGLINE}"
#This is the field of the `Event` to which the regexp should be applied
apply_on: Line.Raw
- grok:
#a second pattern for unparsed syslog lines, as seen in OPNsense
pattern: '%{RAW_SYSLOG_PREFIX} - %{RAW_SYSLOG_META} %{GREEDYDATA:message}'
apply_on: Line.Raw
#if the node was successful, statics will be applied.
statics:
- meta: machine
expression: evt.Parsed.logsource
- parsed: "logsource"
value: "syslog"
# syslog date can be in two different fields (one of the assignments will fail)
- target: evt.StrTime
expression: evt.Parsed.timestamp
- target: evt.StrTime
expression: evt.Parsed.timestamp8601
- meta: datasource_path
expression: evt.Line.Src
- meta: datasource_type
expression: evt.Line.Module
---
#if it's not syslog, the type is the progname
filter: "evt.Line.Labels.type != 'syslog'"
onsuccess: next_stage
name: crowdsecurity/non-syslog
#debug: true
statics:
- parsed: message
expression: evt.Line.Raw
- parsed: program
expression: evt.Line.Labels.type
- meta: datasource_path
expression: evt.Line.Src
- meta: datasource_type
expression: evt.Line.Module


@@ -0,0 +1,26 @@
#contribution by @ltsich
onsuccess: next_stage
debug: false
filter: "evt.Parsed.program == 'dovecot'"
name: crowdsecurity/dovecot-logs
description: "Parse dovecot logs"
nodes:
- grok:
pattern: "%{WORD:protocol}-login: %{DATA:dovecot_login_message}: user=<%{DATA:dovecot_user}>.*, rip=%{IP:dovecot_remote_ip}, lip=%{IP:dovecot_local_ip}"
apply_on: message
- grok:
pattern: "auth-worker\\(%{INT}\\): %{WORD:dovecot_user_backend}\\(%{DATA:dovecot_user},%{IP:dovecot_remote_ip},?%{DATA}\\): (%{DATA}: )?%{DATA:dovecot_login_message}$"
apply_on: message
- grok:
pattern: "auth-worker\\(%{INT}\\): (Info: )?conn unix:auth-worker \\(pid=%{INT},uid=%{INT}\\): auth-worker<%{INT}>: %{WORD:dovecot_user_backend}\\(%{DATA:dovecot_user},%{IP:dovecot_remote_ip},?%{DATA}\\): (%{DATA}: )?%{DATA:dovecot_login_message}$"
apply_on: message
- grok:
pattern: "auth: passwd-file\\(%{DATA:dovecot_user},%{IP:dovecot_remote_ip}\\): (%{DATA}: )?%{DATA:dovecot_login_message}$"
apply_on: message
statics:
- meta: log_type
value: dovecot_logs
- meta: source_ip
expression: "evt.Parsed.dovecot_remote_ip"
- meta: dovecot_login_result
expression: "any(['Authentication failure', 'Password mismatch', 'password mismatch', 'auth failed', 'unknown user'], {evt.Parsed.dovecot_login_message contains #}) ? 'auth_failed' : ''"


@@ -0,0 +1,70 @@
filter: "evt.Parsed.program startsWith 'nginx'"
onsuccess: next_stage
name: crowdsecurity/nginx-logs
description: "Parse nginx access and error logs"
pattern_syntax:
NGCUSTOMUSER: '[a-zA-Z0-9\.\@\-\+_%]+'
nodes:
- grok:
pattern: '(%{IPORHOST:target_fqdn} )?%{IPORHOST:remote_addr} - %{NGCUSTOMUSER:remote_user}? \[%{HTTPDATE:time_local}\] "%{WORD:verb} %{DATA:request} HTTP/%{NUMBER:http_version}" %{NUMBER:status} %{NUMBER:body_bytes_sent} "%{NOTDQUOTE:http_referer}" "%{NOTDQUOTE:http_user_agent}"( %{NUMBER:request_length} %{NUMBER:request_time} \[%{DATA:proxy_upstream_name}\] \[%{DATA:proxy_alternative_upstream_name}\])?'
apply_on: message
statics:
- meta: log_type
value: http_access-log
- target: evt.StrTime
expression: evt.Parsed.time_local
- grok:
# and this one parses the error log
pattern: '(%{IPORHOST:target_fqdn} )?%{NGINXERRTIME:time} \[%{LOGLEVEL:loglevel}\] %{NONNEGINT:pid}#%{NONNEGINT:tid}: (\*%{NONNEGINT:cid} )?%{GREEDYDATA:message}, client: %{IPORHOST:remote_addr}, server: %{DATA:target_fqdn}, request: "%{WORD:verb} ([^/]+)?%{URIPATHPARAM:request}( HTTP/%{NUMBER:http_version})?", host: "%{IPORHOST}(:%{NONNEGINT})?"'
apply_on: message
statics:
- meta: log_type
value: http_error-log
- target: evt.StrTime
expression: evt.Parsed.time
pattern_syntax:
NO_DOUBLE_QUOTE: '[^"]+'
onsuccess: next_stage
nodes:
- filter: "evt.Parsed.message contains 'was not found in'"
pattern_syntax:
USER_NOT_FOUND: 'user "%{NO_DOUBLE_QUOTE:username}" was not found in "%{NO_DOUBLE_QUOTE}"'
grok:
pattern: '%{USER_NOT_FOUND}'
apply_on: message
statics:
- meta: sub_type
value: "auth_fail"
- meta: username
expression: evt.Parsed.username
- filter: "evt.Parsed.message contains 'password mismatch'"
pattern_syntax:
PASSWORD_MISMATCH: 'user "%{NO_DOUBLE_QUOTE:username}": password mismatch'
grok:
pattern: '%{PASSWORD_MISMATCH}'
apply_on: message
statics:
- meta: sub_type
value: "auth_fail"
- meta: username
expression: evt.Parsed.username
- filter: "evt.Parsed.message contains 'limiting requests, excess'"
statics:
- meta: sub_type
value: "req_limit_exceeded"
# these apply to both grok patterns
statics:
- meta: service
value: http
- meta: source_ip
expression: "evt.Parsed.remote_addr"
- meta: http_status
expression: "evt.Parsed.status"
- meta: http_path
expression: "evt.Parsed.request"
- meta: http_verb
expression: "evt.Parsed.verb"
- meta: http_user_agent
expression: "evt.Parsed.http_user_agent"
- meta: target_fqdn
expression: "evt.Parsed.target_fqdn"


@@ -0,0 +1,61 @@
# Copyright (c) 2014, 2015, Rudy Gevaert
# Copyright (c) 2020 Crowdsec
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Some of the groks used here are from https://github.com/rgevaert/grok-patterns/blob/master/grok.d/postfix_patterns
onsuccess: next_stage
filter: "evt.Parsed.program in ['postfix/smtpd','postfix/smtps/smtpd','postfix/submission/smtpd', 'postfix/smtps-haproxy/smtpd', 'postfix/submission-haproxy/smtpd']"
name: crowdsecurity/postfix-logs
pattern_syntax:
POSTFIX_HOSTNAME: '(%{HOSTNAME}|unknown)'
POSTFIX_COMMAND: '(AUTH|STARTTLS|CONNECT|EHLO|HELO|RCPT)'
POSTFIX_ACTION: 'discard|dunno|filter|hold|ignore|info|prepend|redirect|replace|reject|warn'
RELAY: '(?:%{HOSTNAME:remote_host}(?:\[%{IP:remote_addr}\](?::[0-9]+(.[0-9]+)?)?)?)'
description: "Parse postfix logs"
nodes:
- grok:
apply_on: message
pattern: 'lost connection after %{DATA:smtp_response} from %{RELAY}'
statics:
- meta: log_type_enh
value: spam-attempt
- grok:
apply_on: message
pattern: 'warning: %{POSTFIX_HOSTNAME:remote_host}\[%{IP:remote_addr}\]: SASL ((?i)LOGIN|PLAIN|(?:CRAM|DIGEST)-MD5) authentication failed:%{GREEDYDATA:message_failure}'
statics:
- meta: log_type_enh
value: spam-attempt
- grok:
apply_on: message
pattern: 'NOQUEUE: %{POSTFIX_ACTION:action}: %{DATA:command} from %{RELAY}: %{GREEDYDATA:reason}'
statics:
- meta: action
expression: "evt.Parsed.action"
statics:
- meta: service
value: postfix
- meta: source_ip
expression: "evt.Parsed.remote_addr"
- meta: source_hostname
expression: "evt.Parsed.remote_host"
- meta: log_type
value: postfix


@@ -0,0 +1,20 @@
onsuccess: next_stage
filter: "evt.Parsed.program in ['postfix/postscreen', 'haproxy/postscreen']"
name: crowdsecurity/postscreen-logs
pattern_syntax:
POSTSCREEN_PREGREET: 'PREGREET'
POSTSCREEN_PREGREET_TIME_ATTEMPT: '\d+.\d+'
description: "Parse postscreen logs"
nodes:
- grok:
apply_on: message
pattern: '%{POSTSCREEN_PREGREET:pregreet} %{INT:count} after %{POSTSCREEN_PREGREET_TIME_ATTEMPT:time_attempt} from \[%{IP:remote_addr}\]:%{INT:port}: %{GREEDYDATA:message_attempt}'
statics:
- meta: service
value: postscreen
- meta: source_ip
expression: "evt.Parsed.remote_addr"
- meta: pregreet
expression: "evt.Parsed.pregreet"


@@ -0,0 +1,100 @@
onsuccess: next_stage
#debug: true
filter: "evt.Parsed.program == 'sshd'"
name: crowdsecurity/sshd-logs
description: "Parse openSSH logs"
pattern_syntax:
# The IP grok pattern that ships with crowdsec is buggy and does not capture the last digit of an IP if it is the last thing it matches, and the last octet starts with a 2
# https://github.com/crowdsecurity/crowdsec/issues/938
IPv4_WORKAROUND: (?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)
IP_WORKAROUND: (?:%{IPV6}|%{IPv4_WORKAROUND})
SSHD_AUTH_FAIL: 'pam_%{DATA:pam_type}\(sshd:auth\): authentication failure; logname= uid=%{NUMBER:uid}? euid=%{NUMBER:euid}? tty=ssh ruser= rhost=%{IP_WORKAROUND:sshd_client_ip}( %{SPACE}user=%{USERNAME:sshd_invalid_user})?'
SSHD_MAGIC_VALUE_FAILED: 'Magic value check failed \(\d+\) on obfuscated handshake from %{IP_WORKAROUND:sshd_client_ip} port \d+'
SSHD_INVALID_USER: 'Invalid user\s*%{USERNAME:sshd_invalid_user}? from %{IP_WORKAROUND:sshd_client_ip}( port \d+)?'
SSHD_INVALID_BANNER: 'banner exchange: Connection from %{IP_WORKAROUND:sshd_client_ip} port \d+: invalid format'
SSHD_PREAUTH_AUTHENTICATING_USER: 'Connection closed by (authenticating|invalid) user %{USERNAME:sshd_invalid_user} %{IP_WORKAROUND:sshd_client_ip} port \d+ \[preauth\]'
#following: https://github.com/crowdsecurity/crowdsec/issues/1201 - some scanners behave differently and trigger this one
SSHD_PREAUTH_AUTHENTICATING_USER_ALT: 'Disconnected from (authenticating|invalid) user %{USERNAME:sshd_invalid_user} %{IP_WORKAROUND:sshd_client_ip} port \d+ \[preauth\]'
SSHD_BAD_KEY_NEGOTIATION: 'Unable to negotiate with %{IP_WORKAROUND:sshd_client_ip} port \d+: no matching (host key type|key exchange method|MAC) found.'
nodes:
- grok:
name: "SSHD_FAIL"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: target_user
expression: "evt.Parsed.sshd_invalid_user"
- grok:
name: "SSHD_PREAUTH_AUTHENTICATING_USER_ALT"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: target_user
expression: "evt.Parsed.sshd_invalid_user"
- grok:
name: "SSHD_PREAUTH_AUTHENTICATING_USER"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: target_user
expression: "evt.Parsed.sshd_invalid_user"
- grok:
name: "SSHD_DISC_PREAUTH"
apply_on: message
- grok:
name: "SSHD_BAD_VERSION"
apply_on: message
- grok:
name: "SSHD_INVALID_USER"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: target_user
expression: "evt.Parsed.sshd_invalid_user"
- grok:
name: "SSHD_INVALID_BANNER"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: extra_log_type
value: ssh_bad_banner
- grok:
name: "SSHD_USER_FAIL"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: target_user
expression: "evt.Parsed.sshd_invalid_user"
- grok:
name: "SSHD_AUTH_FAIL"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: target_user
expression: "evt.Parsed.sshd_invalid_user"
- grok:
name: "SSHD_MAGIC_VALUE_FAILED"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: target_user
expression: "evt.Parsed.sshd_invalid_user"
- grok:
name: "SSHD_BAD_KEY_NEGOTIATION"
apply_on: message
statics:
- meta: log_type
value: ssh_bad_keyexchange
statics:
- meta: service
value: ssh
- meta: source_ip
expression: "evt.Parsed.sshd_client_ip"


@@ -0,0 +1,69 @@
# co-authored with gmelodie (https://github.com/gmelodie)
name: crowdsecurity/traefik-logs
description: "Parse Traefik access logs"
filter: "evt.Parsed.program startsWith 'traefik'"
#debug: true
onsuccess: next_stage
pattern_syntax:
TRAEFIK_ROUTER: '(%{USER}@%{URIHOST}|\-)'
TRAEFIK_SERVER_URL: '(%{URI}|\-)'
NUMBER_MINUS: '[0-9-]+'
NGCUSTOMUSER: '[a-zA-Z0-9\.\@\-\+_%]+'
NGINXACCESS2: '%{IPORHOST:remote_addr} - %{NGCUSTOMUSER:remote_user} \[%{HTTPDATE:time_local}\] "%{WORD:verb} %{DATA:request} HTTP/%{NUMBER:http_version}" %{NUMBER_MINUS:status} %{NUMBER_MINUS:body_bytes_sent} "%{NOTDQUOTE:http_referer}" "%{NOTDQUOTE:http_user_agent}"'
nodes:
- grok: # CLF parser
pattern: '%{NGINXACCESS2} %{NUMBER:number_of_requests_received_since_traefik_started} "%{TRAEFIK_ROUTER:traefik_router_name}" "%{TRAEFIK_SERVER_URL:traefik_server_url}" %{NUMBER:request_duration_in_ms}ms'
apply_on: message
- filter: UnmarshalJSON(evt.Line.Raw, evt.Unmarshaled, "traefik") in ["", nil]
statics:
- parsed: remote_addr
expression: evt.Unmarshaled.traefik.ClientHost
- parsed: dest_addr
## Split dest_addr to get IP only as this is original functionality
expression: Split(evt.Unmarshaled.traefik.ClientAddr, ':')[0]
- parsed: request_addr
expression: evt.Unmarshaled.traefik.RequestAddr
- parsed: service_addr
## Split service_addr to get IP only as this is original functionality
expression: "evt.Unmarshaled.traefik.ServiceAddr != nil ? Split(evt.Unmarshaled.traefik.ServiceAddr, ':')[0] : nil"
- parsed: http_user_agent
expression: evt.Unmarshaled.traefik["request_User-Agent"] ## We have to access via [] as the key contains a dash
- parsed: body_bytes_sent
## We have to check if DownstreamContentSize is nil, as it will cause EXPR error if it is
expression: "evt.Unmarshaled.traefik.DownstreamContentSize != nil ? int(evt.Unmarshaled.traefik.DownstreamContentSize) : nil"
- parsed: request_duration_in_ms
expression: int(evt.Unmarshaled.traefik.Duration)
- parsed: traefik_router_name
expression: evt.Unmarshaled.traefik.RouterName
- parsed: time_local
expression: evt.Unmarshaled.traefik.time
- parsed: verb
expression: evt.Unmarshaled.traefik.RequestMethod
- parsed: request
expression: evt.Unmarshaled.traefik.RequestPath
- parsed: http_version
## Split http_version to get version only as this is original functionality
expression: Split(evt.Unmarshaled.traefik.RequestProtocol, '/')[1]
- parsed: status
expression: int(evt.Unmarshaled.traefik.DownstreamStatus)
statics:
- meta: service
value: http
- meta: http_status
expression: "evt.Parsed.status"
- meta: http_path
expression: "evt.Parsed.request"
- meta: user
expression: "evt.Parsed.remote_user"
- meta: source_ip
expression: "evt.Parsed.remote_addr"
- meta: http_user_agent
expression: "evt.Parsed.http_user_agent"
- meta: log_type
value: http_access-log
- target: evt.StrTime
expression: "evt.Parsed.time_local"
- meta: traefik_router_name
expression: "evt.Parsed.traefik_router_name"
- meta: http_verb
expression: "evt.Parsed.verb"


@@ -0,0 +1,11 @@
filter: "evt.StrTime != ''"
name: crowdsecurity/dateparse-enrich
#debug: true
#it's a hack lol
statics:
- method: ParseDate
expression: evt.StrTime
- target: MarshaledTime
expression: evt.Enriched.MarshaledTime
- meta: timestamp
expression: evt.Enriched.MarshaledTime


@@ -0,0 +1,27 @@
filter: "'source_ip' in evt.Meta"
name: crowdsecurity/geoip-enrich
description: "Populate event with geoloc info : as, country, coords, source range."
data:
- source_url: https://crowdsec-statics-assets.s3-eu-west-1.amazonaws.com/GeoLite2-City.mmdb
dest_file: GeoLite2-City.mmdb
- source_url: https://crowdsec-statics-assets.s3-eu-west-1.amazonaws.com/GeoLite2-ASN.mmdb
dest_file: GeoLite2-ASN.mmdb
statics:
- method: GeoIpCity
expression: evt.Meta.source_ip
- meta: IsoCode
expression: evt.Enriched.IsoCode
- meta: IsInEU
expression: evt.Enriched.IsInEU
- meta: GeoCoords
expression: evt.Enriched.GeoCoords
- method: GeoIpASN
expression: evt.Meta.source_ip
- meta: ASNNumber
expression: evt.Enriched.ASNNumber
- meta: ASNOrg
expression: evt.Enriched.ASNOrg
- method: IpToRange
expression: evt.Meta.source_ip
- meta: SourceRange
expression: evt.Enriched.SourceRange


@@ -0,0 +1,33 @@
filter: "evt.Meta.service == 'http' && evt.Meta.log_type in ['http_access-log', 'http_error-log']"
description: "Parse HTTP logs in more detail: HTTP code, HTTP path, HTTP args, and whether it is a static resource"
name: crowdsecurity/http-logs
pattern_syntax:
DIR: "^.*/"
FILE: "[^/].*?"
EXT: "\\.[^.]*$|$"
nodes:
- statics:
- parsed: "impact_completion"
# the value of a field can also be determined from the result of an expression
expression: "evt.Meta.http_status in ['404', '403', '502'] ? 'false' : 'true'"
- target: evt.Parsed.static_ressource
value: 'false'
# let's split the path?query if possible
- grok:
pattern: "^%{GREEDYDATA:request}\\?%{GREEDYDATA:http_args}$"
apply_on: request
# this is another node, with its own pattern_syntax
- #debug: true
grok:
pattern: "%{DIR:file_dir}(%{FILE:file_frag}%{EXT:file_ext})?"
apply_on: request
statics:
- meta: http_path
expression: "evt.Parsed.http_path"
# meta af
- meta: http_args_len
expression: "len(evt.Parsed.http_args)"
- parsed: file_name
expression: evt.Parsed.file_frag + evt.Parsed.file_ext
- parsed: static_ressource
expression: "Upper(evt.Parsed.file_ext) in ['.JPG', '.CSS', '.JS', '.JPEG', '.PNG', '.SVG', '.MAP', '.ICO', '.OTF', '.GIF', '.MP3', '.MP4', '.WOFF', '.WOFF2', '.TTF', '.OTF', '.EOT', '.WEBP', '.WAV', '.GZ', '.BROTLI', '.BVR', '.TS', '.BMP'] ? 'true' : 'false'"


@@ -0,0 +1,14 @@
name: crowdsecurity/whitelists
description: "Whitelist events from private ipv4 addresses"
whitelist:
reason: "private ipv4/ipv6 ip/ranges"
ip:
- "127.0.0.1"
- "::1"
cidr:
- "192.168.0.0/16"
- "10.0.0.0/8"
- "172.16.0.0/12"
# expression:
# - "'foo.com' in evt.Meta.source_ip.reverse"


@@ -0,0 +1,9 @@
onsuccess: next_stage
filter: "evt.Overflow.Alert.Remediation == true && evt.Overflow.Alert.GetScope() == 'Ip'"
name: crowdsecurity/rdns
description: "Lookup the DNS associated to the source IP only for overflows"
statics:
- method: reverse_dns
expression: evt.Overflow.Alert.Source.IP
- meta: reverse_dns
expression: evt.Enriched.reverse_dns


@@ -0,0 +1,14 @@
name: crowdsecurity/cdn-whitelist
description: "Whitelist CDN providers"
whitelist:
reason: "CDN provider"
expression:
- "any(File('cloudflare_ips.txt'), { IpInRange(evt.Overflow.Alert.Source.IP ,#)})"
- "any(File('cloudflare_ip6s.txt'), { IpInRange(evt.Overflow.Alert.Source.IP ,#)})"
data:
- source_url: https://www.cloudflare.com/ips-v4
dest_file: cloudflare_ips.txt
type: string
- source_url: https://www.cloudflare.com/ips-v6
dest_file: cloudflare_ip6s.txt
type: string


@@ -0,0 +1,18 @@
name: crowdsecurity/seo-bots-whitelist
description: "Whitelist good search engine crawlers"
whitelist:
reason: "good bots (search engine crawlers)"
expression:
- "any(File('rdns_seo_bots.txt'), { len(#) > 0 && evt.Enriched.reverse_dns endsWith #})"
- "RegexpInFile(evt.Enriched.reverse_dns, 'rdns_seo_bots.regex')"
- "any(File('ip_seo_bots.txt'), { len(#) > 0 && IpInRange(evt.Overflow.Alert.Source.IP ,#)})"
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/whitelists/benign_bots/search_engine_crawlers/rdns_seo_bots.txt
dest_file: rdns_seo_bots.txt
type: string
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/whitelists/benign_bots/search_engine_crawlers/rnds_seo_bots.regex
dest_file: rdns_seo_bots.regex
type: regexp
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/whitelists/benign_bots/search_engine_crawlers/ip_seo_bots.txt
dest_file: ip_seo_bots.txt
type: string


@@ -0,0 +1,11 @@
type: trigger
format: 2.0
name: crowdsecurity/CVE-2019-18935
description: "Detect Telerik CVE-2019-18935 exploitation attempts"
filter: |
evt.Meta.log_type in ['http_access-log', 'http_error-log'] && Upper(QueryUnescape(evt.Meta.http_path)) startsWith Upper('/Telerik.Web.UI.WebResource.axd?type=rau')
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true


@@ -0,0 +1,10 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-26134
description: "Detect CVE-2022-26134 exploits"
filter: "Upper(PathUnescape(evt.Meta.http_path)) contains Upper('@java.lang.Runtime@getRuntime().exec(')"
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true


@@ -0,0 +1,10 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-35914
description: "Detect CVE-2022-35914 exploits"
filter: "Upper(evt.Meta.http_path) contains Upper('/vendor/htmlawed/htmlawed/htmLawedTest.php')"
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true


@@ -0,0 +1,18 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-37042
description: "Detect CVE-2022-37042 exploits"
filter: |
(
Upper(evt.Meta.http_path) contains Upper('/service/extension/backup/mboximport?account-name=admin&ow=2&no-switch=1&append=1') ||
Upper(evt.Meta.http_path) contains Upper('/service/extension/backup/mboximport?account-name=admin&account-status=1&ow=cmd')
)
and evt.Meta.http_status startsWith ('40') and
Upper(evt.Meta.http_verb) == 'POST'
blackhole: 2m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true


@@ -0,0 +1,11 @@
type: trigger
name: crowdsecurity/fortinet-cve-2022-40684
description: "Detect cve-2022-40684 exploitation attempts"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
Upper(evt.Meta.http_path) startsWith Upper('/api/v2/cmdb/system/admin/') and Lower(evt.Parsed.http_user_agent) == 'report runner'
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true


@@ -0,0 +1,13 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-41082
description: "Detect CVE-2022-41082 exploits"
filter: |
Upper(evt.Meta.http_path) contains Upper('/autodiscover/autodiscover.json') &&
Upper(evt.Parsed.http_args) contains Upper('powershell')
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true


@@ -0,0 +1,14 @@
type: leaky
name: crowdsecurity/CVE-2022-41697
description: "Detect CVE-2022-41697 enumeration"
filter: |
Upper(evt.Meta.http_path) contains Upper('/ghost/api/admin/session') &&
Upper(evt.Parsed.verb) == 'POST' &&
evt.Meta.http_status == '404'
leakspeed: "10s"
capacity: 5
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true


@@ -0,0 +1,17 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-42889
description: "Detect CVE-2022-42889 exploits (Text4Shell)"
filter: |
Upper(PathUnescape(evt.Meta.http_path)) contains Upper('${script:javascript:java.lang.Runtime.getRuntime().exec(')
or
Upper(PathUnescape(evt.Meta.http_path)) contains Upper('${script:js:java.lang.Runtime.getRuntime().exec(')
or
Upper(PathUnescape(evt.Meta.http_path)) contains Upper('${url:UTF-8:')
or
Upper(PathUnescape(evt.Meta.http_path)) contains Upper('${dns:address|')
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true


@@ -0,0 +1,15 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-44877
description: "Detect CVE-2022-44877 exploits"
filter: |
Lower(evt.Meta.http_path) contains '/index.php' &&
Upper(evt.Parsed.verb) == 'POST' &&
evt.Meta.http_status == '302' &&
Lower(evt.Parsed.http_args) matches 'login=.*[$|%24][\\(|%28].*[\\)|%29]'
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true


@@ -0,0 +1,29 @@
type: leaky
name: crowdsecurity/CVE-2022-46169-bf
description: "Detect CVE-2022-46169 brute forcing"
filter: |
Upper(evt.Meta.http_path) contains Upper('/remote_agent.php') &&
Upper(evt.Parsed.verb) == 'GET' &&
Lower(evt.Parsed.http_args) contains 'host_id' &&
Lower(evt.Parsed.http_args) contains 'local_data_ids'
leakspeed: "10s"
capacity: 5
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true
---
type: trigger
name: crowdsecurity/CVE-2022-46169-cmd
description: "Detect CVE-2022-46169 cmd injection"
filter: |
Upper(evt.Meta.http_path) contains Upper('/remote_agent.php') &&
Upper(evt.Parsed.verb) == 'GET' &&
Lower(evt.Parsed.http_args) contains 'action=polldata' &&
Lower(evt.Parsed.http_args) matches 'poller_id=.*(;|%3b)'
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true


@@ -0,0 +1,23 @@
type: trigger
format: 2.0
#debug: true
name: crowdsecurity/apache_log4j2_cve-2021-44228
description: "Detect cve-2021-44228 exploitation attempts"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
(
any(File("log4j2_cve_2021_44228.txt"), { Upper(evt.Meta.http_path) contains Upper(#)})
or
any(File("log4j2_cve_2021_44228.txt"), { Upper(evt.Parsed.http_user_agent) contains Upper(#)})
or
any(File("log4j2_cve_2021_44228.txt"), { Upper(evt.Parsed.http_referer) contains Upper(#)})
)
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/log4j2_cve_2021_44228.txt
dest_file: log4j2_cve_2021_44228.txt
type: string
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true


@@ -0,0 +1,13 @@
#contribution by @ltsich
type: leaky
name: crowdsecurity/dovecot-spam
description: "detect errors on dovecot"
debug: false
filter: "evt.Meta.log_type == 'dovecot_logs' && evt.Meta.dovecot_login_result == 'auth_failed'"
groupby: evt.Meta.source_ip
capacity: 3
leakspeed: "360s"
blackhole: 5m
labels:
type: scan
remediation: true


@@ -0,0 +1,16 @@
type: trigger
format: 2.0
name: crowdsecurity/f5-big-ip-cve-2020-5902
description: "Detect cve-2020-5902 exploitation attempts"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
(
Upper(evt.Meta.http_path) matches Upper('/tmui/login.jsp/..;/tmui/[^.]+.jsp\\?(fileName|command|directoryPath|tabId)=')
or
Upper(evt.Meta.http_path) matches Upper('/tmui/login.jsp/%2E%2E;/tmui/[^.]+.jsp\\?(fileName|command|directoryPath|tabId)=')
)
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true


@@ -0,0 +1,12 @@
type: trigger
format: 2.0
name: crowdsecurity/fortinet-cve-2018-13379
description: "Detect cve-2018-13379 exploitation attempts"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
Upper(evt.Meta.http_path) contains Upper('/remote/fgt_lang?lang=/../../../..//////////dev/cmdb/sslvpn_websession')
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true


@@ -0,0 +1,14 @@
type: trigger
format: 2.0
name: crowdsecurity/grafana-cve-2021-43798
description: "Detect cve-2021-43798 exploitation attempts"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
(Upper(evt.Meta.http_path) matches '/PUBLIC/PLUGINS/[^/]+/../[./]+/'
or
Upper(evt.Meta.http_path) matches '/PUBLIC/PLUGINS/[^/]+/%2E%2E/[%2E/]+/')
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true


@@ -0,0 +1,18 @@
type: leaky
#debug: true
name: crowdsecurity/http-backdoors-attempts
description: "Detect attempts to access common backdoors"
filter: 'evt.Meta.log_type in ["http_access-log", "http_error-log"] and any(File("backdoors.txt"), { evt.Parsed.file_name == #})'
groupby: "evt.Meta.source_ip"
distinct: evt.Parsed.file_name
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/backdoors.txt
dest_file: backdoors.txt
type: string
capacity: 1
leakspeed: 5s
blackhole: 5m
labels:
service: http
type: discovery
remediation: true


@@ -0,0 +1,20 @@
type: leaky
format: 2.0
#debug: true
name: crowdsecurity/http-bad-user-agent
description: "Detect bad user-agents"
filter: 'evt.Meta.log_type in ["http_access-log", "http_error-log"] && RegexpInFile(evt.Parsed.http_user_agent, "bad_user_agents.regex.txt")'
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/bad_user_agents.regex.txt
dest_file: bad_user_agents.regex.txt
type: regexp
strategy: LRU
size: 40
ttl: 10s
capacity: 1
leakspeed: 1m
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: scan
remediation: true


@@ -0,0 +1,16 @@
type: leaky
name: crowdsecurity/http-crawl-non_statics
description: "Detect aggressive crawl from single ip"
filter: "evt.Meta.log_type in ['http_access-log', 'http_error-log'] && evt.Parsed.static_ressource == 'false' && evt.Parsed.verb in ['GET', 'HEAD']"
distinct: "evt.Parsed.file_name"
leakspeed: 0.5s
capacity: 40
#debug: true
#this limits the memory cache (and event_sequences in output) to five events
cache_size: 5
groupby: "evt.Meta.source_ip + '/' + evt.Parsed.target_fqdn"
blackhole: 1m
labels:
service: http
type: crawl
remediation: true


@@ -0,0 +1,15 @@
type: trigger
format: 2.0
#debug: true
name: crowdsecurity/http-cve-2021-41773
description: "cve-2021-41773"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
(Upper(evt.Meta.http_path) contains "/.%2E/.%2E/"
or
Upper(evt.Meta.http_path) contains "/%2E%2E/%2E%2E")
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: scan
remediation: true


@@ -0,0 +1,14 @@
type: trigger
format: 2.0
#debug: true
#this is getting funny, it's the third patch on top of cve-2021-41773
name: crowdsecurity/http-cve-2021-42013
description: "cve-2021-42013"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
Upper(evt.Meta.http_path) contains "/%%32%65%%32%65/"
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: scan
remediation: true


@@ -0,0 +1,44 @@
# 404 scan
type: leaky
#debug: true
name: crowdsecurity/http-generic-bf
description: "Detect generic http brute force"
filter: "evt.Meta.service == 'http' && evt.Meta.sub_type == 'auth_fail'"
groupby: evt.Meta.source_ip
capacity: 5
leakspeed: "10s"
blackhole: 1m
labels:
service: http
type: bf
remediation: true
---
# Generic 401 Authorization Errors
type: leaky
#debug: true
name: LePresidente/http-generic-401-bf
description: "Detect generic 401 Authorization error brute force"
filter: "evt.Meta.log_type == 'http_access-log' && evt.Parsed.verb == 'POST' && evt.Meta.http_status == '401'"
groupby: evt.Meta.source_ip
capacity: 5
leakspeed: "10s"
blackhole: 1m
labels:
service: http
type: bf
remediation: true
---
# Generic 403 Forbidden (Authorization) Errors
type: leaky
#debug: true
name: LePresidente/http-generic-403-bf
description: "Detect generic 403 Forbidden (Authorization) error brute force"
filter: "evt.Meta.log_type == 'http_access-log' && evt.Parsed.verb == 'POST' && evt.Meta.http_status == '403'"
groupby: evt.Meta.source_ip
capacity: 5
leakspeed: "10s"
blackhole: 1m
labels:
service: http
type: bf
remediation: true


@@ -0,0 +1,10 @@
type: trigger
name: crowdsecurity/http-open-proxy
description: "Detect scan for open proxy"
#apache returns 405, nginx 400
filter: "evt.Meta.log_type == 'http_access-log' && evt.Meta.http_status in ['400','405'] && (evt.Parsed.verb == 'CONNECT' || evt.Parsed.request matches '^http[s]?://')"
blackhole: 2m
labels:
service: http
type: scan
remediation: true

View File

@@ -0,0 +1,20 @@
# path traversal probing
type: leaky
#debug: true
name: crowdsecurity/http-path-traversal-probing
description: "Detect path traversal attempt"
filter: "evt.Meta.log_type in ['http_access-log', 'http_error-log'] && any(File('http_path_traversal.txt'),{evt.Meta.http_path contains #})"
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/path_traversal.txt
dest_file: http_path_traversal.txt
type: string
groupby: "evt.Meta.source_ip"
distinct: "evt.Meta.http_path"
capacity: 3
reprocess: true
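# reprocess: the overflow event is poured back into the engine so other buckets
# can also consume it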
leakspeed: 10s
blackhole: 2m
labels:
service: http
type: scan
remediation: true

View File

@@ -0,0 +1,16 @@
# 404 scan
type: leaky
#debug: true
name: crowdsecurity/http-probing
description: "Detect site scanning/probing from a single ip"
filter: "evt.Meta.service == 'http' && evt.Meta.http_status in ['404', '403', '400'] && evt.Parsed.static_ressource == 'false'"
groupby: "evt.Meta.source_ip + '/' + evt.Parsed.target_fqdn"
distinct: "evt.Meta.http_path"
capacity: 10
reprocess: true
leakspeed: "10s"
blackhole: 5m
labels:
service: http
type: scan
remediation: true

View File

@@ -0,0 +1,19 @@
type: leaky
format: 2.0
#debug: true
name: crowdsecurity/http-sensitive-files
description: "Detect attempt to access to sensitive files (.log, .db ..) or folders (.git)"
filter: 'evt.Meta.log_type in ["http_access-log", "http_error-log"] and any(File("sensitive_data.txt"), { evt.Parsed.request endsWith #})'
groupby: "evt.Meta.source_ip"
distinct: evt.Parsed.request
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/sensitive_data.txt
dest_file: sensitive_data.txt
type: string
capacity: 4
leakspeed: 5s
blackhole: 5m
labels:
service: http
type: discovery
remediation: true

View File

@@ -0,0 +1,20 @@
type: leaky
#requires at least 2.0 because it's using the 'data' section and the 'Upper' expr helper
format: 2.0
name: crowdsecurity/http-sqli-probbing-detection
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/sqli_probe_patterns.txt
dest_file: sqli_probe_patterns.txt
type: string
description: "A scenario that detects SQL injection probing with minimal false positives"
filter: "evt.Meta.log_type in ['http_access-log', 'http_error-log'] && any(File('sqli_probe_patterns.txt'), {Upper(evt.Parsed.http_args) contains Upper(#)})"
groupby: evt.Meta.source_ip
capacity: 10
leakspeed: 1s
blackhole: 5m
# low-false-positive approach: we require distinct payloads to avoid false positives
distinct: evt.Parsed.http_args
labels:
service: http
type: sqli_probing
remediation: true

View File

@@ -0,0 +1,20 @@
type: leaky
#requires at least 2.0 because it's using the 'data' section and the 'Upper' expr helper
format: 2.0
name: crowdsecurity/http-xss-probbing
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/xss_probe_patterns.txt
dest_file: xss_probe_patterns.txt
type: string
description: "A scenario that detects XSS probing with minimal false positives"
filter: "evt.Meta.log_type in ['http_access-log', 'http_error-log'] && any(File('xss_probe_patterns.txt'), {Upper(evt.Parsed.http_args) contains Upper(#)})"
groupby: evt.Meta.source_ip
capacity: 5
leakspeed: 1s
blackhole: 5m
# low-false-positive approach: we require distinct payloads to avoid false positives
distinct: evt.Parsed.http_args
labels:
service: http
type: xss_probing
remediation: true

View File

@@ -0,0 +1,16 @@
type: trigger
format: 2.0
#debug: true
name: crowdsecurity/jira_cve-2021-26086
description: "Detect Atlassian Jira CVE-2021-26086 exploitation attemps"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and any(File("jira_cve_2021-26086.txt"), {Upper(evt.Meta.http_path) contains Upper(#)})
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/jira_cve_2021-26086.txt
dest_file: jira_cve_2021-26086.txt
type: string
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,13 @@
type: trigger
format: 2.0
name: crowdsecurity/netgear_rce
description: "Detect Netgear RCE DGN1000/DGN220 exploitation attempts"
filter: |
evt.Meta.log_type in ['http_access-log', 'http_error-log'] && Lower(QueryUnescape(evt.Meta.http_path)) startsWith Lower('/setup.cgi?next_file=netgear.cfg&todo=syscmd&cmd=')
groupby: "evt.Meta.source_ip"
blackhole: 2m
references:
- "https://www.exploit-db.com/exploits/25978"
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,13 @@
type: leaky
#debug: true
name: crowdsecurity/nginx-req-limit-exceeded
description: "Detects IPs which violate nginx's user set request limit."
filter: evt.Meta.sub_type == 'req_limit_exceeded'
leakspeed: "60s"
capacity: 5
groupby: evt.Meta.source_ip
blackhole: 5m
labels:
service: nginx
type: bruteforce
remediation: true

View File

@@ -0,0 +1,33 @@
# postfix spam
type: leaky
name: crowdsecurity/postfix-spam
description: "Detect spammers"
filter: "evt.Meta.log_type_enh == 'spam-attempt' || evt.Meta.log_type == 'postfix' && evt.Meta.action == 'reject'"
leakspeed: "10s"
references:
- https://en.wikipedia.org/wiki/Spamming
capacity: 5
groupby: evt.Meta.source_ip
blackhole: 1m
reprocess: false
labels:
service: postfix
type: bruteforce
remediation: true
---
# postfix spam
type: trigger
name: crowdsecurity/postscreen-rbl
description: "Detect spammers"
filter: "evt.Meta.service == 'postscreen' && evt.Meta.pregreet == 'PREGREET'"
leakspeed: "10s"
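# note: this is a 'trigger' bucket, which overflows on the first matching event,
# so the leakspeed above should have no practical effect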
references:
- https://en.wikipedia.org/wiki/Spamming
groupby: evt.Meta.source_ip
blackhole: 1m
reprocess: false
labels:
service: postscreen
type: bruteforce
remediation: true

View File

@@ -0,0 +1,14 @@
type: trigger
format: 2.0
name: crowdsecurity/pulse-secure-sslvpn-cve-2019-11510
description: "Detect cve-2019-11510 exploitation attemps"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
(Upper(evt.Meta.http_path) matches Upper('/dana-na/../dana/html5acc/guacamole/../../../../../../../[^?]+\\?/dana/html5acc/guacamole/')
or
Upper(evt.Meta.http_path) matches Upper('/dana-na/%2E%2E/dana/html5acc/guacamole/%2E%2E/%2E%2E/%2E%2E/%2E%2E/%2E%2E/%2E%2E/%2E%2E/[^?]+\\?/dana/html5acc/guacamole/'))
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,12 @@
type: trigger
format: 2.0
name: crowdsecurity/spring4shell_cve-2022-22965
description: "Detect cve-2022-22965 probing"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
(Upper(evt.Meta.http_path) contains 'CLASS.MODULE.CLASSLOADER.')
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,32 @@
# ssh bruteforce
type: leaky
name: crowdsecurity/ssh-bf
description: "Detect ssh bruteforce"
filter: "evt.Meta.log_type == 'ssh_failed-auth'"
leakspeed: "10s"
references:
- http://wikipedia.com/ssh-bf-is-bad
capacity: 5
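# roughly: 6 or more failed logins from one ip arriving faster than the 10s leak
# (i.e. within ~50s) overflow the bucket; the user-enum bucket below instead
# counts distinct target usernames from the same ip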
groupby: evt.Meta.source_ip
blackhole: 1m
reprocess: true
labels:
service: ssh
type: bruteforce
remediation: true
---
# ssh user-enum
type: leaky
name: crowdsecurity/ssh-bf_user-enum
description: "Detect ssh user enum bruteforce"
filter: evt.Meta.log_type == 'ssh_failed-auth'
groupby: evt.Meta.source_ip
distinct: evt.Meta.target_user
leakspeed: 10s
capacity: 5
blackhole: 1m
labels:
service: ssh
type: bruteforce
remediation: true

View File

@@ -0,0 +1,32 @@
# ssh bruteforce
type: leaky
name: crowdsecurity/ssh-slow-bf
description: "Detect slow ssh bruteforce"
filter: "evt.Meta.log_type == 'ssh_failed-auth'"
leakspeed: "60s"
references:
- http://wikipedia.com/ssh-bf-is-bad
capacity: 10
groupby: evt.Meta.source_ip
blackhole: 1m
reprocess: true
labels:
service: ssh
type: bruteforce
remediation: true
---
# ssh user-enum
type: leaky
name: crowdsecurity/ssh-slow-bf_user-enum
description: "Detect slow ssh user enum bruteforce"
filter: evt.Meta.log_type == 'ssh_failed-auth'
groupby: evt.Meta.source_ip
distinct: evt.Meta.target_user
leakspeed: 60s
capacity: 10
blackhole: 1m
labels:
service: ssh
type: bruteforce
remediation: true

View File

@@ -0,0 +1,16 @@
type: trigger
format: 2.0
#debug: true
name: crowdsecurity/thinkphp-cve-2018-20062
description: "Detect ThinkPHP CVE-2018-20062 exploitation attemps"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and any(File("thinkphp_cve_2018-20062.txt"), {Upper(evt.Meta.http_path) matches Upper(#)})
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/thinkphp_cve_2018-20062.txt
dest_file: thinkphp_cve_2018-20062.txt
type: string
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,11 @@
type: trigger
format: 2.0
name: crowdsecurity/vmware-cve-2022-22954
description: "Detect Vmware CVE-2022-22954 exploitation attempts"
filter: |
evt.Meta.log_type in ['http_access-log', 'http_error-log'] && Upper(QueryUnescape(evt.Meta.http_path)) startsWith Upper('/catalog-portal/ui/oauth/verify?error=&deviceUdid=${"freemarker.template.utility.Execute"?new()(')
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,11 @@
type: trigger
format: 2.0
name: crowdsecurity/vmware-vcenter-vmsa-2021-0027
description: "Detect VMSA-2021-0027 exploitation attemps"
filter: |
evt.Meta.log_type in ['http_access-log', 'http_error-log'] && evt.Meta.http_path matches '/ui/vcav-bootstrap/rest/vcav-providers/provider-logo\\?url=(file|http)'
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,12 @@
#contributed by ltsich
type: trigger
name: ltsich/http-w00tw00t
description: "detect w00tw00t"
debug: false
filter: "evt.Meta.log_type == 'http_access-log' && evt.Parsed.file_name contains 'w00tw00t.at.ISC.SANS.DFind'"
groupby: evt.Meta.source_ip
blackhole: 5m
labels:
service: http
type: scan
remediation: true

View File

@@ -0,0 +1,3 @@
url: http://0.0.0.0:8080
login: localhost
password: Q8A8aV6bUtj50QbzwtbJczButlAaKmmGafn784Go1ERjXtNS9mwQ4XHJNQ9294VL
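# (presumably the agent's credentials for the local API: LAPI url, machine login
# and its generated password)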

View File

@@ -0,0 +1,45 @@
type: email # Don't change
name: email_default # Must match the registered plugin in the profile
# One of "trace", "debug", "info", "warn", "error", "off"
log_level: info
# group_wait: # Time to wait collecting alerts before relaying a message to this plugin, eg "30s"
# group_threshold: # Amount of alerts that triggers a message before <group_wait> has expired, eg "10"
# max_retry: # Number of attempts to relay messages to plugins in case of error
timeout: 20s # Time to wait for response from the plugin before considering the attempt a failure, eg "10s"
#-------------------------
# plugin-specific options
# The following template receives a list of models.Alert objects
# The output goes in the email message body
format: |
{{range . -}}
{{$alert := . -}}
{{range .Decisions -}}
<html><body><p><a href=https://www.whois.com/whois/{{.Value}}>{{.Value}}</a> will get <b>{{.Type}}</b> for next <b>{{.Duration}}</b> for triggering <b>{{.Scenario}}</b> on machine <b>{{$alert.MachineID}}</b>.</p> <p><a href=https://app.crowdsec.net/cti/{{.Value}}>CrowdSec CTI</a></p></body></html>
{{end -}}
{{end -}}
smtp_host: # example: smtp.gmail.com
smtp_username: # Replace with your actual username
smtp_password: # Replace with your actual password
smtp_port: # Common values are any of [25, 465, 587, 2525]
auth_type: # Valid choices are "none", "crammd5", "login", "plain"
sender_name: "CrowdSec"
sender_email: # example: foo@gmail.com
email_subject: "CrowdSec Notification"
receiver_emails:
# - email1@gmail.com
# - email2@gmail.com
# One of "ssltls", "starttls", "none"
encryption_type: ssltls
---
# type: email
# name: email_second_notification
# ...
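# Usage note: a notification plugin only runs if its 'name' is referenced under
# 'notifications:' in crowdsec's profiles.yaml, e.g.
#   notifications:
#     - email_default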

View File

@@ -0,0 +1,36 @@
type: http # Don't change
name: http_default # Must match the registered plugin in the profile
# One of "trace", "debug", "info", "warn", "error", "off"
log_level: info
# group_wait: # Time to wait collecting alerts before relaying a message to this plugin, eg "30s"
# group_threshold: # Amount of alerts that triggers a message before <group_wait> has expired, eg "10"
# max_retry: # Number of attempts to relay messages to plugins in case of error
# timeout: # Time to wait for response from the plugin before considering the attempt a failure, eg "10s"
#-------------------------
# plugin-specific options
# The following template receives a list of models.Alert objects
# The output goes in the http request body
format: |
{{.|toJson}}
# The plugin will make requests to this url, eg: https://www.example.com/
url: <HTTP_url>
# Any of the http verbs: "POST", "GET", "PUT"...
method: POST
# headers:
# Authorization: token 0x64312313
# skip_tls_verification: # true or false. Default is false
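# When an alert fires, the rendered 'format' body ({{.|toJson}} serializes the
# alert list to JSON) is sent to <HTTP_url> with the configured method and headers.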
---
# type: http
# name: http_second_notification
# ...

View File

@@ -0,0 +1,36 @@
type: slack # Don't change
name: slack_default # Must match the registered plugin in the profile
# One of "trace", "debug", "info", "warn", "error", "off"
log_level: info
# group_wait: # Time to wait collecting alerts before relaying a message to this plugin, eg "30s"
# group_threshold: # Amount of alerts that triggers a message before <group_wait> has expired, eg "10"
# max_retry: # Number of attempts to relay messages to plugins in case of error
# timeout: # Time to wait for response from the plugin before considering the attempt a failure, eg "10s"
#-------------------------
# plugin-specific options
# The following template receives a list of models.Alert objects
# The output goes in the slack message
format: |
{{range . -}}
{{$alert := . -}}
{{range .Decisions -}}
{{if $alert.Source.Cn -}}
:flag-{{$alert.Source.Cn}}: <https://www.whois.com/whois/{{.Value}}|{{.Value}}> will get {{.Type}} for next {{.Duration}} for triggering {{.Scenario}} on machine '{{$alert.MachineID}}'. <https://app.crowdsec.net/cti/{{.Value}}|CrowdSec CTI>{{end}}
{{if not $alert.Source.Cn -}}
:pirate_flag: <https://www.whois.com/whois/{{.Value}}|{{.Value}}> will get {{.Type}} for next {{.Duration}} for triggering {{.Scenario}} on machine '{{$alert.MachineID}}'. <https://app.crowdsec.net/cti/{{.Value}}|CrowdSec CTI>{{end}}
{{end -}}
{{end -}}
webhook: <WEBHOOK_URL>
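# <WEBHOOK_URL> is expected to be a Slack incoming-webhook URL
# (https://hooks.slack.com/services/...); the rendered 'format' text above is
# posted there as the message.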
---
# type: slack
# name: slack_second_notification
# ...

Some files were not shown because too many files have changed in this diff