Initial commit

This commit is contained in:
root
2023-08-09 14:01:28 +02:00
commit f4efbc7a63
199 changed files with 13338 additions and 0 deletions

View File

@@ -0,0 +1,28 @@
# Service Crowdsec
SERVICES_CROWDSEC_CONTAINER_NAME=crowdsec
SERVICES_CROWDSEC_HOSTNAME=crowdsec
SERVICES_CROWDSEC_IMAGE=crowdsecurity/crowdsec
SERVICES_CROWDSEC_IMAGE_VERSION=latest
SERVICES_CROWDSEC_NETWORKS_CROWDSEC_IPV4=172.31.254.254
# Service Traefik
SERVICES_TRAEFIK_CONTAINER_NAME=traefik
SERVICES_TRAEFIK_HOSTNAME=traefik
SERVICES_TRAEFIK_IMAGE=traefik
SERVICES_TRAEFIK_IMAGE_VERSION=2.10
SERVICES_TRAEFIK_LABELS_TRAEFIK_HOST=`traefik.prothmann.com`
SERVICES_TRAEFIK_NETWORKS_CROWDSEC_IPV4=172.31.254.253
SERVICES_TRAEFIK_NETWORKS_PROXY_IPV4=172.30.255.254
# Service Traefik Crowdsec Bouncer
SERVICES_TRAEFIK_CROWDSEC_BOUNCER_CONTAINER_NAME=traefik_crowdsec_bouncer
SERVICES_TRAEFIK_CROWDSEC_BOUNCER_HOSTNAME=traefik-crowdsec-bouncer
SERVICES_TRAEFIK_CROWDSEC_BOUNCER_IMAGE=fbonalair/traefik-crowdsec-bouncer
SERVICES_TRAEFIK_CROWDSEC_BOUNCER_IMAGE_VERSION=latest
SERVICES_TRAEFIK_CROWDSEC_BOUNCER_NETWORKS_CROWDSEC_IPV4=172.31.254.252
# Netzwerkeinstellungen
NETWORKS_PROXY_NAME=proxy
NETWORKS_PROXY_SUBNET_IPV4=172.30.0.0/16
NETWORKS_CROWDSEC_NAME=crowdsec
NETWORKS_CROWDSEC_SUBNET_IPV4=172.31.0.0/16

View File

@@ -0,0 +1 @@
https://goneuland.de/traefik-v2-3-reverse-proxy-mit-crowdsec-im-stack-einrichten/#32_docker-composeyml_anlegen

View File

@@ -0,0 +1,2 @@
PGID="1000"
COLLECTIONS="crowdsecurity/traefik crowdsecurity/http-cve crowdsecurity/whitelist-good-actors crowdsecurity/postfix crowdsecurity/dovecot crowdsecurity/nginx"

View File

@@ -0,0 +1,4 @@
# Access-Token damit Bouncer und CrowdSec kommunizieren können
CROWDSEC_BOUNCER_API_KEY=2af497632319f6d35623c43b69cd7c15
# Hostname mit richtigem Port von CrowdSec
CROWDSEC_AGENT_HOST=${SERVICES_CROWDSEC_HOSTNAME}:8080

View File

@@ -0,0 +1,11 @@
filenames:
- /var/log/auth.log
- /var/log/syslog
labels:
type: syslog
---
filenames:
- /var/log/traefik/*.log
labels:
type: traefik
---

View File

@@ -0,0 +1 @@
/etc/crowdsec/hub/collections/crowdsecurity/base-http-scenarios.yaml

View File

@@ -0,0 +1 @@
/etc/crowdsec/hub/collections/crowdsecurity/dovecot.yaml

View File

@@ -0,0 +1 @@
/etc/crowdsec/hub/collections/crowdsecurity/http-cve.yaml

View File

@@ -0,0 +1 @@
/etc/crowdsec/hub/collections/crowdsecurity/nginx.yaml

View File

@@ -0,0 +1 @@
/etc/crowdsec/hub/collections/crowdsecurity/postfix.yaml

View File

@@ -0,0 +1 @@
/etc/crowdsec/hub/collections/crowdsecurity/traefik.yaml

View File

@@ -0,0 +1 @@
/etc/crowdsec/hub/collections/crowdsecurity/whitelist-good-actors.yaml

View File

@@ -0,0 +1,49 @@
common:
daemonize: false
log_media: stdout
log_level: info
log_dir: /var/log/
working_dir: .
config_paths:
config_dir: /etc/crowdsec/
data_dir: /var/lib/crowdsec/data/
simulation_path: /etc/crowdsec/simulation.yaml
hub_dir: /etc/crowdsec/hub/
index_path: /etc/crowdsec/hub/.index.json
notification_dir: /etc/crowdsec/notifications/
plugin_dir: /usr/local/lib/crowdsec/plugins/
crowdsec_service:
acquisition_path: /etc/crowdsec/acquis.yaml
acquisition_dir: /etc/crowdsec/acquis.d
parser_routines: 1
plugin_config:
user: nobody
group: nobody
cscli:
output: human
db_config:
log_level: info
type: sqlite
db_path: /var/lib/crowdsec/data/crowdsec.db
flush:
max_items: 5000
max_age: 7d
use_wal: false
api:
client:
insecure_skip_verify: false
credentials_path: /etc/crowdsec/local_api_credentials.yaml
server:
log_level: info
listen_uri: 0.0.0.0:8080
profiles_path: /etc/crowdsec/profiles.yaml
trusted_ips: # IP ranges, or IPs which can have admin API access
- 127.0.0.1
- ::1
online_client: # Central API credentials (to push signals and receive bad IPs)
credentials_path: /etc/crowdsec/online_api_credentials.yaml
prometheus:
enabled: true
level: full
listen_addr: 0.0.0.0
listen_port: 6060

View File

@@ -0,0 +1,4 @@
share_manual_decisions: false
share_custom: true
share_tainted: true
share_context: false

View File

@@ -0,0 +1,47 @@
common:
daemonize: true
log_media: stdout
log_level: info
working_dir: .
config_paths:
config_dir: ./config
data_dir: ./data/
notification_dir: ./config/notifications/
plugin_dir: ./plugins/
#simulation_path: /etc/crowdsec/config/simulation.yaml
#hub_dir: /etc/crowdsec/hub/
#index_path: ./config/hub/.index.json
crowdsec_service:
acquisition_path: ./config/acquis.yaml
parser_routines: 1
plugin_config:
user: $USER # plugin process would be run on behalf of this user
group: $USER # plugin process would be run on behalf of this group
cscli:
output: human
db_config:
type: sqlite
db_path: ./data/crowdsec.db
user: root
password: crowdsec
db_name: crowdsec
host: "172.17.0.2"
port: 3306
flush:
#max_items: 10000
#max_age: 168h
api:
client:
credentials_path: ./config/local_api_credentials.yaml
server:
#insecure_skip_verify: true
listen_uri: 127.0.0.1:8081
profiles_path: ./config/profiles.yaml
tls:
#cert_file: ./cert.pem
#key_file: ./key.pem
online_client: # Central API
credentials_path: ./config/online_api_credentials.yaml
prometheus:
enabled: true
level: full

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,25 @@
parsers:
- crowdsecurity/http-logs
scenarios:
- crowdsecurity/http-crawl-non_statics
- crowdsecurity/http-probing
- crowdsecurity/http-bad-user-agent
- crowdsecurity/http-path-traversal-probing
- crowdsecurity/http-sensitive-files
- crowdsecurity/http-sqli-probing
- crowdsecurity/http-xss-probing
- crowdsecurity/http-backdoors-attempts
- ltsich/http-w00tw00t
- crowdsecurity/http-generic-bf
- crowdsecurity/http-open-proxy
collections:
- crowdsecurity/http-cve
description: "http common : scanners detection"
author: crowdsecurity
tags:
- linux
- http
- crawl
- scan

View File

@@ -0,0 +1,10 @@
parsers:
- crowdsecurity/dovecot-logs
scenarios:
- crowdsecurity/dovecot-spam
description: "dovecot support : parser and spammer detection"
author: crowdsecurity
tags:
- linux
- spam
- bruteforce

View File

@@ -0,0 +1,30 @@
scenarios:
- crowdsecurity/http-cve-2021-41773
- crowdsecurity/http-cve-2021-42013
- crowdsecurity/grafana-cve-2021-43798
- crowdsecurity/vmware-vcenter-vmsa-2021-0027
- crowdsecurity/fortinet-cve-2018-13379
- crowdsecurity/pulse-secure-sslvpn-cve-2019-11510
- crowdsecurity/f5-big-ip-cve-2020-5902
- crowdsecurity/thinkphp-cve-2018-20062
- crowdsecurity/apache_log4j2_cve-2021-44228
- crowdsecurity/jira_cve-2021-26086
- crowdsecurity/spring4shell_cve-2022-22965
- crowdsecurity/vmware-cve-2022-22954
- crowdsecurity/CVE-2022-37042
- crowdsecurity/CVE-2022-41082
- crowdsecurity/CVE-2022-35914
- crowdsecurity/CVE-2022-40684
- crowdsecurity/CVE-2022-26134
- crowdsecurity/CVE-2022-42889
- crowdsecurity/CVE-2022-41697
- crowdsecurity/CVE-2022-46169
- crowdsecurity/CVE-2022-44877
- crowdsecurity/CVE-2019-18935
- crowdsecurity/netgear_rce
author: crowdsecurity
tags:
- web
- exploit
- cve
- http

View File

@@ -0,0 +1,11 @@
parsers:
- crowdsecurity/syslog-logs
- crowdsecurity/geoip-enrich
- crowdsecurity/dateparse-enrich
collections:
- crowdsecurity/sshd
description: "core linux support : syslog+geoip+ssh"
author: crowdsecurity
tags:
- linux

View File

@@ -0,0 +1,15 @@
parsers:
#generic post-parsing of http stuff
- crowdsecurity/nginx-logs
collections:
- crowdsecurity/base-http-scenarios
scenarios:
- crowdsecurity/nginx-req-limit-exceeded
description: "nginx support : parser and generic http scenarios"
author: crowdsecurity
tags:
- linux
- nginx
- crawl
- scan

View File

@@ -0,0 +1,11 @@
parsers:
- crowdsecurity/postfix-logs
- crowdsecurity/postscreen-logs
scenarios:
- crowdsecurity/postfix-spam
description: "postfix support : parser and spammer detection"
author: crowdsecurity
tags:
- linux
- spam
- bruteforce

View File

@@ -0,0 +1,12 @@
parsers:
- crowdsecurity/sshd-logs
scenarios:
- crowdsecurity/ssh-bf
- crowdsecurity/ssh-slow-bf
description: "sshd support : parser and brute-force detection"
author: crowdsecurity
tags:
- linux
- ssh
- bruteforce

View File

@@ -0,0 +1,12 @@
# co-authored with gmelodie (https://github.com/gmelodie)
parsers:
- crowdsecurity/traefik-logs
collections:
- crowdsecurity/base-http-scenarios
description: "traefik support: parser and generic http scenarios"
author: crowdsecurity
tags:
- traefik
- http
- bruteforce

View File

@@ -0,0 +1,10 @@
postoverflows:
- crowdsecurity/seo-bots-whitelist
- crowdsecurity/cdn-whitelist
- crowdsecurity/rdns
description: "Good actors whitelists"
author: crowdsecurity
tags:
- whitelist
- bots
- partners

View File

@@ -0,0 +1,19 @@
filter: "evt.Line.Labels.type == 'containerd'"
onsuccess: next_stage
name: crowdsecurity/cri-logs
description: CRI logging format parser
nodes:
- grok:
pattern: "^%{TIMESTAMP_ISO8601:cri_timestamp} %{WORD:stream} %{WORD:logtag} %{GREEDYDATA:message}"
apply_on: Line.Raw
statics:
- parsed: "logsource"
value: "cri"
- target: evt.StrTime
expression: evt.Parsed.cri_timestamp
- parsed: program
expression: evt.Line.Labels.program
- meta: datasource_path
expression: evt.Line.Src
- meta: datasource_type
expression: evt.Line.Module

View File

@@ -0,0 +1,12 @@
#If it's docker, we are going to extract log line from it
filter: "evt.Line.Labels.type == 'docker'"
onsuccess: next_stage
name: crowdsecurity/docker-logs
description: docker json logs parser
statics:
- target: evt.StrTime
expression: JsonExtract(evt.Line.Raw, "time")
- parsed: message
expression: JsonExtractUnescape(evt.Line.Raw, "log")
- parsed: program
expression: evt.Line.Labels.program

View File

@@ -0,0 +1,48 @@
#If it's syslog, we are going to extract progname from it
filter: "evt.Line.Labels.type == 'syslog'"
onsuccess: next_stage
pattern_syntax:
RAW_SYSLOG_PREFIX: '^<%{NUMBER:stuff1}>%{NUMBER:stuff2} %{SYSLOGBASE2} %{DATA:program} %{NUMBER:pid}'
RAW_SYSLOG_META: '\[meta sequenceId="%{NOTDQUOTE:seq_id}"\]'
name: crowdsecurity/syslog-logs
nodes:
- grok:
#this is a named regular expression. grok patterns can be kept into separate files for readability
pattern: "^%{SYSLOGLINE}"
#This is the field of the `Event` to which the regexp should be applied
apply_on: Line.Raw
- grok:
#a second pattern for unparsed syslog lines, as seen in opnsense
pattern: '%{RAW_SYSLOG_PREFIX} - %{RAW_SYSLOG_META} %{GREEDYDATA:message}'
apply_on: Line.Raw
#if the node was successful, statics will be applied.
statics:
- meta: machine
expression: evt.Parsed.logsource
- parsed: "logsource"
value: "syslog"
# syslog date can be in two different fields (one of the assignments will fail)
- target: evt.StrTime
expression: evt.Parsed.timestamp
- target: evt.StrTime
expression: evt.Parsed.timestamp8601
- meta: datasource_path
expression: evt.Line.Src
- meta: datasource_type
expression: evt.Line.Module
---
#if it's not syslog, the type is the progname
filter: "evt.Line.Labels.type != 'syslog'"
onsuccess: next_stage
name: crowdsecurity/non-syslog
#debug: true
statics:
- parsed: message
expression: evt.Line.Raw
- parsed: program
expression: evt.Line.Labels.type
- meta: datasource_path
expression: evt.Line.Src
- meta: datasource_type
expression: evt.Line.Module

View File

@@ -0,0 +1,26 @@
#contribution by @ltsich
onsuccess: next_stage
debug: false
filter: "evt.Parsed.program == 'dovecot'"
name: crowdsecurity/dovecot-logs
description: "Parse dovecot logs"
nodes:
- grok:
pattern: "%{WORD:protocol}-login: %{DATA:dovecot_login_message}: user=<%{DATA:dovecot_user}>.*, rip=%{IP:dovecot_remote_ip}, lip=%{IP:dovecot_local_ip}"
apply_on: message
- grok:
pattern: "auth-worker\\(%{INT}\\): %{WORD:dovecot_user_backend}\\(%{DATA:dovecot_user},%{IP:dovecot_remote_ip},?%{DATA}\\): (%{DATA}: )?%{DATA:dovecot_login_message}$"
apply_on: message
- grok:
pattern: "auth-worker\\(%{INT}\\): (Info: )?conn unix:auth-worker \\(pid=%{INT},uid=%{INT}\\): auth-worker<%{INT}>: %{WORD:dovecot_user_backend}\\(%{DATA:dovecot_user},%{IP:dovecot_remote_ip},?%{DATA}\\): (%{DATA}: )?%{DATA:dovecot_login_message}$"
apply_on: message
- grok:
pattern: "auth: passwd-file\\(%{DATA:dovecot_user},%{IP:dovecot_remote_ip}\\): (%{DATA}: )?%{DATA:dovecot_login_message}$"
apply_on: message
statics:
- meta: log_type
value: dovecot_logs
- meta: source_ip
expression: "evt.Parsed.dovecot_remote_ip"
- meta: dovecot_login_result
expression: "any(['Authentication failure', 'Password mismatch', 'password mismatch', 'auth failed', 'unknown user'], {evt.Parsed.dovecot_login_message contains #}) ? 'auth_failed' : ''"

View File

@@ -0,0 +1,70 @@
filter: "evt.Parsed.program startsWith 'nginx'"
onsuccess: next_stage
name: crowdsecurity/nginx-logs
description: "Parse nginx access and error logs"
pattern_syntax:
NGCUSTOMUSER: '[a-zA-Z0-9\.\@\-\+_%]+'
nodes:
- grok:
pattern: '(%{IPORHOST:target_fqdn} )?%{IPORHOST:remote_addr} - %{NGCUSTOMUSER:remote_user}? \[%{HTTPDATE:time_local}\] "%{WORD:verb} %{DATA:request} HTTP/%{NUMBER:http_version}" %{NUMBER:status} %{NUMBER:body_bytes_sent} "%{NOTDQUOTE:http_referer}" "%{NOTDQUOTE:http_user_agent}"( %{NUMBER:request_length} %{NUMBER:request_time} \[%{DATA:proxy_upstream_name}\] \[%{DATA:proxy_alternative_upstream_name}\])?'
apply_on: message
statics:
- meta: log_type
value: http_access-log
- target: evt.StrTime
expression: evt.Parsed.time_local
- grok:
# and this one the error log
pattern: '(%{IPORHOST:target_fqdn} )?%{NGINXERRTIME:time} \[%{LOGLEVEL:loglevel}\] %{NONNEGINT:pid}#%{NONNEGINT:tid}: (\*%{NONNEGINT:cid} )?%{GREEDYDATA:message}, client: %{IPORHOST:remote_addr}, server: %{DATA:target_fqdn}, request: "%{WORD:verb} ([^/]+)?%{URIPATHPARAM:request}( HTTP/%{NUMBER:http_version})?", host: "%{IPORHOST}(:%{NONNEGINT})?"'
apply_on: message
statics:
- meta: log_type
value: http_error-log
- target: evt.StrTime
expression: evt.Parsed.time
pattern_syntax:
NO_DOUBLE_QUOTE: '[^"]+'
onsuccess: next_stage
nodes:
- filter: "evt.Parsed.message contains 'was not found in'"
pattern_syntax:
USER_NOT_FOUND: 'user "%{NO_DOUBLE_QUOTE:username}" was not found in "%{NO_DOUBLE_QUOTE}"'
grok:
pattern: '%{USER_NOT_FOUND}'
apply_on: message
statics:
- meta: sub_type
value: "auth_fail"
- meta: username
expression: evt.Parsed.username
- filter: "evt.Parsed.message contains 'password mismatch'"
pattern_syntax:
PASSWORD_MISMATCH: 'user "%{NO_DOUBLE_QUOTE:username}": password mismatch'
grok:
pattern: '%{PASSWORD_MISMATCH}'
apply_on: message
statics:
- meta: sub_type
value: "auth_fail"
- meta: username
expression: evt.Parsed.username
- filter: "evt.Parsed.message contains 'limiting requests, excess'"
statics:
- meta: sub_type
value: "req_limit_exceeded"
# these ones apply for both grok patterns
statics:
- meta: service
value: http
- meta: source_ip
expression: "evt.Parsed.remote_addr"
- meta: http_status
expression: "evt.Parsed.status"
- meta: http_path
expression: "evt.Parsed.request"
- meta: http_verb
expression: "evt.Parsed.verb"
- meta: http_user_agent
expression: "evt.Parsed.http_user_agent"
- meta: target_fqdn
expression: "evt.Parsed.target_fqdn"

View File

@@ -0,0 +1,61 @@
# Copyright (c) 2014, 2015, Rudy Gevaert
# Copyright (c) 2020 Crowdsec
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Some of the groks used here are from https://github.com/rgevaert/grok-patterns/blob/master/grok.d/postfix_patterns
onsuccess: next_stage
filter: "evt.Parsed.program in ['postfix/smtpd','postfix/smtps/smtpd','postfix/submission/smtpd', 'postfix/smtps-haproxy/smtpd', 'postfix/submission-haproxy/smtpd']"
name: crowdsecurity/postfix-logs
pattern_syntax:
POSTFIX_HOSTNAME: '(%{HOSTNAME}|unknown)'
POSTFIX_COMMAND: '(AUTH|STARTTLS|CONNECT|EHLO|HELO|RCPT)'
POSTFIX_ACTION: 'discard|dunno|filter|hold|ignore|info|prepend|redirect|replace|reject|warn'
RELAY: '(?:%{HOSTNAME:remote_host}(?:\[%{IP:remote_addr}\](?::[0-9]+(.[0-9]+)?)?)?)'
description: "Parse postfix logs"
nodes:
- grok:
apply_on: message
pattern: 'lost connection after %{DATA:smtp_response} from %{RELAY}'
statics:
- meta: log_type_enh
value: spam-attempt
- grok:
apply_on: message
pattern: 'warning: %{POSTFIX_HOSTNAME:remote_host}\[%{IP:remote_addr}\]: SASL ((?i)LOGIN|PLAIN|(?:CRAM|DIGEST)-MD5) authentication failed:%{GREEDYDATA:message_failure}'
statics:
- meta: log_type_enh
value: spam-attempt
- grok:
apply_on: message
pattern: 'NOQUEUE: %{POSTFIX_ACTION:action}: %{DATA:command} from %{RELAY}: %{GREEDYDATA:reason}'
statics:
- meta: action
expression: "evt.Parsed.action"
statics:
- meta: service
value: postfix
- meta: source_ip
expression: "evt.Parsed.remote_addr"
- meta: source_hostname
expression: "evt.Parsed.remote_host"
- meta: log_type
value: postfix

View File

@@ -0,0 +1,20 @@
onsuccess: next_stage
filter: "evt.Parsed.program in ['postfix/postscreen', 'haproxy/postscreen']"
name: crowdsecurity/postscreen-logs
pattern_syntax:
POSTSCREEN_PREGREET: 'PREGREET'
POSTSCREEN_PREGREET_TIME_ATTEMPT: '\d+.\d+'
description: "Parse postscreen logs"
nodes:
- grok:
apply_on: message
pattern: '%{POSTSCREEN_PREGREET:pregreet} %{INT:count} after %{POSTSCREEN_PREGREET_TIME_ATTEMPT:time_attempt} from \[%{IP:remote_addr}\]:%{INT:port}: %{GREEDYDATA:message_attempt}'
statics:
- meta: service
value: postscreen
- meta: source_ip
expression: "evt.Parsed.remote_addr"
- meta: pregreet
expression: "evt.Parsed.pregreet"

View File

@@ -0,0 +1,100 @@
onsuccess: next_stage
#debug: true
filter: "evt.Parsed.program == 'sshd'"
name: crowdsecurity/sshd-logs
description: "Parse openSSH logs"
pattern_syntax:
# The IP grok pattern that ships with crowdsec is buggy and does not capture the last digit of an IP if it is the last thing it matches, and the last octet starts with a 2
# https://github.com/crowdsecurity/crowdsec/issues/938
IPv4_WORKAROUND: (?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)
IP_WORKAROUND: (?:%{IPV6}|%{IPv4_WORKAROUND})
SSHD_AUTH_FAIL: 'pam_%{DATA:pam_type}\(sshd:auth\): authentication failure; logname= uid=%{NUMBER:uid}? euid=%{NUMBER:euid}? tty=ssh ruser= rhost=%{IP_WORKAROUND:sshd_client_ip}( %{SPACE}user=%{USERNAME:sshd_invalid_user})?'
SSHD_MAGIC_VALUE_FAILED: 'Magic value check failed \(\d+\) on obfuscated handshake from %{IP_WORKAROUND:sshd_client_ip} port \d+'
SSHD_INVALID_USER: 'Invalid user\s*%{USERNAME:sshd_invalid_user}? from %{IP_WORKAROUND:sshd_client_ip}( port \d+)?'
SSHD_INVALID_BANNER: 'banner exchange: Connection from %{IP_WORKAROUND:sshd_client_ip} port \d+: invalid format'
SSHD_PREAUTH_AUTHENTICATING_USER: 'Connection closed by (authenticating|invalid) user %{USERNAME:sshd_invalid_user} %{IP_WORKAROUND:sshd_client_ip} port \d+ \[preauth\]'
#following: https://github.com/crowdsecurity/crowdsec/issues/1201 - some scanners behave differently and trigger this one
SSHD_PREAUTH_AUTHENTICATING_USER_ALT: 'Disconnected from (authenticating|invalid) user %{USERNAME:sshd_invalid_user} %{IP_WORKAROUND:sshd_client_ip} port \d+ \[preauth\]'
SSHD_BAD_KEY_NEGOTIATION: 'Unable to negotiate with %{IP_WORKAROUND:sshd_client_ip} port \d+: no matching (host key type|key exchange method|MAC) found.'
nodes:
- grok:
name: "SSHD_FAIL"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: target_user
expression: "evt.Parsed.sshd_invalid_user"
- grok:
name: "SSHD_PREAUTH_AUTHENTICATING_USER_ALT"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: target_user
expression: "evt.Parsed.sshd_invalid_user"
- grok:
name: "SSHD_PREAUTH_AUTHENTICATING_USER"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: target_user
expression: "evt.Parsed.sshd_invalid_user"
- grok:
name: "SSHD_DISC_PREAUTH"
apply_on: message
- grok:
name: "SSHD_BAD_VERSION"
apply_on: message
- grok:
name: "SSHD_INVALID_USER"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: target_user
expression: "evt.Parsed.sshd_invalid_user"
- grok:
name: "SSHD_INVALID_BANNER"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: extra_log_type
value: ssh_bad_banner
- grok:
name: "SSHD_USER_FAIL"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: target_user
expression: "evt.Parsed.sshd_invalid_user"
- grok:
name: "SSHD_AUTH_FAIL"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: target_user
expression: "evt.Parsed.sshd_invalid_user"
- grok:
name: "SSHD_MAGIC_VALUE_FAILED"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: target_user
expression: "evt.Parsed.sshd_invalid_user"
- grok:
name: "SSHD_BAD_KEY_NEGOTIATION"
apply_on: message
statics:
- meta: log_type
value: ssh_bad_keyexchange
statics:
- meta: service
value: ssh
- meta: source_ip
expression: "evt.Parsed.sshd_client_ip"

View File

@@ -0,0 +1,69 @@
# co-authored with gmelodie (https://github.com/gmelodie)
name: crowdsecurity/traefik-logs
description: "Parse Traefik access logs"
filter: "evt.Parsed.program startsWith 'traefik'"
#debug: true
onsuccess: next_stage
pattern_syntax:
TRAEFIK_ROUTER: '(%{USER}@%{URIHOST}|\-)'
TRAEFIK_SERVER_URL: '(%{URI}|\-)'
NUMBER_MINUS: '[0-9-]+'
NGCUSTOMUSER: '[a-zA-Z0-9\.\@\-\+_%]+'
NGINXACCESS2: '%{IPORHOST:remote_addr} - %{NGCUSTOMUSER:remote_user} \[%{HTTPDATE:time_local}\] "%{WORD:verb} %{DATA:request} HTTP/%{NUMBER:http_version}" %{NUMBER_MINUS:status} %{NUMBER_MINUS:body_bytes_sent} "%{NOTDQUOTE:http_referer}" "%{NOTDQUOTE:http_user_agent}"'
nodes:
- grok: # CLF parser
pattern: '%{NGINXACCESS2} %{NUMBER:number_of_requests_received_since_traefik_started} "%{TRAEFIK_ROUTER:traefik_router_name}" "%{TRAEFIK_SERVER_URL:traefik_server_url}" %{NUMBER:request_duration_in_ms}ms'
apply_on: message
- filter: UnmarshalJSON(evt.Line.Raw, evt.Unmarshaled, "traefik") in ["", nil]
statics:
- parsed: remote_addr
expression: evt.Unmarshaled.traefik.ClientHost
- parsed: dest_addr
## Split dest_addr to get IP only as this is original functionality
expression: Split(evt.Unmarshaled.traefik.ClientAddr, ':')[0]
- parsed: request_addr
expression: evt.Unmarshaled.traefik.RequestAddr
- parsed: service_addr
## Split service_addr to get IP only as this is original functionality
expression: "evt.Unmarshaled.traefik.ServiceAddr != nil ? Split(evt.Unmarshaled.traefik.ServiceAddr, ':')[0] : nil"
- parsed: http_user_agent
expression: evt.Unmarshaled.traefik["request_User-Agent"] ## We have to access via [] as the key contains a dash
- parsed: body_bytes_sent
## We have to check if DownstreamContentSize is nil, as it will cause EXPR error if it is
expression: "evt.Unmarshaled.traefik.DownstreamContentSize != nil ? int(evt.Unmarshaled.traefik.DownstreamContentSize) : nil"
- parsed: request_duration_in_ms
expression: int(evt.Unmarshaled.traefik.Duration)
- parsed: traefik_router_name
expression: evt.Unmarshaled.traefik.RouterName
- parsed: time_local
expression: evt.Unmarshaled.traefik.time
- parsed: verb
expression: evt.Unmarshaled.traefik.RequestMethod
- parsed: request
expression: evt.Unmarshaled.traefik.RequestPath
- parsed: http_version
## Split http_version to get version only as this is original functionality
expression: Split(evt.Unmarshaled.traefik.RequestProtocol, '/')[1]
- parsed: status
expression: int(evt.Unmarshaled.traefik.DownstreamStatus)
statics:
- meta: service
value: http
- meta: http_status
expression: "evt.Parsed.status"
- meta: http_path
expression: "evt.Parsed.request"
- meta: user
expression: "evt.Parsed.remote_user"
- meta: source_ip
expression: "evt.Parsed.remote_addr"
- meta: http_user_agent
expression: "evt.Parsed.http_user_agent"
- meta: log_type
value: http_access-log
- target: evt.StrTime
expression: "evt.Parsed.time_local"
- meta: traefik_router_name
expression: "evt.Parsed.traefik_router_name"
- meta: http_verb
expression: "evt.Parsed.verb"

View File

@@ -0,0 +1,11 @@
filter: "evt.StrTime != ''"
name: crowdsecurity/dateparse-enrich
#debug: true
#it's a hack lol
statics:
- method: ParseDate
expression: evt.StrTime
- target: MarshaledTime
expression: evt.Enriched.MarshaledTime
- meta: timestamp
expression: evt.Enriched.MarshaledTime

View File

@@ -0,0 +1,27 @@
filter: "'source_ip' in evt.Meta"
name: crowdsecurity/geoip-enrich
description: "Populate event with geoloc info : as, country, coords, source range."
data:
- source_url: https://crowdsec-statics-assets.s3-eu-west-1.amazonaws.com/GeoLite2-City.mmdb
dest_file: GeoLite2-City.mmdb
- source_url: https://crowdsec-statics-assets.s3-eu-west-1.amazonaws.com/GeoLite2-ASN.mmdb
dest_file: GeoLite2-ASN.mmdb
statics:
- method: GeoIpCity
expression: evt.Meta.source_ip
- meta: IsoCode
expression: evt.Enriched.IsoCode
- meta: IsInEU
expression: evt.Enriched.IsInEU
- meta: GeoCoords
expression: evt.Enriched.GeoCoords
- method: GeoIpASN
expression: evt.Meta.source_ip
- meta: ASNNumber
expression: evt.Enriched.ASNNumber
- meta: ASNOrg
expression: evt.Enriched.ASNOrg
- method: IpToRange
expression: evt.Meta.source_ip
- meta: SourceRange
expression: evt.Enriched.SourceRange

View File

@@ -0,0 +1,33 @@
filter: "evt.Meta.service == 'http' && evt.Meta.log_type in ['http_access-log', 'http_error-log']"
description: "Parse more Specifically HTTP logs, such as HTTP Code, HTTP path, HTTP args and if its a static ressource"
name: crowdsecurity/http-logs
pattern_syntax:
DIR: "^.*/"
FILE: "[^/].*?"
EXT: "\\.[^.]*$|$"
nodes:
- statics:
- parsed: "impact_completion"
# the value of a field can as well be determined as the result of an expression
expression: "evt.Meta.http_status in ['404', '403', '502'] ? 'false' : 'true'"
- target: evt.Parsed.static_ressource
value: 'false'
# let's split the path?query if possible
- grok:
pattern: "^%{GREEDYDATA:request}\\?%{GREEDYDATA:http_args}$"
apply_on: request
# this is another node, with its own pattern_syntax
- #debug: true
grok:
pattern: "%{DIR:file_dir}(%{FILE:file_frag}%{EXT:file_ext})?"
apply_on: request
statics:
- meta: http_path
expression: "evt.Parsed.http_path"
# meta af
- meta: http_args_len
expression: "len(evt.Parsed.http_args)"
- parsed: file_name
expression: evt.Parsed.file_frag + evt.Parsed.file_ext
- parsed: static_ressource
expression: "Upper(evt.Parsed.file_ext) in ['.JPG', '.CSS', '.JS', '.JPEG', '.PNG', '.SVG', '.MAP', '.ICO', '.OTF', '.GIF', '.MP3', '.MP4', '.WOFF', '.WOFF2', '.TTF', '.OTF', '.EOT', '.WEBP', '.WAV', '.GZ', '.BROTLI', '.BVR', '.TS', '.BMP'] ? 'true' : 'false'"

View File

@@ -0,0 +1,14 @@
name: crowdsecurity/whitelists
description: "Whitelist events from private ipv4 addresses"
whitelist:
reason: "private ipv4/ipv6 ip/ranges"
ip:
- "127.0.0.1"
- "::1"
cidr:
- "192.168.0.0/16"
- "10.0.0.0/8"
- "172.16.0.0/12"
# expression:
# - "'foo.com' in evt.Meta.source_ip.reverse"

View File

@@ -0,0 +1,9 @@
onsuccess: next_stage
filter: "evt.Overflow.Alert.Remediation == true && evt.Overflow.Alert.GetScope() == 'Ip'"
name: crowdsecurity/rdns
description: "Lookup the DNS associated to the source IP only for overflows"
statics:
- method: reverse_dns
expression: evt.Overflow.Alert.Source.IP
- meta: reverse_dns
expression: evt.Enriched.reverse_dns

View File

@@ -0,0 +1,14 @@
name: crowdsecurity/cdn-whitelist
description: "Whitelist CDN providers"
whitelist:
reason: "CDN provider"
expression:
- "any(File('cloudflare_ips.txt'), { IpInRange(evt.Overflow.Alert.Source.IP ,#)})"
- "any(File('cloudflare_ip6s.txt'), { IpInRange(evt.Overflow.Alert.Source.IP ,#)})"
data:
- source_url: https://www.cloudflare.com/ips-v4
dest_file: cloudflare_ips.txt
type: string
- source_url: https://www.cloudflare.com/ips-v6
dest_file: cloudflare_ip6s.txt
type: string

View File

@@ -0,0 +1,18 @@
name: crowdsecurity/seo-bots-whitelist
description: "Whitelist good search engine crawlers"
whitelist:
reason: "good bots (search engine crawlers)"
expression:
- "any(File('rdns_seo_bots.txt'), { len(#) > 0 && evt.Enriched.reverse_dns endsWith #})"
- "RegexpInFile(evt.Enriched.reverse_dns, 'rdns_seo_bots.regex')"
- "any(File('ip_seo_bots.txt'), { len(#) > 0 && IpInRange(evt.Overflow.Alert.Source.IP ,#)})"
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/whitelists/benign_bots/search_engine_crawlers/rdns_seo_bots.txt
dest_file: rdns_seo_bots.txt
type: string
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/whitelists/benign_bots/search_engine_crawlers/rnds_seo_bots.regex
dest_file: rdns_seo_bots.regex
type: regexp
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/whitelists/benign_bots/search_engine_crawlers/ip_seo_bots.txt
dest_file: ip_seo_bots.txt
type: string

View File

@@ -0,0 +1,11 @@
type: trigger
format: 2.0
name: crowdsecurity/CVE-2019-18935
description: "Detect Telerik CVE-2019-18935 exploitation attempts"
filter: |
evt.Meta.log_type in ['http_access-log', 'http_error-log'] && Upper(QueryUnescape(evt.Meta.http_path)) startsWith Upper('/Telerik.Web.UI.WebResource.axd?type=rau')
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,10 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-26134
description: "Detect CVE-2022-26134 exploits"
filter: "Upper(PathUnescape(evt.Meta.http_path)) contains Upper('@java.lang.Runtime@getRuntime().exec(')"
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,10 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-35914
description: "Detect CVE-2022-35914 exploits"
filter: "Upper(evt.Meta.http_path) contains Upper('/vendor/htmlawed/htmlawed/htmLawedTest.php')"
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,18 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-37042
description: "Detect CVE-2022-37042 exploits"
filter: |
(
Upper(evt.Meta.http_path) contains Upper('/service/extension/backup/mboximport?account-name=admin&ow=2&no-switch=1&append=1') ||
Upper(evt.Meta.http_path) contains Upper('/service/extension/backup/mboximport?account-name=admin&account-status=1&ow=cmd')
)
and evt.Meta.http_status startsWith ('40') and
Upper(evt.Meta.http_verb) == 'POST'
blackhole: 2m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,11 @@
type: trigger
name: crowdsecurity/fortinet-cve-2022-40684
description: "Detect cve-2022-40684 exploitation attempts"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
Upper(evt.Meta.http_path) startsWith Upper('/api/v2/cmdb/system/admin/') and Lower(evt.Parsed.http_user_agent) == 'report runner'
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,13 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-41082
description: "Detect CVE-2022-41082 exploits"
filter: |
Upper(evt.Meta.http_path) contains Upper('/autodiscover/autodiscover.json') &&
Upper(evt.Parsed.http_args) contains Upper('powershell')
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,14 @@
type: leaky
name: crowdsecurity/CVE-2022-41697
description: "Detect CVE-2022-41697 enumeration"
filter: |
Upper(evt.Meta.http_path) contains Upper('/ghost/api/admin/session') &&
Upper(evt.Parsed.verb) == 'POST' &&
evt.Meta.http_status == '404'
leakspeed: "10s"
capacity: 5
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,17 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-42889
description: "Detect CVE-2022-42889 exploits (Text4Shell)"
filter: |
Upper(PathUnescape(evt.Meta.http_path)) contains Upper('${script:javascript:java.lang.Runtime.getRuntime().exec(')
or
Upper(PathUnescape(evt.Meta.http_path)) contains Upper('${script:js:java.lang.Runtime.getRuntime().exec(')
or
Upper(PathUnescape(evt.Meta.http_path)) contains Upper('${url:UTF-8:')
or
Upper(PathUnescape(evt.Meta.http_path)) contains Upper('${dns:address|')
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,15 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-44877
description: "Detect CVE-2022-44877 exploits"
filter: |
Lower(evt.Meta.http_path) contains '/index.php' &&
Upper(evt.Parsed.verb) == 'POST' &&
evt.Meta.http_status == '302' &&
Lower(evt.Parsed.http_args) matches 'login=.*[$|%24][\\(|%28].*[\\)|%29]'
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,29 @@
type: leaky
name: crowdsecurity/CVE-2022-46169-bf
description: "Detect CVE-2022-46169 brute forcing"
filter: |
Upper(evt.Meta.http_path) contains Upper('/remote_agent.php') &&
Upper(evt.Parsed.verb) == 'GET' &&
Lower(evt.Parsed.http_args) contains 'host_id' &&
Lower(evt.Parsed.http_args) contains 'local_data_ids'
leakspeed: "10s"
capacity: 5
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true
---
type: trigger
name: crowdsecurity/CVE-2022-46169-cmd
description: "Detect CVE-2022-46169 cmd injection"
filter: |
Upper(evt.Meta.http_path) contains Upper('/remote_agent.php') &&
Upper(evt.Parsed.verb) == 'GET' &&
Lower(evt.Parsed.http_args) contains 'action=polldata' &&
Lower(evt.Parsed.http_args) matches 'poller_id=.*(;|%3b)'
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,23 @@
type: trigger
format: 2.0
#debug: true
name: crowdsecurity/apache_log4j2_cve-2021-44228
description: "Detect cve-2021-44228 exploitation attempts"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
(
any(File("log4j2_cve_2021_44228.txt"), { Upper(evt.Meta.http_path) contains Upper(#)})
or
any(File("log4j2_cve_2021_44228.txt"), { Upper(evt.Parsed.http_user_agent) contains Upper(#)})
or
any(File("log4j2_cve_2021_44228.txt"), { Upper(evt.Parsed.http_referer) contains Upper(#)})
)
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/log4j2_cve_2021_44228.txt
dest_file: log4j2_cve_2021_44228.txt
type: string
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,13 @@
#contribution by @ltsich
type: leaky
name: crowdsecurity/dovecot-spam
description: "detect dovecot authentication failures"
debug: false
filter: "evt.Meta.log_type == 'dovecot_logs' && evt.Meta.dovecot_login_result == 'auth_failed'"
groupby: evt.Meta.source_ip
capacity: 3
leakspeed: "360s"
blackhole: 5m
labels:
type: scan
remediation: true

View File

@@ -0,0 +1,16 @@
type: trigger
format: 2.0
name: crowdsecurity/f5-big-ip-cve-2020-5902
description: "Detect cve-2020-5902 exploitation attempts"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
(
Upper(evt.Meta.http_path) matches Upper('/tmui/login.jsp/..;/tmui/[^.]+.jsp\\?(fileName|command|directoryPath|tabId)=')
or
Upper(evt.Meta.http_path) matches Upper('/tmui/login.jsp/%2E%2E;/tmui/[^.]+.jsp\\?(fileName|command|directoryPath|tabId)=')
)
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,12 @@
type: trigger
format: 2.0
name: crowdsecurity/fortinet-cve-2018-13379
description: "Detect cve-2018-13379 exploitation attempts"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
Upper(evt.Meta.http_path) contains Upper('/remote/fgt_lang?lang=/../../../..//////////dev/cmdb/sslvpn_websession')
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,14 @@
type: trigger
format: 2.0
name: crowdsecurity/grafana-cve-2021-43798
description: "Detect cve-2021-43798 exploitation attempts"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
(Upper(evt.Meta.http_path) matches '/PUBLIC/PLUGINS/[^/]+/../[./]+/'
or
Upper(evt.Meta.http_path) matches '/PUBLIC/PLUGINS/[^/]+/%2E%2E/[%2E/]+/')
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,18 @@
type: leaky
#debug: true
name: crowdsecurity/http-backdoors-attempts
description: "Detect attempt to common backdoors"
filter: 'evt.Meta.log_type in ["http_access-log", "http_error-log"] and any(File("backdoors.txt"), { evt.Parsed.file_name == #})'
groupby: "evt.Meta.source_ip"
distinct: evt.Parsed.file_name
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/backdoors.txt
dest_file: backdoors.txt
type: string
capacity: 1
leakspeed: 5s
blackhole: 5m
labels:
service: http
type: discovery
remediation: true

View File

@@ -0,0 +1,20 @@
type: leaky
format: 2.0
#debug: true
name: crowdsecurity/http-bad-user-agent
description: "Detect bad user-agents"
filter: 'evt.Meta.log_type in ["http_access-log", "http_error-log"] && RegexpInFile(evt.Parsed.http_user_agent, "bad_user_agents.regex.txt")'
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/bad_user_agents.regex.txt
dest_file: bad_user_agents.regex.txt
type: regexp
strategy: LRU
size: 40
ttl: 10s
capacity: 1
leakspeed: 1m
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: scan
remediation: true

View File

@@ -0,0 +1,16 @@
type: leaky
name: crowdsecurity/http-crawl-non_statics
description: "Detect aggressive crawl from single ip"
filter: "evt.Meta.log_type in ['http_access-log', 'http_error-log'] && evt.Parsed.static_ressource == 'false' && evt.Parsed.verb in ['GET', 'HEAD']"
distinct: "evt.Parsed.file_name"
leakspeed: 0.5s
capacity: 40
#debug: true
#this limits the memory cache (and event_sequences in output) to five events
cache_size: 5
groupby: "evt.Meta.source_ip + '/' + evt.Parsed.target_fqdn"
blackhole: 1m
labels:
service: http
type: crawl
remediation: true

View File

@@ -0,0 +1,15 @@
type: trigger
format: 2.0
#debug: true
name: crowdsecurity/http-cve-2021-41773
description: "cve-2021-41773"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
(Upper(evt.Meta.http_path) contains "/.%2E/.%2E/"
or
Upper(evt.Meta.http_path) contains "/%2E%2E/%2E%2E")
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: scan
remediation: true

View File

@@ -0,0 +1,14 @@
type: trigger
format: 2.0
#debug: true
#this is getting funny, it's the third patch on top of cve-2021-41773
name: crowdsecurity/http-cve-2021-42013
description: "cve-2021-42013"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
Upper(evt.Meta.http_path) contains "/%%32%65%%32%65/"
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: scan
remediation: true

View File

@@ -0,0 +1,44 @@
# 404 scan
type: leaky
#debug: true
name: crowdsecurity/http-generic-bf
description: "Detect generic http brute force"
filter: "evt.Meta.service == 'http' && evt.Meta.sub_type == 'auth_fail'"
groupby: evt.Meta.source_ip
capacity: 5
leakspeed: "10s"
blackhole: 1m
labels:
service: http
type: bf
remediation: true
---
# Generic 401 Authorization Errors
type: leaky
#debug: true
name: LePresidente/http-generic-401-bf
description: "Detect generic 401 Authorization error brute force"
filter: "evt.Meta.log_type == 'http_access-log' && evt.Parsed.verb == 'POST' && evt.Meta.http_status == '401'"
groupby: evt.Meta.source_ip
capacity: 5
leakspeed: "10s"
blackhole: 1m
labels:
service: http
type: bf
remediation: true
---
# Generic 403 Forbidden (Authorization) Errors
type: leaky
#debug: true
name: LePresidente/http-generic-403-bf
description: "Detect generic 403 Forbidden (Authorization) error brute force"
filter: "evt.Meta.log_type == 'http_access-log' && evt.Parsed.verb == 'POST' && evt.Meta.http_status == '403'"
groupby: evt.Meta.source_ip
capacity: 5
leakspeed: "10s"
blackhole: 1m
labels:
service: http
type: bf
remediation: true

View File

@@ -0,0 +1,10 @@
type: trigger
name: crowdsecurity/http-open-proxy
description: "Detect scan for open proxy"
#apache returns 405, nginx 400
filter: "evt.Meta.log_type == 'http_access-log' && evt.Meta.http_status in ['400','405'] && (evt.Parsed.verb == 'CONNECT' || evt.Parsed.request matches '^http[s]?://')"
blackhole: 2m
labels:
service: http
type: scan
remediation: true

View File

@@ -0,0 +1,20 @@
# path traversal probing
type: leaky
#debug: true
name: crowdsecurity/http-path-traversal-probing
description: "Detect path traversal attempt"
filter: "evt.Meta.log_type in ['http_access-log', 'http_error-log'] && any(File('http_path_traversal.txt'),{evt.Meta.http_path contains #})"
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/path_traversal.txt
dest_file: http_path_traversal.txt
type: string
groupby: "evt.Meta.source_ip"
distinct: "evt.Meta.http_path"
capacity: 3
reprocess: true
leakspeed: 10s
blackhole: 2m
labels:
service: http
type: scan
remediation: true

View File

@@ -0,0 +1,16 @@
# 404 scan
type: leaky
#debug: true
name: crowdsecurity/http-probing
description: "Detect site scanning/probing from a single ip"
filter: "evt.Meta.service == 'http' && evt.Meta.http_status in ['404', '403', '400'] && evt.Parsed.static_ressource == 'false'"
groupby: "evt.Meta.source_ip + '/' + evt.Parsed.target_fqdn"
distinct: "evt.Meta.http_path"
capacity: 10
reprocess: true
leakspeed: "10s"
blackhole: 5m
labels:
service: http
type: scan
remediation: true

View File

@@ -0,0 +1,19 @@
type: leaky
format: 2.0
#debug: true
name: crowdsecurity/http-sensitive-files
description: "Detect attempt to access to sensitive files (.log, .db ..) or folders (.git)"
filter: 'evt.Meta.log_type in ["http_access-log", "http_error-log"] and any(File("sensitive_data.txt"), { evt.Parsed.request endsWith #})'
groupby: "evt.Meta.source_ip"
distinct: evt.Parsed.request
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/sensitive_data.txt
dest_file: sensitive_data.txt
type: string
capacity: 4
leakspeed: 5s
blackhole: 5m
labels:
service: http
type: discovery
remediation: true

View File

@@ -0,0 +1,20 @@
type: leaky
#requires at least 2.0 because it's using the 'data' section and the 'Upper' expr helper
format: 2.0
name: crowdsecurity/http-sqli-probbing-detection
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/sqli_probe_patterns.txt
dest_file: sqli_probe_patterns.txt
type: string
description: "A scenario that detects SQL injection probing with minimal false positives"
filter: "evt.Meta.log_type in ['http_access-log', 'http_error-log'] && any(File('sqli_probe_patterns.txt'), {Upper(evt.Parsed.http_args) contains Upper(#)})"
groupby: evt.Meta.source_ip
capacity: 10
leakspeed: 1s
blackhole: 5m
#low false positives approach : we require distinct payloads to avoid false positives
distinct: evt.Parsed.http_args
labels:
service: http
type: sqli_probing
remediation: true

View File

@@ -0,0 +1,20 @@
type: leaky
#requires at least 2.0 because it's using the 'data' section and the 'Upper' expr helper
format: 2.0
name: crowdsecurity/http-xss-probbing
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/xss_probe_patterns.txt
dest_file: xss_probe_patterns.txt
type: string
description: "A scenario that detects XSS probing with minimal false positives"
filter: "evt.Meta.log_type in ['http_access-log', 'http_error-log'] && any(File('xss_probe_patterns.txt'), {Upper(evt.Parsed.http_args) contains Upper(#)})"
groupby: evt.Meta.source_ip
capacity: 5
leakspeed: 1s
blackhole: 5m
#low false positives approach : we require distinct payloads to avoid false positives
distinct: evt.Parsed.http_args
labels:
service: http
type: xss_probing
remediation: true

View File

@@ -0,0 +1,16 @@
type: trigger
format: 2.0
#debug: true
name: crowdsecurity/jira_cve-2021-26086
description: "Detect Atlassian Jira CVE-2021-26086 exploitation attempts"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and any(File("jira_cve_2021-26086.txt"), {Upper(evt.Meta.http_path) contains Upper(#)})
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/jira_cve_2021-26086.txt
dest_file: jira_cve_2021-26086.txt
type: string
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,13 @@
type: trigger
format: 2.0
name: crowdsecurity/netgear_rce
description: "Detect Netgear RCE DGN1000/DGN220 exploitation attempts"
filter: |
evt.Meta.log_type in ['http_access-log', 'http_error-log'] && Lower(QueryUnescape(evt.Meta.http_path)) startsWith Lower('/setup.cgi?next_file=netgear.cfg&todo=syscmd&cmd=')
groupby: "evt.Meta.source_ip"
blackhole: 2m
references:
- "https://www.exploit-db.com/exploits/25978"
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,13 @@
type: leaky
#debug: true
name: crowdsecurity/nginx-req-limit-exceeded
description: "Detects IPs which violate nginx's user-set request limit."
filter: evt.Meta.sub_type == 'req_limit_exceeded'
leakspeed: "60s"
capacity: 5
groupby: evt.Meta.source_ip
blackhole: 5m
labels:
service: nginx
type: bruteforce
remediation: true

View File

@@ -0,0 +1,33 @@
# postfix spam
type: leaky
name: crowdsecurity/postfix-spam
description: "Detect spammers"
# NOTE(review): '&&' binds tighter than '||', so this matches
# (spam-attempt) OR (postfix AND reject) — confirm that grouping is intended.
filter: "evt.Meta.log_type_enh == 'spam-attempt' || evt.Meta.log_type == 'postfix' && evt.Meta.action == 'reject'"
leakspeed: "10s"
references:
- https://en.wikipedia.org/wiki/Spamming
capacity: 5
groupby: evt.Meta.source_ip
blackhole: 1m
reprocess: false
labels:
service: postfix
type: bruteforce
remediation: true
---
# postfix spam
type: trigger
# NOTE(review): name says "postscreen-rbl" but the filter matches PREGREET
# events, not RBL hits — confirm against the postscreen parser.
name: crowdsecurity/postscreen-rbl
description: "Detect spammers"
filter: "evt.Meta.service == 'postscreen' && evt.Meta.pregreet == 'PREGREET'"
# NOTE(review): leakspeed has no effect on a 'trigger' bucket, which fires
# on the first matching event — drop it, or switch the type to leaky.
leakspeed: "10s"
references:
- https://en.wikipedia.org/wiki/Spamming
groupby: evt.Meta.source_ip
blackhole: 1m
reprocess: false
labels:
service: postscreen
type: bruteforce
remediation: true

View File

@@ -0,0 +1,14 @@
type: trigger
format: 2.0
name: crowdsecurity/pulse-secure-sslvpn-cve-2019-11510
description: "Detect cve-2019-11510 exploitation attempts"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
(Upper(evt.Meta.http_path) matches Upper('/dana-na/../dana/html5acc/guacamole/../../../../../../../[^?]+\\?/dana/html5acc/guacamole/')
or
Upper(evt.Meta.http_path) matches Upper('/dana-na/%2E%2E/dana/html5acc/guacamole/%2E%2E/%2E%2E/%2E%2E/%2E%2E/%2E%2E/%2E%2E/%2E%2E/[^?]+\\?/dana/html5acc/guacamole/'))
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,12 @@
type: trigger
format: 2.0
name: crowdsecurity/spring4shell_cve-2022-22965
description: "Detect cve-2022-22965 probing"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
(Upper(evt.Meta.http_path) contains 'CLASS.MODULE.CLASSLOADER.')
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,32 @@
# ssh bruteforce
type: leaky
name: crowdsecurity/ssh-bf
description: "Detect ssh bruteforce"
filter: "evt.Meta.log_type == 'ssh_failed-auth'"
leakspeed: "10s"
references:
- http://wikipedia.com/ssh-bf-is-bad
capacity: 5
groupby: evt.Meta.source_ip
blackhole: 1m
reprocess: true
labels:
service: ssh
type: bruteforce
remediation: true
---
# ssh user-enum
type: leaky
name: crowdsecurity/ssh-bf_user-enum
description: "Detect ssh user enum bruteforce"
filter: evt.Meta.log_type == 'ssh_failed-auth'
groupby: evt.Meta.source_ip
distinct: evt.Meta.target_user
leakspeed: 10s
capacity: 5
blackhole: 1m
labels:
service: ssh
type: bruteforce
remediation: true

View File

@@ -0,0 +1,32 @@
# ssh bruteforce
type: leaky
name: crowdsecurity/ssh-slow-bf
description: "Detect slow ssh bruteforce"
filter: "evt.Meta.log_type == 'ssh_failed-auth'"
leakspeed: "60s"
references:
- http://wikipedia.com/ssh-bf-is-bad
capacity: 10
groupby: evt.Meta.source_ip
blackhole: 1m
reprocess: true
labels:
service: ssh
type: bruteforce
remediation: true
---
# ssh user-enum
type: leaky
name: crowdsecurity/ssh-slow-bf_user-enum
description: "Detect slow ssh user enum bruteforce"
filter: evt.Meta.log_type == 'ssh_failed-auth'
groupby: evt.Meta.source_ip
distinct: evt.Meta.target_user
leakspeed: 60s
capacity: 10
blackhole: 1m
labels:
service: ssh
type: bruteforce
remediation: true

View File

@@ -0,0 +1,16 @@
type: trigger
format: 2.0
#debug: true
name: crowdsecurity/thinkphp-cve-2018-20062
description: "Detect ThinkPHP CVE-2018-20062 exploitation attempts"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and any(File("thinkphp_cve_2018-20062.txt"), {Upper(evt.Meta.http_path) matches Upper(#)})
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/thinkphp_cve_2018-20062.txt
dest_file: thinkphp_cve_2018-20062.txt
type: string
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,11 @@
type: trigger
format: 2.0
name: crowdsecurity/vmware-cve-2022-22954
description: "Detect Vmware CVE-2022-22954 exploitation attempts"
filter: |
evt.Meta.log_type in ['http_access-log', 'http_error-log'] && Upper(QueryUnescape(evt.Meta.http_path)) startsWith Upper('/catalog-portal/ui/oauth/verify?error=&deviceUdid=${"freemarker.template.utility.Execute"?new()(')
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,11 @@
type: trigger
format: 2.0
name: crowdsecurity/vmware-vcenter-vmsa-2021-0027
description: "Detect VMSA-2021-0027 exploitation attempts"
filter: |
evt.Meta.log_type in ['http_access-log', 'http_error-log'] && evt.Meta.http_path matches '/ui/vcav-bootstrap/rest/vcav-providers/provider-logo\\?url=(file|http)'
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,12 @@
#contributed by ltsich
type: trigger
name: ltsich/http-w00tw00t
description: "detect w00tw00t"
debug: false
filter: "evt.Meta.log_type == 'http_access-log' && evt.Parsed.file_name contains 'w00tw00t.at.ISC.SANS.DFind'"
groupby: evt.Meta.source_ip
blackhole: 5m
labels:
service: http
type: scan
remediation: true

View File

@@ -0,0 +1,3 @@
# NOTE(review): committed LAPI credentials — rotate this password and keep
# this file out of version control.
# NOTE(review): 0.0.0.0 is a bind/listen address; clients normally connect
# to http://localhost:8080 — confirm this works for the agent.
url: http://0.0.0.0:8080
login: localhost
password: Q8A8aV6bUtj50QbzwtbJczButlAaKmmGafn784Go1ERjXtNS9mwQ4XHJNQ9294VL

View File

@@ -0,0 +1,45 @@
type: email # Don't change
name: email_default # Must match the registered plugin in the profile
# One of "trace", "debug", "info", "warn", "error", "off"
log_level: info
# group_wait: # Time to wait collecting alerts before relaying a message to this plugin, eg "30s"
# group_threshold: # Amount of alerts that triggers a message before <group_wait> has expired, eg "10"
# max_retry: # Number of attempts to relay messages to plugins in case of error
timeout: 20s # Time to wait for response from the plugin before considering the attempt a failure, eg "10s"
#-------------------------
# plugin-specific options
# The following template receives a list of models.Alert objects
# The output goes in the email message body
format: |
{{range . -}}
{{$alert := . -}}
{{range .Decisions -}}
<html><body><p><a href=https://www.whois.com/whois/{{.Value}}>{{.Value}}</a> will get <b>{{.Type}}</b> for next <b>{{.Duration}}</b> for triggering <b>{{.Scenario}}</b> on machine <b>{{$alert.MachineID}}</b>.</p> <p><a href=https://app.crowdsec.net/cti/{{.Value}}>CrowdSec CTI</a></p></body></html>
{{end -}}
{{end -}}
smtp_host: # example: smtp.gmail.com
smtp_username: # Replace with your actual username
smtp_password: # Replace with your actual password
smtp_port: # Common values are any of [25, 465, 587, 2525]
auth_type: # Valid choices are "none", "crammd5", "login", "plain"
sender_name: "CrowdSec"
sender_email: # example: foo@gmail.com
email_subject: "CrowdSec Notification"
receiver_emails:
# - email1@gmail.com
# - email2@gmail.com
# One of "ssltls", "starttls", "none"
encryption_type: ssltls
---
# type: email
# name: email_second_notification
# ...

View File

@@ -0,0 +1,36 @@
type: http # Don't change
name: http_default # Must match the registered plugin in the profile
# One of "trace", "debug", "info", "warn", "error", "off"
log_level: info
# group_wait: # Time to wait collecting alerts before relaying a message to this plugin, eg "30s"
# group_threshold: # Amount of alerts that triggers a message before <group_wait> has expired, eg "10"
# max_retry: # Number of attempts to relay messages to plugins in case of error
# timeout: # Time to wait for response from the plugin before considering the attempt a failure, eg "10s"
#-------------------------
# plugin-specific options
# The following template receives a list of models.Alert objects
# The output goes in the http request body
format: |
{{.|toJson}}
# The plugin will make requests to this url, eg: https://www.example.com/
url: <HTTP_url>
# Any of the http verbs: "POST", "GET", "PUT"...
method: POST
# headers:
# Authorization: token 0x64312313
# skip_tls_verification: # true or false. Default is false
---
# type: http
# name: http_second_notification
# ...

View File

@@ -0,0 +1,36 @@
type: slack # Don't change
name: slack_default # Must match the registered plugin in the profile
# One of "trace", "debug", "info", "warn", "error", "off"
log_level: info
# group_wait: # Time to wait collecting alerts before relaying a message to this plugin, eg "30s"
# group_threshold: # Amount of alerts that triggers a message before <group_wait> has expired, eg "10"
# max_retry: # Number of attempts to relay messages to plugins in case of error
# timeout: # Time to wait for response from the plugin before considering the attempt a failure, eg "10s"
#-------------------------
# plugin-specific options
# The following template receives a list of models.Alert objects
# The output goes in the slack message
format: |
{{range . -}}
{{$alert := . -}}
{{range .Decisions -}}
{{if $alert.Source.Cn -}}
:flag-{{$alert.Source.Cn}}: <https://www.whois.com/whois/{{.Value}}|{{.Value}}> will get {{.Type}} for next {{.Duration}} for triggering {{.Scenario}} on machine '{{$alert.MachineID}}'. <https://app.crowdsec.net/cti/{{.Value}}|CrowdSec CTI>{{end}}
{{if not $alert.Source.Cn -}}
:pirate_flag: <https://www.whois.com/whois/{{.Value}}|{{.Value}}> will get {{.Type}} for next {{.Duration}} for triggering {{.Scenario}} on machine '{{$alert.MachineID}}'. <https://app.crowdsec.net/cti/{{.Value}}|CrowdSec CTI>{{end}}
{{end -}}
{{end -}}
webhook: <WEBHOOK_URL>
---
# type: slack
# name: slack_second_notification
# ...

View File

@@ -0,0 +1,28 @@
type: splunk # Don't change
name: splunk_default # Must match the registered plugin in the profile
# One of "trace", "debug", "info", "warn", "error", "off"
log_level: info
# group_wait: # Time to wait collecting alerts before relaying a message to this plugin, eg "30s"
# group_threshold: # Amount of alerts that triggers a message before <group_wait> has expired, eg "10"
# max_retry: # Number of attempts to relay messages to plugins in case of error
# timeout: # Time to wait for response from the plugin before considering the attempt a failure, eg "10s"
#-------------------------
# plugin-specific options
# The following template receives a list of models.Alert objects
# The output goes in the splunk notification
format: |
{{.|toJson}}
url: <SPLUNK_HTTP_URL>
token: <SPLUNK_TOKEN>
---
# type: splunk
# name: splunk_second_notification
# ...

View File

@@ -0,0 +1,3 @@
# NOTE(review): committed Central API credentials — rotate them and keep
# this file out of version control.
url: https://api.crowdsec.net/
login: 54fe1367282543d88a34ebe89afdfb19QN0T7nGl0vP8US28
password: RlkzrPCNPKrwgaAmpjY3LUIdyobj74435KzasHF89w4gqQ1E36DAfja8k0BlUBJx

View File

@@ -0,0 +1 @@
/etc/crowdsec/hub/parsers/s00-raw/crowdsecurity/cri-logs.yaml

View File

@@ -0,0 +1 @@
/etc/crowdsec/hub/parsers/s00-raw/crowdsecurity/docker-logs.yaml

View File

@@ -0,0 +1 @@
/etc/crowdsec/hub/parsers/s01-parse/crowdsecurity/dovecot-logs.yaml

View File

@@ -0,0 +1 @@
/etc/crowdsec/hub/parsers/s01-parse/crowdsecurity/nginx-logs.yaml

View File

@@ -0,0 +1 @@
/etc/crowdsec/hub/parsers/s01-parse/crowdsecurity/postfix-logs.yaml

View File

@@ -0,0 +1 @@
/etc/crowdsec/hub/parsers/s01-parse/crowdsecurity/postscreen-logs.yaml

View File

@@ -0,0 +1 @@
/etc/crowdsec/hub/parsers/s01-parse/crowdsecurity/traefik-logs.yaml

View File

@@ -0,0 +1 @@
/etc/crowdsec/hub/parsers/s02-enrich/crowdsecurity/http-logs.yaml

View File

@@ -0,0 +1,11 @@
S3_REQUEST_LINE (?:%{WORD:verb} %{NOTSPACE:request}(?: HTTP/%{NUMBER:httpversion})?|%{DATA:rawrequest})
S3_ACCESS_LOG %{WORD:owner} %{NOTSPACE:bucket} \[%{HTTPDATE:timestamp}\] %{IP:clientip} %{NOTSPACE:requester} %{NOTSPACE:request_id} %{NOTSPACE:operation} %{NOTSPACE:key} (?:"%{S3_REQUEST_LINE}"|-) (?:%{INT:response:int}|-) (?:-|%{NOTSPACE:error_code}) (?:%{INT:bytes:int}|-) (?:%{INT:object_size:int}|-) (?:%{INT:request_time_ms:int}|-) (?:%{INT:turnaround_time_ms:int}|-) (?:%{QS:referrer}|-) (?:"?%{QS:agent}"?|-) (?:-|%{NOTSPACE:version_id})
ELB_URIPATHPARAM %{URIPATH:path}(?:%{URIPARAM:params})?
ELB_URI %{URIPROTO:proto}://(?:%{USER}(?::[^@]*)?@)?(?:%{URIHOST:urihost})?(?:%{ELB_URIPATHPARAM})?
ELB_REQUEST_LINE (?:%{WORD:verb} %{ELB_URI:request}(?: HTTP/%{NUMBER:httpversion})?|%{DATA:rawrequest})
ELB_ACCESS_LOG %{TIMESTAMP_ISO8601:timestamp} %{NOTSPACE:elb} %{IP:clientip}:%{INT:clientport:int} (?:(%{IP:backendip}:?:%{INT:backendport:int})|-) %{NUMBER:request_processing_time:float} %{NUMBER:backend_processing_time:float} %{NUMBER:response_processing_time:float} %{INT:response:int} %{INT:backend_response:int} %{INT:received_bytes:int} %{INT:bytes:int} "%{ELB_REQUEST_LINE}"

View File

@@ -0,0 +1,50 @@
BACULA_TIMESTAMP %{MONTHDAY}-%{MONTH} %{HOUR}:%{MINUTE}
BACULA_HOST [a-zA-Z0-9-]+
BACULA_VOLUME %{USER}
BACULA_DEVICE %{USER}
BACULA_DEVICEPATH %{UNIXPATH}
BACULA_CAPACITY %{INT}{1,3}(,%{INT}{3})*
BACULA_VERSION %{USER}
BACULA_JOB %{USER}
BACULA_LOG_MAX_CAPACITY User defined maximum volume capacity %{BACULA_CAPACITY} exceeded on device \"%{BACULA_DEVICE:device}\" \(%{BACULA_DEVICEPATH}\)
BACULA_LOG_END_VOLUME End of medium on Volume \"%{BACULA_VOLUME:volume}\" Bytes=%{BACULA_CAPACITY} Blocks=%{BACULA_CAPACITY} at %{MONTHDAY}-%{MONTH}-%{YEAR} %{HOUR}:%{MINUTE}.
BACULA_LOG_NEW_VOLUME Created new Volume \"%{BACULA_VOLUME:volume}\" in catalog.
BACULA_LOG_NEW_LABEL Labeled new Volume \"%{BACULA_VOLUME:volume}\" on device \"%{BACULA_DEVICE:device}\" \(%{BACULA_DEVICEPATH}\).
BACULA_LOG_WROTE_LABEL Wrote label to prelabeled Volume \"%{BACULA_VOLUME:volume}\" on device \"%{BACULA_DEVICE}\" \(%{BACULA_DEVICEPATH}\)
BACULA_LOG_NEW_MOUNT New volume \"%{BACULA_VOLUME:volume}\" mounted on device \"%{BACULA_DEVICE:device}\" \(%{BACULA_DEVICEPATH}\) at %{MONTHDAY}-%{MONTH}-%{YEAR} %{HOUR}:%{MINUTE}.
BACULA_LOG_NOOPEN \s+Cannot open %{DATA}: ERR=%{GREEDYDATA:berror}
BACULA_LOG_NOOPENDIR \s+Could not open directory %{DATA}: ERR=%{GREEDYDATA:berror}
BACULA_LOG_NOSTAT \s+Could not stat %{DATA}: ERR=%{GREEDYDATA:berror}
BACULA_LOG_NOJOBS There are no more Jobs associated with Volume \"%{BACULA_VOLUME:volume}\". Marking it purged.
BACULA_LOG_ALL_RECORDS_PRUNED All records pruned from Volume \"%{BACULA_VOLUME:volume}\"; marking it \"Purged\"
BACULA_LOG_BEGIN_PRUNE_JOBS Begin pruning Jobs older than %{INT} month %{INT} days .
BACULA_LOG_BEGIN_PRUNE_FILES Begin pruning Files.
BACULA_LOG_PRUNED_JOBS Pruned %{INT} Jobs* for client %{BACULA_HOST:client} from catalog.
BACULA_LOG_PRUNED_FILES Pruned Files from %{INT} Jobs* for client %{BACULA_HOST:client} from catalog.
BACULA_LOG_ENDPRUNE End auto prune.
BACULA_LOG_STARTJOB Start Backup JobId %{INT}, Job=%{BACULA_JOB:job}
BACULA_LOG_STARTRESTORE Start Restore Job %{BACULA_JOB:job}
BACULA_LOG_USEDEVICE Using Device \"%{BACULA_DEVICE:device}\"
BACULA_LOG_DIFF_FS \s+%{UNIXPATH} is a different filesystem. Will not descend from %{UNIXPATH} into it.
BACULA_LOG_JOBEND Job write elapsed time = %{DATA:elapsed}, Transfer rate = %{NUMBER} (K|M|G)? Bytes/second
BACULA_LOG_NOPRUNE_JOBS No Jobs found to prune.
BACULA_LOG_NOPRUNE_FILES No Files found to prune.
BACULA_LOG_VOLUME_PREVWRITTEN Volume \"%{BACULA_VOLUME:volume}\" previously written, moving to end of data.
BACULA_LOG_READYAPPEND Ready to append to end of Volume \"%{BACULA_VOLUME:volume}\" size=%{INT}
BACULA_LOG_CANCELLING Cancelling duplicate JobId=%{INT}.
BACULA_LOG_MARKCANCEL JobId %{INT}, Job %{BACULA_JOB:job} marked to be canceled.
BACULA_LOG_CLIENT_RBJ shell command: run ClientRunBeforeJob \"%{GREEDYDATA:runjob}\"
BACULA_LOG_VSS (Generate )?VSS (Writer)?
BACULA_LOG_MAXSTART Fatal error: Job canceled because max start delay time exceeded.
BACULA_LOG_DUPLICATE Fatal error: JobId %{INT:duplicate} already running. Duplicate job not allowed.
BACULA_LOG_NOJOBSTAT Fatal error: No Job status returned from FD.
BACULA_LOG_FATAL_CONN Fatal error: bsock.c:133 Unable to connect to (Client: %{BACULA_HOST:client}|Storage daemon) on %{HOSTNAME}:%{POSINT}. ERR=%{GREEDYDATA:berror}
BACULA_LOG_NO_CONNECT Warning: bsock.c:127 Could not connect to (Client: %{BACULA_HOST:client}|Storage daemon) on %{HOSTNAME}:%{POSINT}. ERR=%{GREEDYDATA:berror}
BACULA_LOG_NO_AUTH Fatal error: Unable to authenticate with File daemon at %{HOSTNAME}. Possible causes:
BACULA_LOG_NOSUIT No prior or suitable Full backup found in catalog. Doing FULL backup.
BACULA_LOG_NOPRIOR No prior Full backup Job record found.
BACULA_LOG_JOB (Error: )?Bacula %{BACULA_HOST} %{BACULA_VERSION} \(%{BACULA_VERSION}\):
BACULA_LOGLINE %{BACULA_TIMESTAMP:bts} %{BACULA_HOST:hostname} JobId %{INT:jobid}: (%{BACULA_LOG_MAX_CAPACITY}|%{BACULA_LOG_END_VOLUME}|%{BACULA_LOG_NEW_VOLUME}|%{BACULA_LOG_NEW_LABEL}|%{BACULA_LOG_WROTE_LABEL}|%{BACULA_LOG_NEW_MOUNT}|%{BACULA_LOG_NOOPEN}|%{BACULA_LOG_NOOPENDIR}|%{BACULA_LOG_NOSTAT}|%{BACULA_LOG_NOJOBS}|%{BACULA_LOG_ALL_RECORDS_PRUNED}|%{BACULA_LOG_BEGIN_PRUNE_JOBS}|%{BACULA_LOG_BEGIN_PRUNE_FILES}|%{BACULA_LOG_PRUNED_JOBS}|%{BACULA_LOG_PRUNED_FILES}|%{BACULA_LOG_ENDPRUNE}|%{BACULA_LOG_STARTJOB}|%{BACULA_LOG_STARTRESTORE}|%{BACULA_LOG_USEDEVICE}|%{BACULA_LOG_DIFF_FS}|%{BACULA_LOG_JOBEND}|%{BACULA_LOG_NOPRUNE_JOBS}|%{BACULA_LOG_NOPRUNE_FILES}|%{BACULA_LOG_VOLUME_PREVWRITTEN}|%{BACULA_LOG_READYAPPEND}|%{BACULA_LOG_CANCELLING}|%{BACULA_LOG_MARKCANCEL}|%{BACULA_LOG_CLIENT_RBJ}|%{BACULA_LOG_VSS}|%{BACULA_LOG_MAXSTART}|%{BACULA_LOG_DUPLICATE}|%{BACULA_LOG_NOJOBSTAT}|%{BACULA_LOG_FATAL_CONN}|%{BACULA_LOG_NO_CONNECT}|%{BACULA_LOG_NO_AUTH}|%{BACULA_LOG_NOSUIT}|%{BACULA_LOG_JOB}|%{BACULA_LOG_NOPRIOR})

View File

@@ -0,0 +1,13 @@
# https://www.bro.org/sphinx/script-reference/log-files.html
# http.log
BRO_HTTP %{NUMBER:ts}\t%{NOTSPACE:uid}\t%{IP:orig_h}\t%{INT:orig_p}\t%{IP:resp_h}\t%{INT:resp_p}\t%{INT:trans_depth}\t%{GREEDYDATA:method}\t%{GREEDYDATA:domain}\t%{GREEDYDATA:uri}\t%{GREEDYDATA:referrer}\t%{GREEDYDATA:user_agent}\t%{NUMBER:request_body_len}\t%{NUMBER:response_body_len}\t%{GREEDYDATA:status_code}\t%{GREEDYDATA:status_msg}\t%{GREEDYDATA:info_code}\t%{GREEDYDATA:info_msg}\t%{GREEDYDATA:filename}\t%{GREEDYDATA:bro_tags}\t%{GREEDYDATA:username}\t%{GREEDYDATA:password}\t%{GREEDYDATA:proxied}\t%{GREEDYDATA:orig_fuids}\t%{GREEDYDATA:orig_mime_types}\t%{GREEDYDATA:resp_fuids}\t%{GREEDYDATA:resp_mime_types}
# dns.log
BRO_DNS %{NUMBER:ts}\t%{NOTSPACE:uid}\t%{IP:orig_h}\t%{INT:orig_p}\t%{IP:resp_h}\t%{INT:resp_p}\t%{WORD:proto}\t%{INT:trans_id}\t%{GREEDYDATA:query}\t%{GREEDYDATA:qclass}\t%{GREEDYDATA:qclass_name}\t%{GREEDYDATA:qtype}\t%{GREEDYDATA:qtype_name}\t%{GREEDYDATA:rcode}\t%{GREEDYDATA:rcode_name}\t%{GREEDYDATA:AA}\t%{GREEDYDATA:TC}\t%{GREEDYDATA:RD}\t%{GREEDYDATA:RA}\t%{GREEDYDATA:Z}\t%{GREEDYDATA:answers}\t%{GREEDYDATA:TTLs}\t%{GREEDYDATA:rejected}
# conn.log
BRO_CONN %{NUMBER:ts}\t%{NOTSPACE:uid}\t%{IP:orig_h}\t%{INT:orig_p}\t%{IP:resp_h}\t%{INT:resp_p}\t%{WORD:proto}\t%{GREEDYDATA:service}\t%{NUMBER:duration}\t%{NUMBER:orig_bytes}\t%{NUMBER:resp_bytes}\t%{GREEDYDATA:conn_state}\t%{GREEDYDATA:local_orig}\t%{GREEDYDATA:missed_bytes}\t%{GREEDYDATA:history}\t%{GREEDYDATA:orig_pkts}\t%{GREEDYDATA:orig_ip_bytes}\t%{GREEDYDATA:resp_pkts}\t%{GREEDYDATA:resp_ip_bytes}\t%{GREEDYDATA:tunnel_parents}
# files.log
BRO_FILES %{NUMBER:ts}\t%{NOTSPACE:fuid}\t%{IP:tx_hosts}\t%{IP:rx_hosts}\t%{NOTSPACE:conn_uids}\t%{GREEDYDATA:source}\t%{GREEDYDATA:depth}\t%{GREEDYDATA:analyzers}\t%{GREEDYDATA:mime_type}\t%{GREEDYDATA:filename}\t%{GREEDYDATA:duration}\t%{GREEDYDATA:local_orig}\t%{GREEDYDATA:is_orig}\t%{GREEDYDATA:seen_bytes}\t%{GREEDYDATA:total_bytes}\t%{GREEDYDATA:missing_bytes}\t%{GREEDYDATA:overflow_bytes}\t%{GREEDYDATA:timedout}\t%{GREEDYDATA:parent_fuid}\t%{GREEDYDATA:md5}\t%{GREEDYDATA:sha1}\t%{GREEDYDATA:sha256}\t%{GREEDYDATA:extracted}

View File

@@ -0,0 +1 @@
COWRIE_NEW_CO New connection: %{IPV4:source_ip}:[0-9]+ \(%{IPV4:dest_ip}:%{INT:dest_port}\) \[session: %{DATA:telnet_session}\]$

Some files were not shown because too many files have changed in this diff Show More