Initial commit

commit f4efbc7a63
Author: root
Date: 2023-08-09 14:01:28 +02:00
199 changed files with 13338 additions and 0 deletions

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,25 @@
parsers:
- crowdsecurity/http-logs
scenarios:
- crowdsecurity/http-crawl-non_statics
- crowdsecurity/http-probing
- crowdsecurity/http-bad-user-agent
- crowdsecurity/http-path-traversal-probing
- crowdsecurity/http-sensitive-files
- crowdsecurity/http-sqli-probing
- crowdsecurity/http-xss-probing
- crowdsecurity/http-backdoors-attempts
- ltsich/http-w00tw00t
- crowdsecurity/http-generic-bf
- crowdsecurity/http-open-proxy
collections:
- crowdsecurity/http-cve
description: "http common : scanners detection"
author: crowdsecurity
tags:
- linux
- http
- crawl
- scan

View File

@@ -0,0 +1,10 @@
parsers:
- crowdsecurity/dovecot-logs
scenarios:
- crowdsecurity/dovecot-spam
description: "dovecot support : parser and spammer detection"
author: crowdsecurity
tags:
- linux
- spam
- bruteforce

View File

@@ -0,0 +1,30 @@
scenarios:
- crowdsecurity/http-cve-2021-41773
- crowdsecurity/http-cve-2021-42013
- crowdsecurity/grafana-cve-2021-43798
- crowdsecurity/vmware-vcenter-vmsa-2021-0027
- crowdsecurity/fortinet-cve-2018-13379
- crowdsecurity/pulse-secure-sslvpn-cve-2019-11510
- crowdsecurity/f5-big-ip-cve-2020-5902
- crowdsecurity/thinkphp-cve-2018-20062
- crowdsecurity/apache_log4j2_cve-2021-44228
- crowdsecurity/jira_cve-2021-26086
- crowdsecurity/spring4shell_cve-2022-22965
- crowdsecurity/vmware-cve-2022-22954
- crowdsecurity/CVE-2022-37042
- crowdsecurity/CVE-2022-41082
- crowdsecurity/CVE-2022-35914
- crowdsecurity/CVE-2022-40684
- crowdsecurity/CVE-2022-26134
- crowdsecurity/CVE-2022-42889
- crowdsecurity/CVE-2022-41697
- crowdsecurity/CVE-2022-46169
- crowdsecurity/CVE-2022-44877
- crowdsecurity/CVE-2019-18935
- crowdsecurity/netgear_rce
author: crowdsecurity
tags:
- web
- exploit
- cve
- http

View File

@@ -0,0 +1,11 @@
parsers:
- crowdsecurity/syslog-logs
- crowdsecurity/geoip-enrich
- crowdsecurity/dateparse-enrich
collections:
- crowdsecurity/sshd
description: "core linux support : syslog+geoip+ssh"
author: crowdsecurity
tags:
- linux

View File

@@ -0,0 +1,15 @@
parsers:
#generic post-parsing of http stuff
- crowdsecurity/nginx-logs
collections:
- crowdsecurity/base-http-scenarios
scenarios:
- crowdsecurity/nginx-req-limit-exceeded
description: "nginx support : parser and generic http scenarios"
author: crowdsecurity
tags:
- linux
- nginx
- crawl
- scan

View File

@@ -0,0 +1,11 @@
parsers:
- crowdsecurity/postfix-logs
- crowdsecurity/postscreen-logs
scenarios:
- crowdsecurity/postfix-spam
description: "postfix support : parser and spammer detection"
author: crowdsecurity
tags:
- linux
- spam
- bruteforce

View File

@@ -0,0 +1,12 @@
parsers:
- crowdsecurity/sshd-logs
scenarios:
- crowdsecurity/ssh-bf
- crowdsecurity/ssh-slow-bf
description: "sshd support : parser and brute-force detection"
author: crowdsecurity
tags:
- linux
- ssh
- bruteforce

View File

@@ -0,0 +1,12 @@
# co-authored with gmelodie (https://github.com/gmelodie)
parsers:
- crowdsecurity/traefik-logs
collections:
- crowdsecurity/base-http-scenarios
description: "traefik support: parser and generic http scenarios"
author: crowdsecurity
tags:
- traefik
- http
- bruteforce

View File

@@ -0,0 +1,10 @@
postoverflows:
- crowdsecurity/seo-bots-whitelist
- crowdsecurity/cdn-whitelist
- crowdsecurity/rdns
description: "Good actors whitelists"
author: crowdsecurity
tags:
- whitelist
- bots
- partners

View File

@@ -0,0 +1,19 @@
filter: "evt.Line.Labels.type == 'containerd'"
onsuccess: next_stage
name: crowdsecurity/cri-logs
description: CRI logging format parser
nodes:
- grok:
pattern: "^%{TIMESTAMP_ISO8601:cri_timestamp} %{WORD:stream} %{WORD:logtag} %{GREEDYDATA:message}"
apply_on: Line.Raw
statics:
- parsed: "logsource"
value: "cri"
- target: evt.StrTime
expression: evt.Parsed.cri_timestamp
- parsed: program
expression: evt.Line.Labels.program
- meta: datasource_path
expression: evt.Line.Src
- meta: datasource_type
expression: evt.Line.Module
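# A hedged illustration of what the grok above yields; the sample line is an assumption
# in the usual CRI format, not something shipped in this commit:
#   2023-08-09T12:00:00.000000000+02:00 stdout F connection accepted
#   -> cri_timestamp=2023-08-09T12:00:00.000000000+02:00, stream=stdout, logtag=F, message="connection accepted"
#   the statics then copy cri_timestamp into evt.StrTime and tag logsource=cri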

View File

@@ -0,0 +1,12 @@
#If it's docker, we are going to extract log line from it
filter: "evt.Line.Labels.type == 'docker'"
onsuccess: next_stage
name: crowdsecurity/docker-logs
description: docker json logs parser
statics:
- target: evt.StrTime
expression: JsonExtract(evt.Line.Raw, "time")
- parsed: message
expression: JsonExtractUnescape(evt.Line.Raw, "log")
- parsed: program
expression: evt.Line.Labels.program
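# A hedged sketch of the json-file line this parser expects (the sample is an assumption):
#   {"log":"GET / HTTP/1.1\n","stream":"stdout","time":"2023-08-09T10:00:00.000000000Z"}
#   JsonExtract(evt.Line.Raw, "time")        -> evt.StrTime = 2023-08-09T10:00:00.000000000Z
#   JsonExtractUnescape(evt.Line.Raw, "log") -> evt.Parsed.message = GET / HTTP/1.1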

View File

@@ -0,0 +1,48 @@
#If it's syslog, we are going to extract progname from it
filter: "evt.Line.Labels.type == 'syslog'"
onsuccess: next_stage
pattern_syntax:
RAW_SYSLOG_PREFIX: '^<%{NUMBER:stuff1}>%{NUMBER:stuff2} %{SYSLOGBASE2} %{DATA:program} %{NUMBER:pid}'
RAW_SYSLOG_META: '\[meta sequenceId="%{NOTDQUOTE:seq_id}"\]'
name: crowdsecurity/syslog-logs
nodes:
- grok:
#this is a named regular expression. grok patterns can be kept in separate files for readability
pattern: "^%{SYSLOGLINE}"
#This is the field of the `Event` to which the regexp should be applied
apply_on: Line.Raw
- grok:
#a second pattern for unparsed syslog lines, as seen in OPNsense
pattern: '%{RAW_SYSLOG_PREFIX} - %{RAW_SYSLOG_META} %{GREEDYDATA:message}'
apply_on: Line.Raw
#if the node was successful, statics will be applied.
statics:
- meta: machine
expression: evt.Parsed.logsource
- parsed: "logsource"
value: "syslog"
# syslog date can be in two different fields (one of the assignments will fail)
- target: evt.StrTime
expression: evt.Parsed.timestamp
- target: evt.StrTime
expression: evt.Parsed.timestamp8601
- meta: datasource_path
expression: evt.Line.Src
- meta: datasource_type
expression: evt.Line.Module
---
#if it's not syslog, the type is the progname
filter: "evt.Line.Labels.type != 'syslog'"
onsuccess: next_stage
name: crowdsecurity/non-syslog
#debug: true
statics:
- parsed: message
expression: evt.Line.Raw
- parsed: program
expression: evt.Line.Labels.type
- meta: datasource_path
expression: evt.Line.Src
- meta: datasource_type
expression: evt.Line.Module
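# For lines to reach either node, the acquisition config sets Line.Labels.type;
# a minimal acquis.yaml sketch (file paths are assumptions, not part of this commit):
#   filenames:
#     - /var/log/syslog
#     - /var/log/auth.log
#   labels:
#     type: syslog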

View File

@@ -0,0 +1,26 @@
#contribution by @ltsich
onsuccess: next_stage
debug: false
filter: "evt.Parsed.program == 'dovecot'"
name: crowdsecurity/dovecot-logs
description: "Parse dovecot logs"
nodes:
- grok:
pattern: "%{WORD:protocol}-login: %{DATA:dovecot_login_message}: user=<%{DATA:dovecot_user}>.*, rip=%{IP:dovecot_remote_ip}, lip=%{IP:dovecot_local_ip}"
apply_on: message
- grok:
pattern: "auth-worker\\(%{INT}\\): %{WORD:dovecot_user_backend}\\(%{DATA:dovecot_user},%{IP:dovecot_remote_ip},?%{DATA}\\): (%{DATA}: )?%{DATA:dovecot_login_message}$"
apply_on: message
- grok:
pattern: "auth-worker\\(%{INT}\\): (Info: )?conn unix:auth-worker \\(pid=%{INT},uid=%{INT}\\): auth-worker<%{INT}>: %{WORD:dovecot_user_backend}\\(%{DATA:dovecot_user},%{IP:dovecot_remote_ip},?%{DATA}\\): (%{DATA}: )?%{DATA:dovecot_login_message}$"
apply_on: message
- grok:
pattern: "auth: passwd-file\\(%{DATA:dovecot_user},%{IP:dovecot_remote_ip}\\): (%{DATA}: )?%{DATA:dovecot_login_message}$"
apply_on: message
statics:
- meta: log_type
value: dovecot_logs
- meta: source_ip
expression: "evt.Parsed.dovecot_remote_ip"
- meta: dovecot_login_result
expression: "any(['Authentication failure', 'Password mismatch', 'password mismatch', 'auth failed', 'unknown user'], {evt.Parsed.dovecot_login_message contains #}) ? 'auth_failed' : ''"
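# A hedged walk-through with a hypothetical dovecot login failure (not from this commit):
#   imap-login: Disconnected (auth failed, 1 attempts in 2 secs): user=<admin>, method=PLAIN, rip=192.0.2.10, lip=198.51.100.5
#   -> first grok node: protocol=imap, dovecot_login_message="Disconnected (auth failed, 1 attempts in 2 secs)",
#      dovecot_user=admin, dovecot_remote_ip=192.0.2.10
#   -> statics: source_ip=192.0.2.10, dovecot_login_result=auth_failed (message contains 'auth failed')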

View File

@@ -0,0 +1,70 @@
filter: "evt.Parsed.program startsWith 'nginx'"
onsuccess: next_stage
name: crowdsecurity/nginx-logs
description: "Parse nginx access and error logs"
pattern_syntax:
NGCUSTOMUSER: '[a-zA-Z0-9\.\@\-\+_%]+'
nodes:
- grok:
pattern: '(%{IPORHOST:target_fqdn} )?%{IPORHOST:remote_addr} - %{NGCUSTOMUSER:remote_user}? \[%{HTTPDATE:time_local}\] "%{WORD:verb} %{DATA:request} HTTP/%{NUMBER:http_version}" %{NUMBER:status} %{NUMBER:body_bytes_sent} "%{NOTDQUOTE:http_referer}" "%{NOTDQUOTE:http_user_agent}"( %{NUMBER:request_length} %{NUMBER:request_time} \[%{DATA:proxy_upstream_name}\] \[%{DATA:proxy_alternative_upstream_name}\])?'
apply_on: message
statics:
- meta: log_type
value: http_access-log
- target: evt.StrTime
expression: evt.Parsed.time_local
- grok:
# and this one parses the error log
pattern: '(%{IPORHOST:target_fqdn} )?%{NGINXERRTIME:time} \[%{LOGLEVEL:loglevel}\] %{NONNEGINT:pid}#%{NONNEGINT:tid}: (\*%{NONNEGINT:cid} )?%{GREEDYDATA:message}, client: %{IPORHOST:remote_addr}, server: %{DATA:target_fqdn}, request: "%{WORD:verb} ([^/]+)?%{URIPATHPARAM:request}( HTTP/%{NUMBER:http_version})?", host: "%{IPORHOST}(:%{NONNEGINT})?"'
apply_on: message
statics:
- meta: log_type
value: http_error-log
- target: evt.StrTime
expression: evt.Parsed.time
pattern_syntax:
NO_DOUBLE_QUOTE: '[^"]+'
onsuccess: next_stage
nodes:
- filter: "evt.Parsed.message contains 'was not found in'"
pattern_syntax:
USER_NOT_FOUND: 'user "%{NO_DOUBLE_QUOTE:username}" was not found in "%{NO_DOUBLE_QUOTE}"'
grok:
pattern: '%{USER_NOT_FOUND}'
apply_on: message
statics:
- meta: sub_type
value: "auth_fail"
- meta: username
expression: evt.Parsed.username
- filter: "evt.Parsed.message contains 'password mismatch'"
pattern_syntax:
PASSWORD_MISMATCH: 'user "%{NO_DOUBLE_QUOTE:username}": password mismatch'
grok:
pattern: '%{PASSWORD_MISMATCH}'
apply_on: message
statics:
- meta: sub_type
value: "auth_fail"
- meta: username
expression: evt.Parsed.username
- filter: "evt.Parsed.message contains 'limiting requests, excess'"
statics:
- meta: sub_type
value: "req_limit_exceeded"
# these ones apply for both grok patterns
statics:
- meta: service
value: http
- meta: source_ip
expression: "evt.Parsed.remote_addr"
- meta: http_status
expression: "evt.Parsed.status"
- meta: http_path
expression: "evt.Parsed.request"
- meta: http_verb
expression: "evt.Parsed.verb"
- meta: http_user_agent
expression: "evt.Parsed.http_user_agent"
- meta: target_fqdn
expression: "evt.Parsed.target_fqdn"
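# When nginx logs are read from files rather than syslog, the crowdsecurity/non-syslog parser
# sets program from Labels.type; a minimal acquis.yaml sketch routing lines to this parser
# (file paths are assumptions, not part of this commit):
#   filenames:
#     - /var/log/nginx/access.log
#     - /var/log/nginx/error.log
#   labels:
#     type: nginx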

View File

@@ -0,0 +1,61 @@
# Copyright (c) 2014, 2015, Rudy Gevaert
# Copyright (c) 2020 Crowdsec
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Some of the groks used here are from https://github.com/rgevaert/grok-patterns/blob/master/grok.d/postfix_patterns
onsuccess: next_stage
filter: "evt.Parsed.program in ['postfix/smtpd','postfix/smtps/smtpd','postfix/submission/smtpd', 'postfix/smtps-haproxy/smtpd', 'postfix/submission-haproxy/smtpd']"
name: crowdsecurity/postfix-logs
pattern_syntax:
POSTFIX_HOSTNAME: '(%{HOSTNAME}|unknown)'
POSTFIX_COMMAND: '(AUTH|STARTTLS|CONNECT|EHLO|HELO|RCPT)'
POSTFIX_ACTION: 'discard|dunno|filter|hold|ignore|info|prepend|redirect|replace|reject|warn'
RELAY: '(?:%{HOSTNAME:remote_host}(?:\[%{IP:remote_addr}\](?::[0-9]+(.[0-9]+)?)?)?)'
description: "Parse postfix logs"
nodes:
- grok:
apply_on: message
pattern: 'lost connection after %{DATA:smtp_response} from %{RELAY}'
statics:
- meta: log_type_enh
value: spam-attempt
- grok:
apply_on: message
pattern: 'warning: %{POSTFIX_HOSTNAME:remote_host}\[%{IP:remote_addr}\]: SASL ((?i)LOGIN|PLAIN|(?:CRAM|DIGEST)-MD5) authentication failed:%{GREEDYDATA:message_failure}'
statics:
- meta: log_type_enh
value: spam-attempt
- grok:
apply_on: message
pattern: 'NOQUEUE: %{POSTFIX_ACTION:action}: %{DATA:command} from %{RELAY}: %{GREEDYDATA:reason}'
statics:
- meta: action
expression: "evt.Parsed.action"
statics:
- meta: service
value: postfix
- meta: source_ip
expression: "evt.Parsed.remote_addr"
- meta: source_hostname
expression: "evt.Parsed.remote_host"
- meta: log_type
value: postfix

View File

@@ -0,0 +1,20 @@
onsuccess: next_stage
filter: "evt.Parsed.program in ['postfix/postscreen', 'haproxy/postscreen']"
name: crowdsecurity/postscreen-logs
pattern_syntax:
POSTSCREEN_PREGREET: 'PREGREET'
POSTSCREEN_PREGREET_TIME_ATTEMPT: '\d+.\d+'
description: "Parse postscreen logs"
nodes:
- grok:
apply_on: message
pattern: '%{POSTSCREEN_PREGREET:pregreet} %{INT:count} after %{POSTSCREEN_PREGREET_TIME_ATTEMPT:time_attempt} from \[%{IP:remote_addr}\]:%{INT:port}: %{GREEDYDATA:message_attempt}'
statics:
- meta: service
value: postscreen
- meta: source_ip
expression: "evt.Parsed.remote_addr"
- meta: pregreet
expression: "evt.Parsed.pregreet"

View File

@@ -0,0 +1,100 @@
onsuccess: next_stage
#debug: true
filter: "evt.Parsed.program == 'sshd'"
name: crowdsecurity/sshd-logs
description: "Parse OpenSSH logs"
pattern_syntax:
# The IP grok pattern that ships with crowdsec is buggy: it does not capture the last digit of an IP when the IP is the last thing matched and its last octet starts with a 2
# https://github.com/crowdsecurity/crowdsec/issues/938
IPv4_WORKAROUND: (?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)
IP_WORKAROUND: (?:%{IPV6}|%{IPv4_WORKAROUND})
SSHD_AUTH_FAIL: 'pam_%{DATA:pam_type}\(sshd:auth\): authentication failure; logname= uid=%{NUMBER:uid}? euid=%{NUMBER:euid}? tty=ssh ruser= rhost=%{IP_WORKAROUND:sshd_client_ip}( %{SPACE}user=%{USERNAME:sshd_invalid_user})?'
SSHD_MAGIC_VALUE_FAILED: 'Magic value check failed \(\d+\) on obfuscated handshake from %{IP_WORKAROUND:sshd_client_ip} port \d+'
SSHD_INVALID_USER: 'Invalid user\s*%{USERNAME:sshd_invalid_user}? from %{IP_WORKAROUND:sshd_client_ip}( port \d+)?'
SSHD_INVALID_BANNER: 'banner exchange: Connection from %{IP_WORKAROUND:sshd_client_ip} port \d+: invalid format'
SSHD_PREAUTH_AUTHENTICATING_USER: 'Connection closed by (authenticating|invalid) user %{USERNAME:sshd_invalid_user} %{IP_WORKAROUND:sshd_client_ip} port \d+ \[preauth\]'
#following: https://github.com/crowdsecurity/crowdsec/issues/1201 - some scanners behave differently and trigger this one
SSHD_PREAUTH_AUTHENTICATING_USER_ALT: 'Disconnected from (authenticating|invalid) user %{USERNAME:sshd_invalid_user} %{IP_WORKAROUND:sshd_client_ip} port \d+ \[preauth\]'
SSHD_BAD_KEY_NEGOTIATION: 'Unable to negotiate with %{IP_WORKAROUND:sshd_client_ip} port \d+: no matching (host key type|key exchange method|MAC) found.'
nodes:
- grok:
name: "SSHD_FAIL"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: target_user
expression: "evt.Parsed.sshd_invalid_user"
- grok:
name: "SSHD_PREAUTH_AUTHENTICATING_USER_ALT"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: target_user
expression: "evt.Parsed.sshd_invalid_user"
- grok:
name: "SSHD_PREAUTH_AUTHENTICATING_USER"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: target_user
expression: "evt.Parsed.sshd_invalid_user"
- grok:
name: "SSHD_DISC_PREAUTH"
apply_on: message
- grok:
name: "SSHD_BAD_VERSION"
apply_on: message
- grok:
name: "SSHD_INVALID_USER"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: target_user
expression: "evt.Parsed.sshd_invalid_user"
- grok:
name: "SSHD_INVALID_BANNER"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: extra_log_type
value: ssh_bad_banner
- grok:
name: "SSHD_USER_FAIL"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: target_user
expression: "evt.Parsed.sshd_invalid_user"
- grok:
name: "SSHD_AUTH_FAIL"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: target_user
expression: "evt.Parsed.sshd_invalid_user"
- grok:
name: "SSHD_MAGIC_VALUE_FAILED"
apply_on: message
statics:
- meta: log_type
value: ssh_failed-auth
- meta: target_user
expression: "evt.Parsed.sshd_invalid_user"
- grok:
name: "SSHD_BAD_KEY_NEGOTIATION"
apply_on: message
statics:
- meta: log_type
value: ssh_bad_keyexchange
statics:
- meta: service
value: ssh
- meta: source_ip
expression: "evt.Parsed.sshd_client_ip"
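# A hedged walk-through with a hypothetical auth.log message (not from this commit):
#   Invalid user admin from 192.0.2.10 port 55555
#   -> SSHD_INVALID_USER: sshd_invalid_user=admin, sshd_client_ip=192.0.2.10
#   -> meta: log_type=ssh_failed-auth, target_user=admin, source_ip=192.0.2.10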

View File

@@ -0,0 +1,69 @@
# co-authored with gmelodie (https://github.com/gmelodie)
name: crowdsecurity/traefik-logs
description: "Parse Traefik access logs"
filter: "evt.Parsed.program startsWith 'traefik'"
#debug: true
onsuccess: next_stage
pattern_syntax:
TRAEFIK_ROUTER: '(%{USER}@%{URIHOST}|\-)'
TRAEFIK_SERVER_URL: '(%{URI}|\-)'
NUMBER_MINUS: '[0-9-]+'
NGCUSTOMUSER: '[a-zA-Z0-9\.\@\-\+_%]+'
NGINXACCESS2: '%{IPORHOST:remote_addr} - %{NGCUSTOMUSER:remote_user} \[%{HTTPDATE:time_local}\] "%{WORD:verb} %{DATA:request} HTTP/%{NUMBER:http_version}" %{NUMBER_MINUS:status} %{NUMBER_MINUS:body_bytes_sent} "%{NOTDQUOTE:http_referer}" "%{NOTDQUOTE:http_user_agent}"'
nodes:
- grok: # CLF parser
pattern: '%{NGINXACCESS2} %{NUMBER:number_of_requests_received_since_traefik_started} "%{TRAEFIK_ROUTER:traefik_router_name}" "%{TRAEFIK_SERVER_URL:traefik_server_url}" %{NUMBER:request_duration_in_ms}ms'
apply_on: message
- filter: UnmarshalJSON(evt.Line.Raw, evt.Unmarshaled, "traefik") in ["", nil]
statics:
- parsed: remote_addr
expression: evt.Unmarshaled.traefik.ClientHost
- parsed: dest_addr
## Split dest_addr to keep the IP only, matching the original CLF behaviour
expression: Split(evt.Unmarshaled.traefik.ClientAddr, ':')[0]
- parsed: request_addr
expression: evt.Unmarshaled.traefik.RequestAddr
- parsed: service_addr
## Split service_addr to keep the IP only, matching the original CLF behaviour
expression: "evt.Unmarshaled.traefik.ServiceAddr != nil ? Split(evt.Unmarshaled.traefik.ServiceAddr, ':')[0] : nil"
- parsed: http_user_agent
expression: evt.Unmarshaled.traefik["request_User-Agent"] ## We have to access via [] as the key contains a dash
- parsed: body_bytes_sent
## We have to check whether DownstreamContentSize is nil, as a nil value would cause an expr error
expression: "evt.Unmarshaled.traefik.DownstreamContentSize != nil ? int(evt.Unmarshaled.traefik.DownstreamContentSize) : nil"
- parsed: request_duration_in_ms
expression: int(evt.Unmarshaled.traefik.Duration)
- parsed: traefik_router_name
expression: evt.Unmarshaled.traefik.RouterName
- parsed: time_local
expression: evt.Unmarshaled.traefik.time
- parsed: verb
expression: evt.Unmarshaled.traefik.RequestMethod
- parsed: request
expression: evt.Unmarshaled.traefik.RequestPath
- parsed: http_version
## Split http_version to keep the version only, matching the original CLF behaviour
expression: Split(evt.Unmarshaled.traefik.RequestProtocol, '/')[1]
- parsed: status
expression: int(evt.Unmarshaled.traefik.DownstreamStatus)
statics:
- meta: service
value: http
- meta: http_status
expression: "evt.Parsed.status"
- meta: http_path
expression: "evt.Parsed.request"
- meta: user
expression: "evt.Parsed.remote_user"
- meta: source_ip
expression: "evt.Parsed.remote_addr"
- meta: http_user_agent
expression: "evt.Parsed.http_user_agent"
- meta: log_type
value: http_access-log
- target: evt.StrTime
expression: "evt.Parsed.time_local"
- meta: traefik_router_name
expression: "evt.Parsed.traefik_router_name"
- meta: http_verb
expression: "evt.Parsed.verb"
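# The second node covers Traefik's JSON access-log format; a hedged, abridged sample (field
# values are assumptions) showing how it maps onto the same Parsed fields as the CLF branch:
#   {"ClientHost":"192.0.2.10","RequestMethod":"GET","RequestPath":"/login","RequestProtocol":"HTTP/1.1","DownstreamStatus":404, ...}
#   -> remote_addr=192.0.2.10, verb=GET, request=/login, http_version=1.1, status=404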

View File

@@ -0,0 +1,11 @@
filter: "evt.StrTime != ''"
name: crowdsecurity/dateparse-enrich
#debug: true
#it's a hack lol
statics:
- method: ParseDate
expression: evt.StrTime
- target: MarshaledTime
expression: evt.Enriched.MarshaledTime
- meta: timestamp
expression: evt.Enriched.MarshaledTime

View File

@@ -0,0 +1,27 @@
filter: "'source_ip' in evt.Meta"
name: crowdsecurity/geoip-enrich
description: "Populate event with geoloc info: AS, country, coords, source range."
data:
- source_url: https://crowdsec-statics-assets.s3-eu-west-1.amazonaws.com/GeoLite2-City.mmdb
dest_file: GeoLite2-City.mmdb
- source_url: https://crowdsec-statics-assets.s3-eu-west-1.amazonaws.com/GeoLite2-ASN.mmdb
dest_file: GeoLite2-ASN.mmdb
statics:
- method: GeoIpCity
expression: evt.Meta.source_ip
- meta: IsoCode
expression: evt.Enriched.IsoCode
- meta: IsInEU
expression: evt.Enriched.IsInEU
- meta: GeoCoords
expression: evt.Enriched.GeoCoords
- method: GeoIpASN
expression: evt.Meta.source_ip
- meta: ASNNumber
expression: evt.Enriched.ASNNumber
- meta: ASNOrg
expression: evt.Enriched.ASNOrg
- method: IpToRange
expression: evt.Meta.source_ip
- meta: SourceRange
expression: evt.Enriched.SourceRange

View File

@@ -0,0 +1,33 @@
filter: "evt.Meta.service == 'http' && evt.Meta.log_type in ['http_access-log', 'http_error-log']"
description: "Parse HTTP logs more specifically: HTTP code, HTTP path, HTTP args, and whether it is a static resource"
name: crowdsecurity/http-logs
pattern_syntax:
DIR: "^.*/"
FILE: "[^/].*?"
EXT: "\\.[^.]*$|$"
nodes:
- statics:
- parsed: "impact_completion"
# the value of a field can also be set from the result of an expression
expression: "evt.Meta.http_status in ['404', '403', '502'] ? 'false' : 'true'"
- target: evt.Parsed.static_ressource
value: 'false'
# let's split the path?query if possible
- grok:
pattern: "^%{GREEDYDATA:request}\\?%{GREEDYDATA:http_args}$"
apply_on: request
# this is another node, with its own pattern_syntax
- #debug: true
grok:
pattern: "%{DIR:file_dir}(%{FILE:file_frag}%{EXT:file_ext})?"
apply_on: request
statics:
- meta: http_path
expression: "evt.Parsed.http_path"
# additional meta fields
- meta: http_args_len
expression: "len(evt.Parsed.http_args)"
- parsed: file_name
expression: evt.Parsed.file_frag + evt.Parsed.file_ext
- parsed: static_ressource
expression: "Upper(evt.Parsed.file_ext) in ['.JPG', '.CSS', '.JS', '.JPEG', '.PNG', '.SVG', '.MAP', '.ICO', '.OTF', '.GIF', '.MP3', '.MP4', '.WOFF', '.WOFF2', '.TTF', '.OTF', '.EOT', '.WEBP', '.WAV', '.GZ', '.BROTLI', '.BVR', '.TS', '.BMP'] ? 'true' : 'false'"
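# A hedged worked example for a hypothetical request "/admin/login.php?user=x"
# (the request is an assumption; the values follow from the groks above):
#   -> request=/admin/login.php, http_args=user=x (http_args_len=6)
#   -> file_dir=/admin/, file_frag=login, file_ext=.php, file_name=login.php
#   -> static_ressource=false (.PHP is not in the static-extension list)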

View File

@@ -0,0 +1,14 @@
name: crowdsecurity/whitelists
description: "Whitelist events from private IPv4/IPv6 addresses"
whitelist:
reason: "private ipv4/ipv6 ip/ranges"
ip:
- "127.0.0.1"
- "::1"
cidr:
- "192.168.0.0/16"
- "10.0.0.0/8"
- "172.16.0.0/12"
# expression:
# - "'foo.com' in evt.Meta.source_ip.reverse"

View File

@@ -0,0 +1,9 @@
onsuccess: next_stage
filter: "evt.Overflow.Alert.Remediation == true && evt.Overflow.Alert.GetScope() == 'Ip'"
name: crowdsecurity/rdns
description: "Look up the reverse DNS of the source IP, only for overflows"
statics:
- method: reverse_dns
expression: evt.Overflow.Alert.Source.IP
- meta: reverse_dns
expression: evt.Enriched.reverse_dns

View File

@@ -0,0 +1,14 @@
name: crowdsecurity/cdn-whitelist
description: "Whitelist CDN providers"
whitelist:
reason: "CDN provider"
expression:
- "any(File('cloudflare_ips.txt'), { IpInRange(evt.Overflow.Alert.Source.IP ,#)})"
- "any(File('cloudflare_ip6s.txt'), { IpInRange(evt.Overflow.Alert.Source.IP ,#)})"
data:
- source_url: https://www.cloudflare.com/ips-v4
dest_file: cloudflare_ips.txt
type: string
- source_url: https://www.cloudflare.com/ips-v6
dest_file: cloudflare_ip6s.txt
type: string

View File

@@ -0,0 +1,18 @@
name: crowdsecurity/seo-bots-whitelist
description: "Whitelist good search engine crawlers"
whitelist:
reason: "good bots (search engine crawlers)"
expression:
- "any(File('rdns_seo_bots.txt'), { len(#) > 0 && evt.Enriched.reverse_dns endsWith #})"
- "RegexpInFile(evt.Enriched.reverse_dns, 'rdns_seo_bots.regex')"
- "any(File('ip_seo_bots.txt'), { len(#) > 0 && IpInRange(evt.Overflow.Alert.Source.IP ,#)})"
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/whitelists/benign_bots/search_engine_crawlers/rdns_seo_bots.txt
dest_file: rdns_seo_bots.txt
type: string
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/whitelists/benign_bots/search_engine_crawlers/rnds_seo_bots.regex
dest_file: rdns_seo_bots.regex
type: regexp
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/whitelists/benign_bots/search_engine_crawlers/ip_seo_bots.txt
dest_file: ip_seo_bots.txt
type: string

View File

@@ -0,0 +1,11 @@
type: trigger
format: 2.0
name: crowdsecurity/CVE-2019-18935
description: "Detect Telerik CVE-2019-18935 exploitation attempts"
filter: |
evt.Meta.log_type in ['http_access-log', 'http_error-log'] && Upper(QueryUnescape(evt.Meta.http_path)) startsWith Upper('/Telerik.Web.UI.WebResource.axd?type=rau')
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,10 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-26134
description: "Detect CVE-2022-26134 exploits"
filter: "Upper(PathUnescape(evt.Meta.http_path)) contains Upper('@java.lang.Runtime@getRuntime().exec(')"
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,10 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-35914
description: "Detect CVE-2022-35914 exploits"
filter: "Upper(evt.Meta.http_path) contains Upper('/vendor/htmlawed/htmlawed/htmLawedTest.php')"
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,18 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-37042
description: "Detect CVE-2022-37042 exploits"
filter: |
(
Upper(evt.Meta.http_path) contains Upper('/service/extension/backup/mboximport?account-name=admin&ow=2&no-switch=1&append=1') ||
Upper(evt.Meta.http_path) contains Upper('/service/extension/backup/mboximport?account-name=admin&account-status=1&ow=cmd')
)
and evt.Meta.http_status startsWith ('40') and
Upper(evt.Meta.http_verb) == 'POST'
blackhole: 2m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,11 @@
type: trigger
name: crowdsecurity/fortinet-cve-2022-40684
description: "Detect cve-2022-40684 exploitation attempts"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
Upper(evt.Meta.http_path) startsWith Upper('/api/v2/cmdb/system/admin/') and Lower(evt.Parsed.http_user_agent) == 'report runner'
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,13 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-41082
description: "Detect CVE-2022-41082 exploits"
filter: |
Upper(evt.Meta.http_path) contains Upper('/autodiscover/autodiscover.json') &&
Upper(evt.Parsed.http_args) contains Upper('powershell')
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,14 @@
type: leaky
name: crowdsecurity/CVE-2022-41697
description: "Detect CVE-2022-41697 enumeration"
filter: |
Upper(evt.Meta.http_path) contains Upper('/ghost/api/admin/session') &&
Upper(evt.Parsed.verb) == 'POST' &&
evt.Meta.http_status == '404'
leakspeed: "10s"
capacity: 5
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,17 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-42889
description: "Detect CVE-2022-42889 exploits (Text4Shell)"
filter: |
Upper(PathUnescape(evt.Meta.http_path)) contains Upper('${script:javascript:java.lang.Runtime.getRuntime().exec(')
or
Upper(PathUnescape(evt.Meta.http_path)) contains Upper('${script:js:java.lang.Runtime.getRuntime().exec(')
or
Upper(PathUnescape(evt.Meta.http_path)) contains Upper('${url:UTF-8:')
or
Upper(PathUnescape(evt.Meta.http_path)) contains Upper('${dns:address|')
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,15 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-44877
description: "Detect CVE-2022-44877 exploits"
filter: |
Lower(evt.Meta.http_path) contains '/index.php' &&
Upper(evt.Parsed.verb) == 'POST' &&
evt.Meta.http_status == '302' &&
Lower(evt.Parsed.http_args) matches 'login=.*[$|%24][\\(|%28].*[\\)|%29]'
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,29 @@
type: leaky
name: crowdsecurity/CVE-2022-46169-bf
description: "Detect CVE-2022-46169 brute forcing"
filter: |
Upper(evt.Meta.http_path) contains Upper('/remote_agent.php') &&
Upper(evt.Parsed.verb) == 'GET' &&
Lower(evt.Parsed.http_args) contains 'host_id' &&
Lower(evt.Parsed.http_args) contains 'local_data_ids'
leakspeed: "10s"
capacity: 5
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true
---
type: trigger
name: crowdsecurity/CVE-2022-46169-cmd
description: "Detect CVE-2022-46169 cmd injection"
filter: |
Upper(evt.Meta.http_path) contains Upper('/remote_agent.php') &&
Upper(evt.Parsed.verb) == 'GET' &&
Lower(evt.Parsed.http_args) contains 'action=polldata' &&
Lower(evt.Parsed.http_args) matches 'poller_id=.*(;|%3b)'
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,23 @@
type: trigger
format: 2.0
#debug: true
name: crowdsecurity/apache_log4j2_cve-2021-44228
description: "Detect cve-2021-44228 exploitation attempts"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
(
any(File("log4j2_cve_2021_44228.txt"), { Upper(evt.Meta.http_path) contains Upper(#)})
or
any(File("log4j2_cve_2021_44228.txt"), { Upper(evt.Parsed.http_user_agent) contains Upper(#)})
or
any(File("log4j2_cve_2021_44228.txt"), { Upper(evt.Parsed.http_referer) contains Upper(#)})
)
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/log4j2_cve_2021_44228.txt
dest_file: log4j2_cve_2021_44228.txt
type: string
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,13 @@
#contribution by @ltsich
type: leaky
name: crowdsecurity/dovecot-spam
description: "Detect dovecot authentication failures"
debug: false
filter: "evt.Meta.log_type == 'dovecot_logs' && evt.Meta.dovecot_login_result == 'auth_failed'"
groupby: evt.Meta.source_ip
capacity: 3
leakspeed: "360s"
blackhole: 5m
labels:
type: scan
remediation: true

View File

@@ -0,0 +1,16 @@
type: trigger
format: 2.0
name: crowdsecurity/f5-big-ip-cve-2020-5902
description: "Detect cve-2020-5902 exploitation attempts"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
(
Upper(evt.Meta.http_path) matches Upper('/tmui/login.jsp/..;/tmui/[^.]+.jsp\\?(fileName|command|directoryPath|tabId)=')
or
Upper(evt.Meta.http_path) matches Upper('/tmui/login.jsp/%2E%2E;/tmui/[^.]+.jsp\\?(fileName|command|directoryPath|tabId)=')
)
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,12 @@
type: trigger
format: 2.0
name: crowdsecurity/fortinet-cve-2018-13379
description: "Detect cve-2018-13379 exploitation attempts"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
Upper(evt.Meta.http_path) contains Upper('/remote/fgt_lang?lang=/../../../..//////////dev/cmdb/sslvpn_websession')
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,14 @@
type: trigger
format: 2.0
name: crowdsecurity/grafana-cve-2021-43798
description: "Detect cve-2021-43798 exploitation attempts"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
(Upper(evt.Meta.http_path) matches '/PUBLIC/PLUGINS/[^/]+/../[./]+/'
or
Upper(evt.Meta.http_path) matches '/PUBLIC/PLUGINS/[^/]+/%2E%2E/[%2E/]+/')
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,18 @@
type: leaky
#debug: true
name: crowdsecurity/http-backdoors-attempts
description: "Detect attempts to access common backdoors"
filter: 'evt.Meta.log_type in ["http_access-log", "http_error-log"] and any(File("backdoors.txt"), { evt.Parsed.file_name == #})'
groupby: "evt.Meta.source_ip"
distinct: evt.Parsed.file_name
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/backdoors.txt
dest_file: backdoors.txt
type: string
capacity: 1
leakspeed: 5s
blackhole: 5m
labels:
service: http
type: discovery
remediation: true

View File

@@ -0,0 +1,20 @@
type: leaky
format: 2.0
#debug: true
name: crowdsecurity/http-bad-user-agent
description: "Detect bad user-agents"
filter: 'evt.Meta.log_type in ["http_access-log", "http_error-log"] && RegexpInFile(evt.Parsed.http_user_agent, "bad_user_agents.regex.txt")'
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/bad_user_agents.regex.txt
dest_file: bad_user_agents.regex.txt
type: regexp
strategy: LRU
size: 40
ttl: 10s
capacity: 1
leakspeed: 1m
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: scan
remediation: true

View File

@@ -0,0 +1,16 @@
type: leaky
name: crowdsecurity/http-crawl-non_statics
description: "Detect aggressive crawling from a single IP"
filter: "evt.Meta.log_type in ['http_access-log', 'http_error-log'] && evt.Parsed.static_ressource == 'false' && evt.Parsed.verb in ['GET', 'HEAD']"
distinct: "evt.Parsed.file_name"
leakspeed: 0.5s
capacity: 40
#debug: true
#this limits the memory cache (and event_sequences in output) to five events
cache_size: 5
groupby: "evt.Meta.source_ip + '/' + evt.Parsed.target_fqdn"
blackhole: 1m
labels:
service: http
type: crawl
remediation: true

View File

@@ -0,0 +1,15 @@
type: trigger
format: 2.0
#debug: true
name: crowdsecurity/http-cve-2021-41773
description: "Detect cve-2021-41773 exploitation attempts"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
(Upper(evt.Meta.http_path) contains "/.%2E/.%2E/"
or
Upper(evt.Meta.http_path) contains "/%2E%2E/%2E%2E")
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: scan
remediation: true

View File

@@ -0,0 +1,14 @@
type: trigger
format: 2.0
#debug: true
#this is getting funny, it's the third patch on top of cve-2021-41773
name: crowdsecurity/http-cve-2021-42013
description: "Detect cve-2021-42013 exploitation attempts"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
Upper(evt.Meta.http_path) contains "/%%32%65%%32%65/"
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: scan
remediation: true

View File

@@ -0,0 +1,44 @@
# generic http brute force
type: leaky
#debug: true
name: crowdsecurity/http-generic-bf
description: "Detect generic http brute force"
filter: "evt.Meta.service == 'http' && evt.Meta.sub_type == 'auth_fail'"
groupby: evt.Meta.source_ip
capacity: 5
leakspeed: "10s"
blackhole: 1m
labels:
service: http
type: bf
remediation: true
---
# Generic 401 Authorization Errors
type: leaky
#debug: true
name: LePresidente/http-generic-401-bf
description: "Detect generic 401 Authorization error brute force"
filter: "evt.Meta.log_type == 'http_access-log' && evt.Parsed.verb == 'POST' && evt.Meta.http_status == '401'"
groupby: evt.Meta.source_ip
capacity: 5
leakspeed: "10s"
blackhole: 1m
labels:
service: http
type: bf
remediation: true
---
# Generic 403 Forbidden (Authorization) Errors
type: leaky
#debug: true
name: LePresidente/http-generic-403-bf
description: "Detect generic 403 Forbidden (Authorization) error brute force"
filter: "evt.Meta.log_type == 'http_access-log' && evt.Parsed.verb == 'POST' && evt.Meta.http_status == '403'"
groupby: evt.Meta.source_ip
capacity: 5
leakspeed: "10s"
blackhole: 1m
labels:
service: http
type: bf
remediation: true

View File

@@ -0,0 +1,10 @@
type: trigger
name: crowdsecurity/http-open-proxy
description: "Detect scan for open proxy"
#apache returns 405, nginx 400
filter: "evt.Meta.log_type == 'http_access-log' && evt.Meta.http_status in ['400','405'] && (evt.Parsed.verb == 'CONNECT' || evt.Parsed.request matches '^http[s]?://')"
blackhole: 2m
labels:
service: http
type: scan
remediation: true

View File

@@ -0,0 +1,20 @@
# path traversal probing
type: leaky
#debug: true
name: crowdsecurity/http-path-traversal-probing
description: "Detect path traversal attempts"
filter: "evt.Meta.log_type in ['http_access-log', 'http_error-log'] && any(File('http_path_traversal.txt'),{evt.Meta.http_path contains #})"
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/path_traversal.txt
dest_file: http_path_traversal.txt
type: string
groupby: "evt.Meta.source_ip"
distinct: "evt.Meta.http_path"
capacity: 3
reprocess: true
leakspeed: 10s
blackhole: 2m
labels:
service: http
type: scan
remediation: true

View File

@@ -0,0 +1,16 @@
# 404 scan
type: leaky
#debug: true
name: crowdsecurity/http-probing
description: "Detect site scanning/probing from a single ip"
filter: "evt.Meta.service == 'http' && evt.Meta.http_status in ['404', '403', '400'] && evt.Parsed.static_ressource == 'false'"
groupby: "evt.Meta.source_ip + '/' + evt.Parsed.target_fqdn"
distinct: "evt.Meta.http_path"
capacity: 10
reprocess: true
leakspeed: "10s"
blackhole: 5m
labels:
service: http
type: scan
remediation: true

View File

@@ -0,0 +1,19 @@
type: leaky
format: 2.0
#debug: true
name: crowdsecurity/http-sensitive-files
description: "Detect attempts to access sensitive files (.log, .db, ...) or folders (.git)"
filter: 'evt.Meta.log_type in ["http_access-log", "http_error-log"] and any(File("sensitive_data.txt"), { evt.Parsed.request endsWith #})'
groupby: "evt.Meta.source_ip"
distinct: evt.Parsed.request
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/sensitive_data.txt
dest_file: sensitive_data.txt
type: string
capacity: 4
leakspeed: 5s
blackhole: 5m
labels:
service: http
type: discovery
remediation: true

View File

@@ -0,0 +1,20 @@
type: leaky
#requires at least 2.0 because it's using the 'data' section and the 'Upper' expr helper
format: 2.0
name: crowdsecurity/http-sqli-probbing-detection
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/sqli_probe_patterns.txt
dest_file: sqli_probe_patterns.txt
type: string
description: "A scenario that detects SQL injection probing with minimal false positives"
filter: "evt.Meta.log_type in ['http_access-log', 'http_error-log'] && any(File('sqli_probe_patterns.txt'), {Upper(evt.Parsed.http_args) contains Upper(#)})"
groupby: evt.Meta.source_ip
capacity: 10
leakspeed: 1s
blackhole: 5m
#low false-positive approach: we require distinct payloads
distinct: evt.Parsed.http_args
labels:
service: http
type: sqli_probing
remediation: true

View File

@@ -0,0 +1,20 @@
type: leaky
#requires at least 2.0 because it's using the 'data' section and the 'Upper' expr helper
format: 2.0
name: crowdsecurity/http-xss-probbing
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/xss_probe_patterns.txt
dest_file: xss_probe_patterns.txt
type: string
description: "A scenario that detects XSS probing with minimal false positives"
filter: "evt.Meta.log_type in ['http_access-log', 'http_error-log'] && any(File('xss_probe_patterns.txt'), {Upper(evt.Parsed.http_args) contains Upper(#)})"
groupby: evt.Meta.source_ip
capacity: 5
leakspeed: 1s
blackhole: 5m
#low false-positive approach: we require distinct payloads
distinct: evt.Parsed.http_args
labels:
service: http
type: xss_probing
remediation: true

View File

@@ -0,0 +1,16 @@
type: trigger
format: 2.0
#debug: true
name: crowdsecurity/jira_cve-2021-26086
description: "Detect Atlassian Jira CVE-2021-26086 exploitation attempts"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and any(File("jira_cve_2021-26086.txt"), {Upper(evt.Meta.http_path) contains Upper(#)})
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/jira_cve_2021-26086.txt
dest_file: jira_cve_2021-26086.txt
type: string
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,13 @@
type: trigger
format: 2.0
name: crowdsecurity/netgear_rce
description: "Detect Netgear RCE DGN1000/DGN220 exploitation attempts"
filter: |
evt.Meta.log_type in ['http_access-log', 'http_error-log'] && Lower(QueryUnescape(evt.Meta.http_path)) startsWith Lower('/setup.cgi?next_file=netgear.cfg&todo=syscmd&cmd=')
groupby: "evt.Meta.source_ip"
blackhole: 2m
references:
- "https://www.exploit-db.com/exploits/25978"
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,13 @@
type: leaky
#debug: true
name: crowdsecurity/nginx-req-limit-exceeded
description: "Detects IPs that violate nginx's user-set request limit."
filter: evt.Meta.sub_type == 'req_limit_exceeded'
leakspeed: "60s"
capacity: 5
groupby: evt.Meta.source_ip
blackhole: 5m
labels:
service: nginx
type: bruteforce
remediation: true

View File

@@ -0,0 +1,33 @@
# postfix spam
type: leaky
name: crowdsecurity/postfix-spam
description: "Detect spammers"
filter: "evt.Meta.log_type_enh == 'spam-attempt' || evt.Meta.log_type == 'postfix' && evt.Meta.action == 'reject'"
leakspeed: "10s"
references:
- https://en.wikipedia.org/wiki/Spamming
capacity: 5
groupby: evt.Meta.source_ip
blackhole: 1m
reprocess: false
labels:
service: postfix
type: bruteforce
remediation: true
---
# postscreen pregreet
type: trigger
name: crowdsecurity/postscreen-rbl
description: "Detect spammers"
filter: "evt.Meta.service == 'postscreen' && evt.Meta.pregreet == 'PREGREET'"
leakspeed: "10s"
references:
- https://en.wikipedia.org/wiki/Spamming
groupby: evt.Meta.source_ip
blackhole: 1m
reprocess: false
labels:
service: postscreen
type: bruteforce
remediation: true

View File

@@ -0,0 +1,14 @@
type: trigger
format: 2.0
name: crowdsecurity/pulse-secure-sslvpn-cve-2019-11510
description: "Detect cve-2019-11510 exploitation attempts"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
(Upper(evt.Meta.http_path) matches Upper('/dana-na/../dana/html5acc/guacamole/../../../../../../../[^?]+\\?/dana/html5acc/guacamole/')
or
Upper(evt.Meta.http_path) matches Upper('/dana-na/%2E%2E/dana/html5acc/guacamole/%2E%2E/%2E%2E/%2E%2E/%2E%2E/%2E%2E/%2E%2E/%2E%2E/[^?]+\\?/dana/html5acc/guacamole/'))
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,12 @@
type: trigger
format: 2.0
name: crowdsecurity/spring4shell_cve-2022-22965
description: "Detect cve-2022-22965 probing"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and
(Upper(evt.Meta.http_path) contains 'CLASS.MODULE.CLASSLOADER.')
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,32 @@
# ssh bruteforce
type: leaky
name: crowdsecurity/ssh-bf
description: "Detect ssh bruteforce"
filter: "evt.Meta.log_type == 'ssh_failed-auth'"
leakspeed: "10s"
references:
- http://wikipedia.com/ssh-bf-is-bad
capacity: 5
groupby: evt.Meta.source_ip
blackhole: 1m
reprocess: true
labels:
service: ssh
type: bruteforce
remediation: true
---
# ssh user-enum
type: leaky
name: crowdsecurity/ssh-bf_user-enum
description: "Detect ssh user enum bruteforce"
filter: evt.Meta.log_type == 'ssh_failed-auth'
groupby: evt.Meta.source_ip
distinct: evt.Meta.target_user
leakspeed: 10s
capacity: 5
blackhole: 1m
labels:
service: ssh
type: bruteforce
remediation: true
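# A rough reading of the bucket parameters above (timing is illustrative):
#   capacity 5 + leakspeed 10s: the bucket leaks one event every 10s, so roughly six
#   failed-auth events from one source_ip arriving faster than that overflow it;
#   ssh-bf_user-enum counts the same events but only distinct target_user values,
#   and blackhole 1m silences further overflows for that ip for one minute.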

View File

@@ -0,0 +1,32 @@
# ssh bruteforce
type: leaky
name: crowdsecurity/ssh-slow-bf
description: "Detect slow ssh bruteforce"
filter: "evt.Meta.log_type == 'ssh_failed-auth'"
leakspeed: "60s"
references:
- http://wikipedia.com/ssh-bf-is-bad
capacity: 10
groupby: evt.Meta.source_ip
blackhole: 1m
reprocess: true
labels:
service: ssh
type: bruteforce
remediation: true
---
# ssh user-enum
type: leaky
name: crowdsecurity/ssh-slow-bf_user-enum
description: "Detect slow ssh user enum bruteforce"
filter: evt.Meta.log_type == 'ssh_failed-auth'
groupby: evt.Meta.source_ip
distinct: evt.Meta.target_user
leakspeed: 60s
capacity: 10
blackhole: 1m
labels:
service: ssh
type: bruteforce
remediation: true

View File

@@ -0,0 +1,16 @@
type: trigger
format: 2.0
#debug: true
name: crowdsecurity/thinkphp-cve-2018-20062
description: "Detect ThinkPHP CVE-2018-20062 exploitation attempts"
filter: |
evt.Meta.log_type in ["http_access-log", "http_error-log"] and any(File("thinkphp_cve_2018-20062.txt"), {Upper(evt.Meta.http_path) matches Upper(#)})
data:
- source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/thinkphp_cve_2018-20062.txt
dest_file: thinkphp_cve_2018-20062.txt
type: string
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,11 @@
type: trigger
format: 2.0
name: crowdsecurity/vmware-cve-2022-22954
description: "Detect Vmware CVE-2022-22954 exploitation attempts"
filter: |
evt.Meta.log_type in ['http_access-log', 'http_error-log'] && Upper(QueryUnescape(evt.Meta.http_path)) startsWith Upper('/catalog-portal/ui/oauth/verify?error=&deviceUdid=${"freemarker.template.utility.Execute"?new()(')
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,11 @@
type: trigger
format: 2.0
name: crowdsecurity/vmware-vcenter-vmsa-2021-0027
description: "Detect VMSA-2021-0027 exploitation attempts"
filter: |
evt.Meta.log_type in ['http_access-log', 'http_error-log'] && evt.Meta.http_path matches '/ui/vcav-bootstrap/rest/vcav-providers/provider-logo\\?url=(file|http)'
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
type: exploit
remediation: true

View File

@@ -0,0 +1,12 @@
#contributed by ltsich
type: trigger
name: ltsich/http-w00tw00t
description: "Detect w00tw00t scans"
debug: false
filter: "evt.Meta.log_type == 'http_access-log' && evt.Parsed.file_name contains 'w00tw00t.at.ISC.SANS.DFind'"
groupby: evt.Meta.source_ip
blackhole: 5m
labels:
service: http
type: scan
remediation: true