commit dc5169d84e (parent d4190f2e82)
@ -0,0 +1,31 @@
#!/bin/bash

## Check for root
if [ "$EUID" -ne 0 ]; then
    echo "Please run this script with sudo"
    exit 1
fi

# Update package index and install fail2ban
apt update && apt install -y fail2ban

# Copy the configuration files
cp nginx-conf/* /etc/fail2ban/filter.d/
cp jail.local /etc/fail2ban/
rm -rf /etc/fail2ban/jail.d/*

# Get the host IP for the configuration (source address of the default route)
IP=$(ip route get 1.2.3.4 | awk '{print $7}')
sed -i "s/IP/$IP/" /etc/fail2ban/jail.local

# Get the SSH port from sshd_config (fall back to 22 if no explicit Port line)
SSH=$(awk '/^Port[[:space:]]/{print $2; exit}' /etc/ssh/sshd_config); SSH=${SSH:-22}
sed -i "s/SSH_PORT/$SSH/" /etc/fail2ban/jail.local

# Raise dbpurgeage so ban history is kept for a year instead of a day
sed -i "s/^dbpurgeage.*/dbpurgeage = 365d/" /etc/fail2ban/fail2ban.conf

systemctl restart fail2ban
fail2ban-client status

echo "Jails other than SSH ban for 72 hours (SSH bans are permanent)"
@ -0,0 +1,84 @@
[DEFAULT]

ignoreip = 127.0.0.1/8 IP
bantime = 3600
findtime = 3600
#mta = mail
#destemail =
#sendername = Fail2BanAlerts
#action = %(action_mwl)s

[nginx-http-auth]

enabled = true
filter = nginx-http-auth
port = http,https
logpath = /var/log/nginx/*error.log
bantime = 259200

#[nginx-badbots]

#enabled = true
#port = http,https
#filter = nginx-badbots
#logpath = /var/log/nginx/access.log
#maxretry = 2

[nginx-badbots]

enabled = true
port = http,https
filter = nginx-badbots
failregex = ^<HOST> -.*"(GET|POST|HEAD).*HTTP.*" 437
#ignoreregex =
backend = auto
logpath = /var/log/nginx/*access.log
bantime = 259200
maxretry = 1

[nginx-nohome]

enabled = true
port = http,https
filter = nginx-nohome
logpath = /var/log/nginx/*access.log
bantime = 259200
maxretry = 2

[nginx-noproxy]

enabled = true
port = http,https
filter = nginx-noproxy
logpath = /var/log/nginx/*access.log
bantime = 259200
maxretry = 2

[nginx-req-limit]

enabled = true
filter = nginx-req-limit
action = iptables-multiport[name=ReqLimit, port="http,https", protocol=tcp]
logpath = /var/log/nginx/*error.log
findtime = 600
bantime = 259200
maxretry = 10

[nginx-conn-limit]

enabled = true
filter = nginx-conn-limit
action = iptables-multiport[name=ConnLimit, port="http,https", protocol=tcp]
logpath = /var/log/nginx/*error.log
findtime = 300
bantime = 259200
maxretry = 100

[ssh]

enabled = true
port = SSH_PORT
filter = sshd
logpath = /var/log/auth.log
maxretry = 3
bantime = -1
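Because the [ssh] jail uses bantime = -1, a ban never expires on its own; a minimal sketch for lifting one by hand (the address below is a placeholder):

# Remove a single address from the permanent SSH jail
fail2ban-client set ssh unbanip 203.0.113.7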
@ -0,0 +1,25 @@
# Fail2Ban configuration file
#
# Regexp to catch known spambots and software alike. Please verify
# that it is your intent to block IPs which were driven by
# above mentioned bots.


[Definition]

badbotscustom = EmailCollector|WebEMailExtrac|TrackBack/1\.02|sogou music spider|(?:Mozilla/\d+\.\d+ )?Jorgee
badbots = Atomic_Email_Hunter/4\.0|atSpider/1\.0|autoemailspider|bwh3_user_agent|China Local Browse 2\.6|ContactBot/0\.2|ContentSmartz|DataCha0s/2\.0|DBrowse 1\.4b|DBrowse 1\.4d|Demo Bot DOT 16b|Demo Bot Z 16b|DSurf15a 01|DSurf15a 71|DSurf15a 81|DSurf15a VA|EBrowse 1\.4b|Educate Search VxB|EmailSiphon|EmailSpider|EmailWolf 1\.00|ESurf15a 15|ExtractorPro|Franklin Locator 1\.8|FSurf15a 01|Full Web Bot 0416B|Full Web Bot 0516B|Full Web Bot 2816B|Guestbook Auto Submitter|Industry Program 1\.0\.x|ISC Systems iRc Search 2\.1|IUPUI Research Bot v 1\.9a|LARBIN-EXPERIMENTAL \(efp@gmx\.net\)|LetsCrawl\.com/1\.0 \+http\://letscrawl\.com/|Lincoln State Web Browser|LMQueueBot/0\.2|LWP\:\:Simple/5\.803|Mac Finder 1\.0\.xx|MFC Foundation Class Library 4\.0|Microsoft URL Control - 6\.00\.8xxx|Missauga Locate 1\.0\.0|Missigua Locator 1\.9|Missouri College Browse|Mizzu Labs 2\.2|Mo College 1\.9|MVAClient|Mozilla/2\.0 \(compatible; NEWT ActiveX; Win32\)|Mozilla/3\.0 \(compatible; Indy Library\)|Mozilla/3\.0 \(compatible; scan4mail \(advanced version\) http\://www\.peterspages\.net/?scan4mail\)|Mozilla/4\.0 \(compatible; Advanced Email Extractor v2\.xx\)|Mozilla/4\.0 \(compatible; Iplexx Spider/1\.0 http\://www\.iplexx\.at\)|Mozilla/4\.0 \(compatible; MSIE 5\.0; Windows NT; DigExt; DTS Agent|Mozilla/4\.0 efp@gmx\.net|Mozilla/5\.0 \(Version\: xxxx Type\:xx\)|NameOfAgent \(CMS Spider\)|NASA Search 1\.0|Nsauditor/1\.x|PBrowse 1\.4b|PEval 1\.4b|Poirot|Port Huron Labs|Production Bot 0116B|Production Bot 2016B|Production Bot DOT 3016B|Program Shareware 1\.0\.2|PSurf15a 11|PSurf15a 51|PSurf15a VA|psycheclone|RSurf15a 41|RSurf15a 51|RSurf15a 81|searchbot admin@google\.com|ShablastBot 1\.0|snap\.com beta crawler v0|Snapbot/1\.0|Snapbot/1\.0 \(Snap Shots, \+http\://www\.snap\.com\)|sogou develop spider|Sogou Orion spider/3\.0\(\+http\://www\.sogou\.com/docs/help/webmasters\.htm#07\)|sogou spider|Sogou web spider/3\.0\(\+http\://www\.sogou\.com/docs/help/webmasters\.htm#07\)|sohu agent|SSurf15a 11 |TSurf15a 11|Under the Rainbow 2\.2|User-Agent\: Mozilla/4\.0 \(compatible; MSIE 6\.0; Windows NT 5\.1\)|VadixBot|WebVulnCrawl\.unknown/1\.0 libwww-perl/5\.803|Wells Search II|WEP Search 00

failregex = ^<HOST> -.*"(GET|POST|HEAD).*HTTP.*"(?:%(badbots)s|%(badbotscustom)s)"$

ignoreregex =

datepattern = ^[^\[]*\[({DATE})
              {^LN-BEG}

# DEV Notes:
# List of bad bots fetched from http://www.user-agents.org
# Generated on Thu Nov 7 14:23:35 PST 2013 by files/gen_badbots.
#
# Author: Yaroslav Halchenko
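To check this filter outside of a live jail, fail2ban-regex can be fed a single literal log line instead of a file; a sketch, assuming the filter was installed as /etc/fail2ban/filter.d/nginx-badbots.conf (the jail above refers to it as filter = nginx-badbots) and using a sample request whose user agent appears in badbotscustom:

# Dry-run the filter against one sample access-log line
fail2ban-regex '203.0.113.7 - - [07/Nov/2013:14:23:35 -0800] "GET / HTTP/1.1" 200 612 "-" "EmailCollector"' \
    /etc/fail2ban/filter.d/nginx-badbots.conf

Note that the [nginx-badbots] jail in jail.local overrides failregex with the 437-status variant, so the stock expression above only applies if that override is removed.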
@ -0,0 +1,23 @@
# Fail2Ban filter to match web requests for selected URLs that don't exist
#

[INCLUDES]

# Load regexes for filtering
before = botsearch-common.conf

[Definition]

failregex = ^<HOST> \- \S+ \[\] \"(GET|POST|HEAD) \/<block> \S+\" 404 .+$
            ^ \[error\] \d+#\d+: \*\d+ (\S+ )?\"\S+\" (failed|is not found) \(2\: No such file or directory\), client\: <HOST>\, server\: \S*\, request: \"(GET|POST|HEAD) \/<block> \S+\"\, .*?$

ignoreregex =

datepattern = {^LN-BEG}%%ExY(?P<_sep>[-/.])%%m(?P=_sep)%%d[T ]%%H:%%M:%%S(?:[.,]%%f)?(?:\s*%%z)?
              ^[^\[]*\[({DATE})
              {^LN-BEG}

# DEV Notes:
# Based on apache-botsearch filter
#
# Author: Frantisek Sumsal
@ -0,0 +1,10 @@
# Fail2Ban configuration file
# supports: ngx_http_limit_conn_module

[Definition]
failregex = limiting connections by zone.*client: <HOST>

# Option: ignoreregex
# Notes.: regex to ignore. If this regex matches, the line is ignored.
# Values: TEXT
ignoreregex =
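These "limiting connections by zone" messages only appear in the error log when nginx itself enforces a connection limit (limit_conn_zone / limit_conn from ngx_http_limit_conn_module), so the jail stays silent otherwise. A sketch of dry-running the filter against the existing error log, assuming it was installed as /etc/fail2ban/filter.d/nginx-conn-limit.conf:

# Report which error-log lines the filter would match
fail2ban-regex /var/log/nginx/error.log /etc/fail2ban/filter.d/nginx-conn-limit.conf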
@ -0,0 +1,17 @@
# fail2ban filter configuration for nginx


[Definition]


failregex = ^ \[error\] \d+#\d+: \*\d+ user "(?:[^"]+|.*?)":? (?:password mismatch|was not found in "[^\"]*"), client: <HOST>, server: \S*, request: "\S+ \S+ HTTP/\d+\.\d+", host: "\S+"(?:, referrer: "\S+")?\s*$

ignoreregex =

datepattern = {^LN-BEG}

# DEV NOTES:
# Based on samples in https://github.com/fail2ban/fail2ban/pull/43/files
# Extensive search of all nginx auth failures not done yet.
#
# Author: Daniel Black
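As with the other filters, a single error-log line can be handed to fail2ban-regex to confirm the expression matches; the sample line below is made up, and the path assumes the file was installed as /etc/fail2ban/filter.d/nginx-http-auth.conf:

# Dry-run the auth filter against one fabricated error-log line
fail2ban-regex '2024/01/15 12:00:00 [error] 1234#0: *3 user "admin" was not found in "/etc/nginx/.htpasswd", client: 203.0.113.7, server: example.com, request: "GET /admin HTTP/1.1", host: "example.com"' \
    /etc/fail2ban/filter.d/nginx-http-auth.conf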
@ -0,0 +1,46 @@
# Fail2ban filter configuration for nginx :: limit_req
# used to ban hosts, that were failed through nginx by limit request processing rate
#
# Author: Serg G. Brester (sebres)
#
# To use 'nginx-limit-req' filter you should have `ngx_http_limit_req_module`
# and define `limit_req` and `limit_req_zone` as described in nginx documentation
# http://nginx.org/en/docs/http/ngx_http_limit_req_module.html
#
# Example:
#
#   http {
#     ...
#     limit_req_zone $binary_remote_addr zone=lr_zone:10m rate=1r/s;
#     ...
#     # http, server, or location:
#     location ... {
#       limit_req zone=lr_zone burst=1 nodelay;
#       ...
#     }
#     ...
#   }
#   ...
#

[Definition]

# Specify following expression to define exact zones, if you want to ban IPs limited
# from specified zones only.
# Example:
#
#   ngx_limit_req_zones = lr_zone|lr_zone2
#
ngx_limit_req_zones = [^"]+

# Use following full expression if you should range limit request to specified
# servers, requests, referrers etc. only :
#
# failregex = ^\s*\[[a-z]+\] \d+#\d+: \*\d+ limiting requests, excess: [\d\.]+ by zone "(?:%(ngx_limit_req_zones)s)", client: <HOST>, server: \S*, request: "\S+ \S+ HTTP/\d+\.\d+", host: "\S+"(, referrer: "\S+")?\s*$

# Shortly, much faster and stable version of regexp:
failregex = ^\s*\[[a-z]+\] \d+#\d+: \*\d+ limiting requests, excess: [\d\.]+ by zone "(?:%(ngx_limit_req_zones)s)", client: <HOST>,

ignoreregex =

datepattern = {^LN-BEG}
@ -0,0 +1,6 @@
[Definition]

failregex = ^<HOST> -.*GET .*/~.*

ignoreregex =
@ -0,0 +1,7 @@
[Definition]

failregex = ^<HOST> -.*GET http.*

ignoreregex =
@ -0,0 +1,9 @@
# Fail2Ban configuration file
# supports: ngx_http_limit_req_module

[Definition]
failregex = limiting requests, excess:.* by zone.*client: <HOST>

# Option: ignoreregex
# Notes.: regex to ignore. If this regex matches, the line is ignored.
# Values: TEXT
ignoreregex =