Drop spiders, we don't need them anymore, and add a grafana config
@@ -1,16 +1,12 @@
 # Define networks to be used later below
 networks:
   # Networks will get included from snippets


 challenges:
   # Challenges will get included from snippets
   dnsbl:
     runtime: dnsbl
     parameters:
       dnsbl-host: "dnsbl.dronebl.org"
       dnsbl-decay: 1h
       dnsbl-timeout: 1s

 conditions:
   # Conditions will get replaced on rules AST when found as ($condition-name)

   # Conditions will get included from snippets


   is-static-asset:
     - 'path == "/apple-touch-icon.png"'
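
The ($condition-name) substitution and the *is-... YAML anchors used throughout the rules come from the included snippets. A minimal sketch of how such a snippet presumably looks (the is-bot-example name and UA string are made up for illustration; the real snippets ship with go-away):

conditions:
  # hypothetical snippet entry
  is-bot-example: &is-bot-example 'userAgent.contains("ExampleBot/1.0")'

rules:
  - name: allow-example
    conditions:
      - '($is-bot-example)'   # replaced on the rules AST, or:
      - *is-bot-example       # referenced as a plain YAML anchor
    action: pass
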
@@ -51,25 +47,13 @@ rules:
       - 'path.startsWith("/hls/")'
     action: pass

-  - name: desired-crawlers
+  - name: allow-private-networks
     conditions:
-      - *is-bot-googlebot
-      - *is-bot-bingbot
-      - *is-bot-duckduckbot
-      - *is-bot-kagibot
-      - *is-bot-qwantbot
-      - *is-bot-yandexbot
+      # Allows localhost and private networks CIDR
+      - *is-network-localhost
+      - *is-network-private
     action: pass

-  # Matches private networks and localhost.
-  # Uncomment this if you want to let your own tools through this way
-  #- name: allow-private-networks
-  #  conditions:
-  #    # Allows localhost and private networks CIDR
-  #    - *is-network-localhost
-  #    - *is-network-private
-  #  action: pass
-
   - name: undesired-crawlers
     conditions:
       - '($is-headless-chromium)'
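
With desired-crawlers gone, search spiders no longer get an unconditional pass; they are dropped further down instead. If a single crawler ever needs re-admitting, a rule along these lines should work, reusing the snippet anchors already referenced above (untested sketch; the rule name is hypothetical):

rules:
  - name: allow-googlebot-again
    conditions:
      - *is-bot-googlebot
    action: pass
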
@@ -94,6 +78,15 @@ rules:
       - 'userAgent.contains("Amazonbot") || userAgent.contains("Google-Extended") || userAgent.contains("PanguBot") || userAgent.contains("AI2Bot") || userAgent.contains("Diffbot") || userAgent.contains("cohere-training-data-crawler") || userAgent.contains("Applebot-Extended")'
       # SEO / Ads and marketing
       - 'userAgent.contains("BLEXBot")'
+      # Yandex isn't caught, and doesn't seem to care about robots.txt
+      - 'userAgent.contains("YandexBot/3.0; +http://yandex.com/bots)")'
+      # At this point I'd rather not have any search browser crawl the frontend.
+      - *is-bot-googlebot
+      - *is-bot-bingbot
+      - *is-bot-duckduckbot
+      - *is-bot-kagibot
+      - *is-bot-qwantbot
+      - *is-bot-yandexbot
     action: drop

   - name: unknown-crawlers
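
The YandexBot pattern looks lopsided because it matches the tail of the full User-Agent string, closing parenthesis included. For reference, the UA Yandex documents is shown in the comment below; the condition is a plain substring match against it (sketch only, verify against real logs):

conditions:
  # full UA: Mozilla/5.0 (compatible; YandexBot/3.0; +http://yandex.com/bots)
  # the trailing ")" is part of the matched substring, not a syntax slip
  - 'userAgent.contains("YandexBot/3.0; +http://yandex.com/bots)")'
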
@@ -110,7 +103,7 @@ rules:
   - name: 0
     action: check
     settings:
-      challenges: [js-refresh]
+      challenges: [js-refresh, js-pow-sha256]
   - name: 1
     action: check
     settings:
@@ -129,12 +122,12 @@ rules:
       # if DNSBL fails, check additional challenges
       fail: check
       fail-settings:
-        challenges: [js-refresh]
+        challenges: [js-refresh, js-pow-sha256]

   - name: suspicious-fetchers
     action: check
     settings:
-      challenges: [js-refresh]
+      challenges: [js-refresh, js-pow-sha256]
     conditions:
       - 'userAgent.contains("facebookexternalhit/") || userAgent.contains("facebookcatalog/")'
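
Pieced together with the dnsbl challenge declared at the top of the file, the intent here reads as: consult the DNSBL first, and only fall back to browser challenges when the lookup fails. A sketch of how the full rule presumably fits together (the rule name is a stand-in; fail and fail-settings as shown in the hunk above):

rules:
  - name: dnsbl-lookup            # hypothetical name
    action: check
    settings:
      challenges: [dnsbl]
      # if DNSBL fails, check additional challenges
      fail: check
      fail-settings:
        challenges: [js-refresh, js-pow-sha256]
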
@@ -151,6 +144,12 @@ rules:
     context-set:
       # Map OpenGraph or similar <meta> tags back to the reply, even if denied/challenged
       proxy-meta-tags: "true"
+      response-headers:
+        # Solves the varnish bug even if we pulled it through a different way.
+        reddit-stats:
+          - io=1
+        via:
+          - 1.1 varnish

     # Set additional response headers
     #response-headers:
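
The commented-out response-headers key suggests the same header-to-value-list shape works for arbitrary headers, not just the varnish workaround. For instance, to discourage indexing of every reply (header and value chosen purely for illustration):

response-headers:
  X-Robots-Tag:
    - noindex, nofollow
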
@@ -177,5 +176,5 @@ rules:
   - name: standard-browser
     action: challenge
     settings:
-      challenges: [preload-link, meta-refresh, resource-load, js-refresh]
+      challenges: [preload-link, meta-refresh, resource-load, js-refresh, js-pow-sha256]
     conditions:

@@ -119,6 +119,7 @@ apps:
       FRONT_PAGE: popular
       COMMENT_SORT: new
+      PUSHSHIFT_FRONTEND: "undelete.pullpush.io"
       ROBOTS_DISABLE_INDEXING: on
       BLUR_NSFW: on
       USE_HLS: on
       AUTOPLAY_VIDEOS: off
@@ -126,11 +127,13 @@ apps:
     image: git.projectsegfau.lt/midou/go-away:latest
     ports:
       - "6464:9980"
+      - "9893:9893"
     mounts:
       - "{{data_dir}}/redlib/cache:/cache"
       - "{{configs_dir}}/redlib/policy.yml:/policy.yml:ro"
     environment:
       GOAWAY_BIND: ":9980"
+      GOAWAY_METRICS_BIND: ":9893"
       GOAWAY_BIND_NETWORK: "tcp"
       GOAWAY_CLIENT_IP_HEADER: "X-Real-Ip"
       GOAWAY_POLICY: "/policy.yml"
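
GOAWAY_METRICS_BIND publishes go-away's metrics on the newly exposed port 9893, which is presumably what the Grafana config from the commit title graphs. A minimal Prometheus scrape job for it could look like this (job name and target host are placeholders, and the /metrics path is an assumption):

scrape_configs:
  - job_name: goaway-redlib          # placeholder
    metrics_path: /metrics           # assumed default
    static_configs:
      - targets: ["127.0.0.1:9893"]  # wherever the container port is published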