diff options
author | gesang <gesang@itinerariummentis.org> | 2024-06-18 10:48:08 +0000 |
---|---|---|
committer | gesang <gesang@itinerariummentis.org> | 2024-06-18 10:48:08 +0000 |
commit | 062167341a45cd03a08bcca9db8b2b71af288d43 (patch) | |
tree | 598846e6a1f24485da547bd782cde86e99e2e412 /data/config.php | |
parent | 50590374a68ead77d3dedba40fff1d1e3d1f8dd5 (diff) | |
parent | 40da72e8554e9526379397a8c68f2895292a79c2 (diff) |
Merge remote-tracking branch 'refs/remotes/origin/master'
Diffstat (limited to 'data/config.php')
-rw-r--r-- | data/config.php | 33 |
1 file changed, 17 insertions, 16 deletions
diff --git a/data/config.php b/data/config.php index 7521489..a3b1080 100644 --- a/data/config.php +++ b/data/config.php @@ -36,9 +36,9 @@ class config{ // Eg. data/captcha/birds/1.png up to 2263.png const CAPTCHA_DATASET = [ // example: - // ["birds", 2263], - // ["fumo_plushies", 1006], - // ["minecraft", 848] + //["birds", 2263], + //["fumo_plushies", 1006], + //["minecraft", 848] ]; // If this regex expression matches on the user agent, it blocks the request @@ -49,18 +49,18 @@ class config{ // Eg: ["x-forwarded-for", "x-via", "forwarded-for", "via"]; // Useful for blocking *some* proxies used for botting const FILTERED_HEADER_KEYS = [ - "x-forwarded-for", - "x-cluster-client-ip", - "x-client-ip", - "x-real-ip", - "client-ip", - "real-ip", - "forwarded-for", - "forwarded-for-ip", - "forwarded", - "proxy-connection", - "remote-addr", - "via" + //"x-forwarded-for", + //"x-cluster-client-ip", + //"x-client-ip", + //"x-real-ip", + //"client-ip", + //"real-ip", + //"forwarded-for", + //"forwarded-for-ip", + //"forwarded", + //"proxy-connection", + //"remote-addr", + //"via" ]; // Maximal number of searches per captcha key/pass issued. Counter gets @@ -106,7 +106,8 @@ class config{ "https://4get.lol", "https://4get.ch", "https://4get.edmateo.site", - "https://4get.sudovanilla.org" + "https://4get.sudovanilla.org", + "https://search.mint.lgbt" ]; // Default user agent to use for scraper requests. Sometimes ignored to get specific webpages |