<?php
class config{
	// Welcome to the 4get configuration file
	// When updating your instance, please make sure this file isn't missing
	// any parameters.
	
	// 4get version. Please keep this updated
	const VERSION = 8;
	
	// Will be shown pretty much everywhere.
	const SERVER_NAME = "4get";
	
	// Will be shown in <meta> tag on home page
	const SERVER_SHORT_DESCRIPTION = "They live in our walls!";
	
	// Will be shown in server list ping (null for no description)
	const SERVER_LONG_DESCRIPTION = null;
	
	// Add your own themes in "static/themes". Set to "Dark" for default theme.
	// Eg. To use "static/themes/Cream.css", specify "Cream".
	const DEFAULT_THEME = "Dark";
	
	// Enable the API?
	const API_ENABLED = true;
	
	//
	// BOT PROTECTION
	//
	
	// 0 = disabled, 1 = ask for image captcha, @TODO: 2 = invite only (users need a pass)
	// VERY useful against a targeted attack
	const BOT_PROTECTION = 0;
	
	// If BOT_PROTECTION is set to 1, specify the available datasets here.
	// Images should be named from 1.png to X.png and be 100x100 in size.
	// Eg. data/captcha/birds/1.png up to 2263.png
	const CAPTCHA_DATASET = [
		// example:
		// ["birds", 2263],
		// ["fumo_plushies", 1006],
		// ["minecraft", 848]
	];
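	
	// A minimal sketch of how a dataset entry could be used to serve a
	// random challenge image (assumed logic, for illustration only):
	//
	// $set = config::CAPTCHA_DATASET[array_rand(config::CAPTCHA_DATASET)];
	// $image = "data/captcha/" . $set[0] . "/" . random_int(1, $set[1]) . ".png";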
	
	// If this regex matches the user agent, the request is blocked
	// Not useful at all against a targeted attack
	const HEADER_REGEX = '/bot|wget|curl|python-requests|scrapy|go-http-client|ruby|yahoo|spider|qwant/i';
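	
	// Illustrative sketch of how a check like this can be applied (not
	// 4get's actual filtering code, which lives outside this file):
	//
	// if(preg_match(config::HEADER_REGEX, $_SERVER["HTTP_USER_AGENT"] ?? "") === 1){
	//     http_response_code(403);
	//     die();
	// }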
	
	// Block clients who present any of the following headers in their request (SPECIFY IN !!lowercase!!)
	// Eg: ["x-forwarded-for", "x-via", "forwarded-for", "via"];
	// Useful for blocking *some* proxies used for botting
	const FILTERED_HEADER_KEYS = [
		"x-forwarded-for",
		"x-cluster-client-ip",
		"x-client-ip",
		"x-real-ip",
		"client-ip",
		"real-ip",
		"forwarded-for",
		"forwarded-for-ip",
		"forwarded",
		"proxy-connection",
		"remote-addr",
		"via"
	];
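	
	// Illustrative sketch of the check this list implies (assumed, not the
	// actual implementation):
	//
	// foreach(getallheaders() as $key => $value){
	//     if(in_array(strtolower($key), config::FILTERED_HEADER_KEYS, true)){
	//         http_response_code(403);
	//         die();
	//     }
	// }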
	
	// Maximum number of searches per captcha key/pass issued. The counter
	// gets reset on every APCu cache clear (should happen once a day).
	// Only useful when BOT_PROTECTION is NOT set to 0
	const MAX_SEARCHES = 100;
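	
	// Illustrative sketch of the per-key counter this implies (assumed
	// logic; the APCu key name is hypothetical):
	//
	// $count = apcu_inc("searches_" . $captcha_key);
	// if($count > config::MAX_SEARCHES){
	//     // reject the request and ask for a new captcha key
	// }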
	
	// List of domains that point to your servers. Include your tor/i2p
	// addresses here! Each entry must be a valid URL. Won't affect links
	// placed on the homepage.
	const ALT_ADDRESSES = [
		//"https://4get.alt-tld",
		//"http://4getwebfrq5zr4sxugk6htxvawqehxtdgjrbcn2oslllcol2vepa23yd.onion"
	];
	
	// Known 4get instances. Entries MUST use the https protocol if the
	// instance supports it. This list is used to generate a distributed
	// list of instances. To appear in another instance's list, contact its
	// host; once everyone has added each other, your server should appear
	// everywhere.
	const INSTANCES = [
		"https://4get.ca",
		"https://4get.zzls.xyz",
		"https://4getus.zzls.xyz",
		"https://4get.silly.computer",
		"https://4get.konakona.moe",
		"https://4get.lvkaszus.pl",
		"https://4g.ggtyler.dev",
		"https://4get.perennialte.ch",
		"https://4get.sijh.net",
		"https://4get.hbubli.cc",
		"https://4get.plunked.party",
		"https://4get.seitan-ayoub.lol",
		"https://4get.etenie.pl",
		"https://4get.lunar.icu",
		"https://4get.dcs0.hu",
		"https://4get.kizuki.lol",
		"https://4get.psily.garden",
		"https://search.milivojevic.in.rs",
		"https://4get.snine.nl",
		"https://4get.datura.network",
		"https://4get.neco.lol",
		"https://4get.lol",
		"https://4get.ch",
		"https://4get.edmateo.site",
		"https://4get.sudovanilla.org"
	];
	
	// Default user agent to use for scraper requests. Some scrapers ignore
	// it when fetching specific webpages.
	// Changing this might break things.
	const USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:127.0) Gecko/20100101 Firefox/127.0";
	
	// Proxy pool assignments for each scraper
	// false = Use server's raw IP
	// string = will load a proxy list from data/proxies
	// Eg. "onion" will load data/proxies/onion.txt
	const PROXY_DDG = false; // duckduckgo
	const PROXY_BRAVE = false;
	const PROXY_FB = false; // facebook
	const PROXY_GOOGLE = false;
	const PROXY_QWANT = false;
	const PROXY_MARGINALIA = false;
	const PROXY_MOJEEK = false;
	const PROXY_SC = false; // soundcloud
	const PROXY_SPOTIFY = false;
	const PROXY_WIBY = false;
	const PROXY_CURLIE = false;
	const PROXY_YT = false; // youtube
	const PROXY_YEP = false;
	const PROXY_PINTEREST = false;
	const PROXY_SEZNAM = false;
	const PROXY_NAVER = false;
	const PROXY_GREPPR = false;
	const PROXY_CROWDVIEW = false;
	const PROXY_MWMBL = false;
	const PROXY_FTM = false; // findthatmeme
	const PROXY_IMGUR = false;
	const PROXY_YANDEX_W = false; // yandex web
	const PROXY_YANDEX_I = false; // yandex images
	const PROXY_YANDEX_V = false; // yandex videos
	
	//
	// Scraper-specific parameters
	//
	
	// MARGINALIA
	// Use "null" to default out to HTML scraping OR specify a string to
	// use the API (Eg: "public"). API has less filters.
	const MARGINALIA_API_KEY = null;
}