-
Notifications
You must be signed in to change notification settings - Fork 17
/
config_sample.py
81 lines (61 loc) · 2.34 KB
/
config_sample.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
# -*- coding: utf-8 -*-
# Sample configuration for the btdht crawler.
# 0 no debug, 100 full debug
debug = 0
# MongoDB connection settings; user/pwd of None means no authentication
mongo = {
    "db": "btdht-crawler",
    "host": "127.0.0.1",
    "port": 27017,
    "user": None,
    "pwd": None,
}
# crawler udp base port. port will be chosen (deterministically)
# between crawler_base_port and crawler_base_port + 255
# if you launch multiple workers it may happen that the first
# byte of the id is equal between 2 workers
# then you will have an error saying port already in use
# just remove one of the crawler%d.id and restart
crawler_base_port = [12345]
# number of processes to spawn for crawling the dht
# one per proc is good, more is useless; increase
# the instance number instead
crawler_worker = 1
# power of 2 giving the number of dht instances to launch per worker
# while crawling: 4 is for 2^4=16 instances
crawler_instance = 3  # 8 instances
# max resident memory in bytes
crawler_max_memory = 8 * 1024 * 1024 * 1024  # 8GB
# base directory for crawler data files
data_dir = "data/"
# where to write torrents retrieved from dht or torcache
torrents_dir = "torrents/"
# where to move processed torrents
torrents_done = "torrents_done/"
# where to archive torrents. The script will create
# one subdirectory per day in this directory
torrents_archive = "torrents_archives/"
# If the torrent has successfully been uploaded to torcache
# the list of pieces will be removed from the torrent file
# before archiving. It allows keeping all the interesting
# metadata of the .torrent (name, list of files) but drastically
# reduces its size. The file will be unusable in a torrent client
compact_archived_torrents = True
# which dir to watch for new torrents
# this is where you can add custom torrents by hand
# just by copying them into that directory
torrents_new = "torrents_new/"
# directory used to store tracker full scrape files
torrents_scrape = "torrents_scrape/"
# where to move torrents that failed processing
torrents_error = "torrents_errors/"
# trackers queried for full scrapes
scrape_trackers = [
    "udp://tracker.leechers-paradise.org:6969/announce",
    "udp://tracker.zer0day.to:1337/announce",
    "udp://tracker.coppersurfer.tk:6969/announce",
    "udp://tracker.opentrackr.org:1337/announce",
    "udp://tracker.internetwarriors.net:1337/announce",
    "udp://tracker.sktorrent.net:6969/announce",
    "udp://tracker.pirateparty.gr:6969/announce",
    "udp://tracker.desu.sh:6969",
]
# NOTE(review): a duplicate `data_dir = "data/"` assignment that appeared
# here (identical to the one above) has been removed.
# peer IPs to ignore while crawling
ignored_ip = {"188.165.207.160"}
# if None, all private ip networks are ignored
ignored_net = None