from urllib.error import URLError
from datetime import datetime
from os import getenv
from dotenv import load_dotenv
# Populate os.environ from a local .env file BEFORE the getenv() call below
# reads NEWSPAPER_PREFIX; this must stay ahead of that module-level lookup.
load_dotenv()
from Overpost import get_newspaper
from MyPyload import Pyload
# Prefix identifying which newspaper to fetch; empty string when the
# NEWSPAPER_PREFIX environment variable is unset or empty.
NEWSPAPER_PREFIX = getenv("NEWSPAPER_PREFIX") or ""
# File hosts in descending order of preference when choosing a link.
HOST_PREFERENCE = [ 'katfile.com', 'rapidgator.net', 'www.easybytez.com' ]
def scroll_list(array, buffer=1000):
    """Yield items of *array* cyclically, capped at *buffer* total items.

    The cap guards against callers consuming the generator forever when
    no element ever satisfies them (see filter_links).

    Bug fixed: the original reset its single counter ``i`` to 0 on wrap,
    so ``i < buffer`` never became false when ``len(array) < buffer`` and
    the cap was dead code; an empty array also raised IndexError.
    """
    array_len = len(array)
    if array_len == 0:
        return  # nothing to cycle over
    for count in range(buffer):
        yield array[count % array_len]
def get_host(link):
    """Return the host component of *link* (the segment after ``//``)."""
    segments = link.split("/")
    return segments[2]
def filter_links(links, hosts):
    """Return the first link in *links* hosted on the next acceptable host.

    Draws hosts one at a time from the *hosts* iterator and returns the
    first link whose host matches; keeps drawing until a match is found
    or *hosts* is exhausted (StopIteration propagates to the caller, as
    it did in the original recursive version).

    Bug fixed: the original recursed once per non-matching host, so a
    link list matching none of the preferred hosts could exhaust the
    call stack (RecursionError). Iteration has no such limit.
    """
    while True:
        host = next(hosts)
        for link in links:
            if get_host(link) == host:
                return link
def get_sorted_links(dictionary):
    """For each entry of *dictionary*, pick one link per the host preference.

    A single host-preference cycle is shared across all entries, so each
    selection resumes from where the previous one left off.
    """
    hosts = scroll_list(HOST_PREFERENCE)
    selected = []
    for links in dictionary.values():
        selected.append(filter_links(links, hosts))
    return selected
def download_link(connection, name, link):
    """Queue *link* on the Pyload *connection* as a single-link package
    called *name*, returning whatever addPackage returns (the package id)."""
    package_id = connection.addPackage(name=name, links=[link])
    return package_id
def handle_links(name, links):
    """Add every link in *links* to Pyload under package *name*.

    Returns the list of package ids. If the Pyload connection is refused
    (URLError, either on connect or while adding), prints the links so
    the user can add them manually and returns an empty list.
    """
    try:
        connection = Pyload()
        package_ids = []
        for link in links:
            package_ids.append(download_link(connection, name, link))
        return package_ids
    except URLError:
        print("\nConnessione a Pyload rifiutata.")
        print(len(links), "link da aggiungere manualmente:\n")
        for link in links:
            print(link)
        print()
        return []
def main():
    """Fetch today's newspaper links, queue them on Pyload, and report."""
    today = datetime.today().strftime('%Y-%m-%d')
    package_name = f"{NEWSPAPER_PREFIX} - {today}"
    newspapers = get_newspaper(NEWSPAPER_PREFIX, 0)  # 0 -> today's edition
    chosen_links = get_sorted_links(newspapers)
    package_ids = handle_links(package_name, chosen_links)
    print(len(package_ids), "link aggiunti a Pyload.")
    print("Premi INVIO per uscire.")
    input()
if __name__ == "__main__":
    # `exit` is a convenience injected by the `site` module and is not
    # guaranteed to exist (e.g. under `python -S`); raising SystemExit is
    # the portable equivalent and behaves identically for a None result.
    raise SystemExit(main())