 MyResolver.py |  2 +-
 Overpost.py   |  4 +---
 main.py       | 24 ++++++++++++++++--------
 3 files changed, 18 insertions(+), 12 deletions(-)
diff --git a/MyResolver.py b/MyResolver.py
index cb654ba..03ddfe3 100644
--- a/MyResolver.py
+++ b/MyResolver.py
@@ -5,7 +5,7 @@ class HostHeaderSSLAdapter(requests.adapters.HTTPAdapter):
         import random
         ips = [
             '104.16.89.20',  # CloudFlare
-            '151.101.2.109',  # Fastly
+            #'151.101.2.109',  # Fastly
         ]
         resolutions = {
             'overpost.biz': random.choice(ips),
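With the Fastly address commented out, every lookup for overpost.biz now pins to the single Cloudflare IP. For context, a minimal sketch of the DNS-pinning idea behind HostHeaderSSLAdapter; the resolve() helper and the verify=False shortcut are illustrative, not the repo's exact adapter logic:

import random
import requests

PINNED_IPS = ['104.16.89.20']  # CloudFlare; the Fastly entry is disabled above

def resolve(hostname):
    # Map known hosts to a pinned edge IP; pass anything else through.
    resolutions = {'overpost.biz': random.choice(PINNED_IPS)}
    return resolutions.get(hostname, hostname)

def get(url):
    # Connect to the pinned IP but keep the original hostname in the Host
    # header. An HTTPAdapter subclass (as in MyResolver.py) is the cleaner
    # route for HTTPS, since it can also handle SNI and certificate checks;
    # verify=False here only keeps the sketch self-contained.
    host = url.split('/')[2]
    pinned = url.replace(host, resolve(host), 1)
    return requests.get(pinned, headers={'Host': host}, verify=False)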
diff --git a/Overpost.py b/Overpost.py
index 2448319..de4539a 100644
--- a/Overpost.py
+++ b/Overpost.py
@@ -3,9 +3,7 @@ from datetime import datetime
 from re import compile
 import os
 import feedparser
-from dotenv import load_dotenv
 from MyResolver import get
-load_dotenv()
 
 RSS_URL = os.getenv("RSS_URL") or os.path.join(".", "rss.xml")
 N_LINKS_TO_REMOVE = os.getenv("N_LINKS_TO_REMOVE") or 2
@@ -67,7 +65,7 @@ def parse_entry(entry): # entry = day
 
 def handle_url(url):
     if url.startswith("http"):
-        return get(url)
+        return get(url).text
     else:
         return url
 
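The return type is the point of this hunk: MyResolver.get() hands back a requests.Response, while callers of handle_url() expect a string (either the fetched document or, via the else branch, the local path passed through unchanged). Calling .text decodes the body. A quick illustration with plain requests:

import requests

resp = requests.get("https://example.com/rss.xml")  # stand-in for MyResolver.get
body = resp.text  # str: the body decoded using the response's encoding
# resp itself is a Response object; the parsing code downstream needs the
# document text (or a URL/path), not the Response wrapper.
assert isinstance(body, str)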
diff --git a/main.py b/main.py
index 29c70e1..5e3fca7 100644
--- a/main.py
+++ b/main.py
@@ -1,8 +1,11 @@
-from Overpost import get_newspaper
-from MyPyload import Pyload
+
 from urllib.error import URLError
-from os import getenv
 from datetime import datetime
+from os import getenv
+from dotenv import load_dotenv
+load_dotenv()
+from Overpost import get_newspaper
+from MyPyload import Pyload
 
 NEWSPAPER_PREFIX = getenv("NEWSPAPER_PREFIX") or ""
 HOST_PREFERENCE = [ 'katfile.com', 'rapidgator.net', 'www.easybytez.com' ]
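The reshuffled imports are the functional part of this hunk: Overpost.py reads RSS_URL and N_LINKS_TO_REMOVE at import time, so load_dotenv() (removed from Overpost.py above) must populate os.environ before the `from Overpost import get_newspaper` line runs. A small sketch of the failure mode, assuming a .env file that defines RSS_URL:

import os

print(os.getenv("RSS_URL"))   # None: .env not loaded yet, so Overpost would
                              # silently fall back to ./rss.xml

from dotenv import load_dotenv
load_dotenv()                 # copies .env entries into os.environ

print(os.getenv("RSS_URL"))   # now set; any module imported after this point
                              # sees it, even in module-level os.getenv() calls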
@@ -19,14 +22,18 @@ def scroll_list(array, buffer=1000):
 def get_host(link):
     return link.split("/")[2]
 
-def filter_links(links, host):
+def filter_links(links, hosts):
+    host = next(hosts)
     for link in links:
+        print(link, host)
         if get_host(link) == host:
             return link
+    return filter_links(links, hosts)
+        
         
 def get_sorted_links(dictionary):
     hosts = scroll_list(HOST_PREFERENCE)
-    return [ filter_links(links, next(hosts)) for _, links in dictionary.items() ]
+    return [ filter_links(links, hosts) for _, links in dictionary.items() ]
 
 def download_link(connection, name, link):
     return connection.addPackage(name=name, links=[link])
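filter_links() now receives the hosts generator itself instead of a single pre-drawn host: when no link matches the current host, it recurses with the next preference, so each day's links fall through HOST_PREFERENCE until something matches (the added `print(link, host)` is debug output). Note that the recursion surfaces a StopIteration once scroll_list() runs dry. An equivalent iterative form, assuming scroll_list() simply yields hosts in preference order:

def filter_links_iter(links, hosts):
    # Walk the preferred hosts in order; return the first link served by one.
    for host in hosts:
        for link in links:
            if link.split("/")[2] == host:  # same check as get_host()
                return link
    return None  # preferences exhausted, nothing matched

The iterative version avoids unbounded recursion depth and returns None instead of raising when no preferred host serves any link.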
@@ -38,9 +45,10 @@ def handle_links(name, links):
     except URLError:
         print("\nConnessione a Pyload rifiutata.")
 
-    print("Link da aggiungere manualmente:\n")
-    print("\n".join(links))
-    print("")
+    print(len(links), "link da aggiungere manualmente:\n")
+    for link in links:
+        print(link)
+    print()
     return []
 
 def main():