Merge pull request #790 from rix1337/dev
v.20.0.1 - Fix DW link handling
rix1337 authored Jun 30, 2024
2 parents 10b91de + 58c551f commit 0ed87bd
Showing 8 changed files with 24 additions and 10 deletions.
7 changes: 4 additions & 3 deletions feedcrawler/external_sites/feed_search/content_all.py
@@ -22,6 +22,7 @@
 from feedcrawler.providers.common_functions import fullhd_title
 from feedcrawler.providers.common_functions import is_hevc
 from feedcrawler.providers.common_functions import is_retail
+from feedcrawler.providers.common_functions import remove_suffix
 from feedcrawler.providers.common_functions import replace_with_stripped_ascii
 from feedcrawler.providers.myjd_connection import myjd_download
 from feedcrawler.providers.notifications import notify

@@ -77,7 +78,7 @@ def search_imdb(self, desired_rating, feed):
                 shared_state.logger.debug("Fehler beim Abruf von " + post.title + ": Kein Durchsuchbarer Inhalt gefunden.")
                 content = False
             if content:
-                post.title = replace_with_stripped_ascii(post.title).replace(" ", ".")
+                post.title = remove_suffix(replace_with_stripped_ascii(post.title).replace(" ", "."), "_MIRROR")

                 if self.search_imdb_done:
                     shared_state.logger.debug(

@@ -208,7 +209,7 @@ def search_feed(self, feed):
                 shared_state.logger.debug("Fehler beim Abruf von " + post.title + ": Kein Durchsuchbarer Inhalt gefunden.")
                 content = False
             if content:
-                post.title = replace_with_stripped_ascii(post.title).replace(" ", ".")
+                post.title = remove_suffix(replace_with_stripped_ascii(post.title).replace(" ", "."), "_MIRROR")

                 if self.search_regular_done:
                     shared_state.logger.debug(

@@ -641,7 +642,7 @@ def download_imdb(self, key, download_links, source, imdb_id, size, hevc_retail,
             f"{'/Englisch/Retail' if englisch and retail else ''}"
             f"{'/Retail' if not englisch and retail else ''}"
             f"{'/HEVC' if hevc_retail else ''}"
-            f"] - {key} -"
+            f"] - {key} - "
             f"[{site}] - {size} - {source}"
         )
         shared_state.logger.info(log_entry)
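Both call sites in content_all.py now pass the normalized title through remove_suffix(..., "_MIRROR"), so a trailing mirror marker no longer ends up in the search title. A rough sketch of the effect, leaving replace_with_stripped_ascii aside and using a made-up title (the helper itself is added in common_functions.py further down):

def remove_suffix(input_string, suffix):
    # Same logic as the helper introduced in common_functions.py below.
    if input_string.endswith(suffix):
        return input_string[:-len(suffix)]
    return input_string

# Hypothetical feed title, purely for illustration.
title = "Some Movie 2024 German 1080p WEB x264_MIRROR"
print(remove_suffix(title.replace(" ", "."), "_MIRROR"))
# -> Some.Movie.2024.German.1080p.WEB.x264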
@@ -102,6 +102,13 @@ def dw_get_download_links(self, content, title):
                 response = json.loads(post_url(ajax_url, payload))
                 if response["success"]:
                     link = response["data"].split(",")[0]
+
+                    if dw in link:
+                        match = re.search(r'https://' + dw + r'/azn/af\.php\?v=([A-Z0-9]+)(#.*)?', link)
+                        if match:
+                            link = 'https://filecrypt.cc/' + 'Container/' + match.group(1) + '.html' + (
+                                match.group(2) if match.group(2) else '')
+
                     hoster = button.nextSibling.img["src"].split("/")[-1].replace(".png", "")
                     download_links.append([link, hoster])
             except:
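The block added to dw_get_download_links above rewrites DW's af.php redirect links into direct filecrypt.cc container URLs before they are handed to the download logic. A minimal sketch of that transformation, assuming a hypothetical host name and token (both invented for illustration; in the real code dw holds the configured DW hostname):

import re

dw = "dw.example"  # assumption: stand-in for the configured DW host
link = "https://" + dw + "/azn/af.php?v=ABC123#comment"

match = re.search(r'https://' + dw + r'/azn/af\.php\?v=([A-Z0-9]+)(#.*)?', link)
if match:
    link = 'https://filecrypt.cc/' + 'Container/' + match.group(1) + '.html' + (
        match.group(2) if match.group(2) else '')

print(link)  # https://filecrypt.cc/Container/ABC123.html#comment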
6 changes: 6 additions & 0 deletions feedcrawler/providers/common_functions.py
@@ -623,6 +623,12 @@ def unify(string):
     return search_term in release_title


+def remove_suffix(input_string, suffix):
+    if input_string.endswith(suffix):
+        return input_string[:-len(suffix)]
+    return input_string
+
+
 def replace_with_stripped_ascii(string):
     string = string.strip()

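For non-empty suffixes the new helper behaves like str.removesuffix from Python 3.9+: the marker is stripped only when it actually ends the string. A quick check with invented inputs, importing the helper from the module changed above:

from feedcrawler.providers.common_functions import remove_suffix

print(remove_suffix("Title_MIRROR", "_MIRROR"))  # Title
print(remove_suffix("Title", "_MIRROR"))         # Title (unchanged, no trailing marker)
print(remove_suffix("A_MIRROR_B", "_MIRROR"))    # A_MIRROR_B (marker not at the end)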
4 changes: 2 additions & 2 deletions feedcrawler/providers/http_requests/cache_handler.py
@@ -101,10 +101,10 @@ def cached_request(url, method='get', params=None, headers=None, redirect_url=Fa
             allow_sponsors_helper_run = True

             if method == 'post':
-                response = request(url, method="POST", data=params, timeout=10, headers=headers,
+                response = request(url, method="POST", data=params, timeout=60, headers=headers,
                                    cookiejar=cookiejar, proxies=proxies, force_ipv4=force_ipv4)
             else:
-                response = request(url, timeout=10, headers=headers, cookiejar=cookiejar, proxies=proxies,
+                response = request(url, timeout=60, headers=headers, cookiejar=cookiejar, proxies=proxies,
                                    force_ipv4=force_ipv4)

             if response.status_code == 403 or 'id="challenge-body-text"' in response.text:
2 changes: 1 addition & 1 deletion feedcrawler/providers/myjd_connection.py
@@ -62,7 +62,7 @@ def set_device(myjd_user, myjd_pass, myjd_device):
         device.downloadcontroller.get_current_state()  # request forces direct_connection info update
         connection_info = device.check_direct_connection()
         if connection_info["status"]:
-            print("Nutze direkte Verbindung zu JDownloader: " + connection_info["ip"])
+            print(f"JDownloader direkt über {connection_info['ip']} verfügbar.")
         else:
             print("Keine direkte Verbindung zu JDownloader möglich")
         shared_state.set_device(device)
2 changes: 1 addition & 1 deletion feedcrawler/providers/version.py
@@ -8,7 +8,7 @@


 def get_version():
-    return "20.0.0"
+    return "20.0.1"


 def create_version_file():
4 changes: 2 additions & 2 deletions feedcrawler/web_interface/vuejs_frontend/package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion feedcrawler/web_interface/vuejs_frontend/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "feedcrawler-web",
"version": "20.0.0",
"version": "20.0.1",
"type": "module",
"scripts": {
"dev": "vite",
Expand Down
