Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Rutracker direct proxies patch #60

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 10 additions & 3 deletions torrt/base_tracker.py
Original file line number Diff line number Diff line change
Expand Up @@ -160,6 +160,7 @@ def get_response(
allow_redirects: bool = True,
referer: str = None,
cookies: Union[dict, CookieJar] = None,
proxies: dict = None,
query_string: str = None,
as_soup: bool = False

Expand All @@ -179,6 +180,8 @@ def get_response(

:param cookies: cookies to use

:param proxies: proxies to specific request

:param query_string: query string (GET parameters) to add to URL

:param as_soup: whether to return BeautifulSoup object instead of Requests response
Expand All @@ -203,6 +206,7 @@ def get_response(
referer=referer,
allow_redirects=allow_redirects,
cookies=cookies,
proxies=proxies,
)

if result is not None and as_soup:
Expand Down Expand Up @@ -314,10 +318,11 @@ def parse_datetime(self, dt_str: str, fmt: str, *, locale: str = ''):
finally:
setlocale(LC_ALL, old_locale)

def get_torrent_page(self, url: str, *, drop_cache: bool = False) -> BeautifulSoup:
def get_torrent_page(self, url: str, *, proxies: dict = None, drop_cache: bool = False) -> BeautifulSoup:
"""Get torrent page as soup for further data extraction.

:param url:
:param proxies: Proxies to use
:param drop_cache: Do not use cached version if any.

"""
Expand All @@ -331,6 +336,7 @@ def get_torrent_page(self, url: str, *, drop_cache: bool = False) -> BeautifulSo
url,
referer=url,
cookies=self.cookies,
proxies=proxies,
query_string=self.get_query_string(),
as_soup=True
)
Expand Down Expand Up @@ -472,7 +478,7 @@ def get_login_form_data(self, login: str, password: str) -> dict:
def test_configuration(self) -> bool:
return self.login(self.alias)

def login(self, domain: str) -> bool:
def login(self, domain: str, proxies: dict = None) -> bool:
"""Implements tracker login procedure. Returns success bool."""

login_url = self.login_url % {'domain': domain}
Expand Down Expand Up @@ -502,7 +508,8 @@ def login(self, domain: str) -> bool:
response = self.get_response(
login_url, form_data,
allow_redirects=allow_redirects,
cookies=self.cookies
cookies=self.cookies,
proxies=proxies,
)

if not response: # e.g. Connection aborted.
Expand Down
11 changes: 6 additions & 5 deletions torrt/trackers/rutracker.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,19 +28,19 @@ def before_download(self, url: str):
"""Used to perform some required actions right before .torrent download."""
self.cookies['bb_dl'] = self.get_id_from_link(url) # A check that user himself have visited torrent's page ;)

def get_download_link(self, url: str) -> str:
def get_download_link(self, url: str, proxies: dict = None) -> str:
"""Tries to find .torrent file download link at forum thread page and return that one."""

page_soup = self.get_torrent_page(url)
page_soup = self.get_torrent_page(url, proxies=proxies)

domain = self.extract_domain(url)

is_anonymous = self.find_links(url, page_soup, 'register') is not None

if is_anonymous:
self.login(domain)
self.login(domain, proxies=proxies)

page_soup = self.get_torrent_page(url, drop_cache=True)
page_soup = self.get_torrent_page(url, proxies=proxies, drop_cache=True)

download_link = self.find_links(url, page_soup, r'dl\.php')

Expand All @@ -58,7 +58,7 @@ def get_form_token(self, page_soup: BeautifulSoup) -> Optional[str]:
except IndexError:
return

def download_torrent(self, url: str, referer: str = None) -> Optional[bytes]:
def download_torrent(self, url: str, proxies: dict = None, referer: str = None) -> Optional[bytes]:

self.log_debug(f'Downloading torrent file from {url} ...')

Expand All @@ -75,6 +75,7 @@ def download_torrent(self, url: str, referer: str = None) -> Optional[bytes]:
url,
form_data=form_data,
cookies=self.cookies,
proxies=proxies,
query_string=self.get_query_string(),
referer=referer,
)
Expand Down
12 changes: 9 additions & 3 deletions torrt/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,7 @@ def request(
allow_redirects: bool = True,
cookies: dict = None,
headers: dict = None,
proxies: dict = None,
json: bool = None,
silence_exceptions: bool = None,
timeout: int = None,
Expand All @@ -82,6 +83,7 @@ def request(
:param allow_redirects:
:param cookies:
:param headers: Additional headers
:param proxies: Proxies to specific request
Copy link
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I wonder why you use the preposition "to" here and in similar cases above. Shouldn't it be "for a" instead?

:param json: Send and receive data as JSON
:param silence_exceptions: Do not raise exceptions
:param timeout: Override timeout.
Expand All @@ -103,9 +105,13 @@ def request(
if referer:
headers['Referer'] = referer

if not self.tunnel:
# Drop globally set tunnels settings. See toolbox.tunnel().
r_kwargs['proxies'] = {'http': None, 'https': None}
# For using proxy exactly in a specific request
if not proxies:
Copy link
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Let's avoid inverting the flow:

if proxies:
    ...
elif not self.tunnel:
    ...

if not self.tunnel:
# Drop globally set tunnels settings. See toolbox.tunnel().
r_kwargs['proxies'] = {'http': None, 'https': None}
else:
r_kwargs['proxies'] = proxies

if json is None:
json = self.json
Expand Down