Skip to content

Commit

Permalink
Initial rotating proxy implementation
Browse files Browse the repository at this point in the history
  • Loading branch information
andreademasi committed Jan 11, 2022
1 parent e0bae9e commit 43c5403
Show file tree
Hide file tree
Showing 4 changed files with 125 additions and 2 deletions.
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
requests==2.25.1
gate_api==4.22.2
PyYAML==6.0
PySocks==1.7.1
pytest==6.2.5
5 changes: 5 additions & 0 deletions src/gateio_new_coins_announcements_bot/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
from gateio_new_coins_announcements_bot.store_order import store_order
from gateio_new_coins_announcements_bot.trade_client import get_last_price
from gateio_new_coins_announcements_bot.trade_client import place_order
import rotating_proxy

# To add a coin to ignore, add it to the json array in old_coins.json
globals.old_coins = load_old_coins()
Expand All @@ -41,6 +42,9 @@
else:
session = {}

# Init proxy fetching
rotating_proxy.init_proxy()

# Keep the supported currencies loaded in RAM so no time is wasted fetching
# currencies.json from disk when an announcement is made
logger.debug("Starting get_all_currencies")
Expand Down Expand Up @@ -497,6 +501,7 @@ def main():
search_and_update()
except KeyboardInterrupt:
logger.info("Stopping Threads")
rotating_proxy.event.set()
globals.stop_threads = True
globals.buy_ready.set()
globals.sell_ready.set()
Expand Down
16 changes: 14 additions & 2 deletions src/gateio_new_coins_announcements_bot/new_listings_scraper.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
from gateio_new_coins_announcements_bot.load_config import load_config
from gateio_new_coins_announcements_bot.logger import logger
from gateio_new_coins_announcements_bot.store_order import load_order
import rotating_proxy

config = load_config("config.yml")
client = load_gateio_creds("auth/auth.yml")
Expand Down Expand Up @@ -47,10 +48,21 @@ def get_announcement():
random.shuffle(queries)
logger.debug(f"Queries: {queries}")
request_url = (
f"https://www.binancezh.com/gateway-api/v1/public/cms/article/list/query"
f"http://www.binance.com/gateway-api/v1/public/cms/article/list/query"
f"?{queries[0]}&{queries[1]}&{queries[2]}&{queries[3]}&{queries[4]}&{queries[5]}"
)
latest_announcement = requests.get(request_url)
if rotating_proxy.is_ready():
proxy = rotating_proxy.get_proxy()
print(f"Using proxy: {proxy}")
try:

latest_announcement = requests.get(
request_url, proxies={"http": "socks5://" + proxy}
)
except Exception as e:
logger.error(e)
else:
latest_announcement = requests.get(request_url)
try:
logger.debug(f'X-Cache: {latest_announcement.headers["X-Cache"]}')
except KeyError:
Expand Down
105 changes: 105 additions & 0 deletions src/rotating_proxy.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,105 @@
import random
from typing import Callable
import requests
import threading
import time
import itertools

import urllib.request
import gateio_new_coins_announcements_bot.globals as globals
from gateio_new_coins_announcements_bot.logger import logger


# Fetched proxies keyed by "host:port"; a dict is used as an ordered set so
# checker() can re-insert working entries by key.
_proxy_list = {}
# Round-robin iterator (itertools.cycle) over _proxy_list keys; stays None
# until the first successful fetch, so call is_ready() before get_proxy().
_proxy = None
# Wakes/stops the background fetch loop early (set from main.py on shutdown).
event = threading.Event()


def init_proxy():
    """Spawn the background thread that refreshes the proxy pool.

    The thread runs ``_fetch_proxies`` through ``_every`` once every
    ten minutes until the bot signals shutdown.
    """
    refresh_interval = 60 * 10  # seconds between proxy refreshes
    worker = threading.Thread(target=lambda: _every(refresh_interval, _fetch_proxies))
    worker.start()


def _fetch_proxies():
    """Refresh the module-level proxy pool from proxyscan.io.

    Downloads up to 20 recently checked SOCKS5 proxies, replaces
    ``_proxy_list`` with them, and resets the round-robin iterator
    ``_proxy``. On a network failure the function logs the error and
    returns, keeping the previous pool intact (the original cleared the
    pool first and then crashed with a NameError on the undefined
    ``proxy_res``).
    """
    global _proxy_list
    global _proxy
    logger.info("Fetching proxies...")
    try:
        proxy_res = requests.get(
            "https://www.proxyscan.io/api/proxy?last_check=180&limit=20&type=socks5&format=txt&ping=1000"
        ).text
    except requests.exceptions.RequestException as e:
        # Bail out instead of falling through to an undefined variable;
        # the old pool (possibly empty) stays usable until the next run.
        logger.error(e)
        return

    # Filter out blank lines (the response ends with "\n") so is_ready()
    # never reports a pool containing an empty proxy string.
    _proxy_list = {p: p for p in proxy_res.split("\n") if p.strip()}
    logger.info(f"Fetched {len(_proxy_list)} proxies")
    _proxy = itertools.cycle(_proxy_list.keys())


def get_proxy() -> str:
    """Return the next proxy address in round-robin order.

    Only valid after ``is_ready()`` returns True; before the first
    successful fetch ``_proxy`` is still None and ``next`` raises
    TypeError.
    """
    candidate = next(_proxy)
    return candidate


def is_ready() -> bool:
    """Return True once at least one proxy has been fetched."""
    return bool(_proxy_list)


# can be generalized and moved to separate file
def _every(delay: int, task: Callable):
global event
next_time = time.time() + delay
while not globals.stop_threads:
event.wait(max(0, next_time - time.time()))
try:
task()
except Exception:
logger.error("Problem while fetching proxies")
# skip tasks if we are behind schedule:
next_time += (time.time() - next_time) // delay * delay + delay
logger.info(f"Proxies fetching thread has stopped.")


def checker(proxy):
    """Probe *proxy* against Binance and keep it in ``_proxy_list`` if it works.

    Currently unused: the call site in ``_fetch_proxies`` is commented out.
    Installs a process-wide HTTPS proxy handler via urllib, times one
    request through it, and re-inserts the proxy into the pool on success.
    """
    global _proxy_list
    user_agent = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/37.0.2062.94 Chrome/37.0.2062.94 Safari/537.36"
    site = "https://binance.com/"
    proxy_support = urllib.request.ProxyHandler({"https": proxy})
    opener = urllib.request.build_opener(proxy_support)
    # NOTE(review): install_opener mutates global urllib state — every later
    # urlopen in the process goes through this proxy; confirm that is intended.
    urllib.request.install_opener(opener)
    # Bug fix: the original prepended "https://" to a URL that already carried
    # the scheme, producing the invalid "https://https://binance.com/".
    req = urllib.request.Request(site)
    req.add_header("User-Agent", user_agent)
    try:
        start_time = time.time()
        # NOTE(review): timeout is in *seconds*, so 1000 waits ~17 minutes;
        # presumably milliseconds were intended — confirm and shorten.
        urllib.request.urlopen(req, timeout=1000)
        # Bug fix: the original called ``time()`` on the module object,
        # which raises TypeError on the success path.
        end_time = time.time()
        time_taken = end_time - start_time
        print("%s works!" % proxy)
        print("time: " + str(time_taken))
        print("user_agent: " + user_agent + "\n")
        _proxy_list[proxy] = proxy
        return
    except Exception as e:
        print(e)
    print("%s does not respond.\n" % proxy)
    return


# Required for populating the proxy list when starting bot.
# NOTE(review): this runs at import time, so merely importing this module
# performs a blocking network request before init_proxy() is ever called —
# confirm that is intended.
_fetch_proxies()

0 comments on commit 43c5403

Please sign in to comment.