[M] Use a usable proxy until it becomes unusable

This commit is contained in:
Ching 2021-06-11 11:26:51 +08:00
parent e8bf2c2044
commit 86a5aa055b

View File

@ -52,22 +52,30 @@ session = requests.session()
def get_proxy():
    """Fetch one HTTPS-capable proxy from the proxy-pool service.

    Returns the decoded JSON payload from the pool's ``/get/`` endpoint
    (presumably a dict containing a ``proxy`` key — confirm against the
    proxy-pool API).
    """
    pool_endpoint = "http://118.24.52.95:5010/get/?type=https"
    response = requests.get(pool_endpoint)
    return response.json()
usable_proxy = None
def toot(proxy=None): def toot(proxy=None):
jandan_pic_url = 'https://jandan.net/pic' jandan_pic_url = 'https://jandan.net/pic'
page_count = 2 page_count = 2
status_list = [] status_list = []
id_list = load_id() id_list = load_id()
global usable_proxy
if not id_list: if not id_list:
id_list = [] id_list = []
session.headers.update(ramdom_ua()) session.headers.update(ramdom_ua())
session.proxies.update({"https": "https://{}".format(proxy)}) if proxy:
session.proxies.update({"https": "https://{}".format(proxy)})
while page_count and jandan_pic_url: while page_count and jandan_pic_url:
resp = session.get(jandan_pic_url, timeout=2) resp = session.get(jandan_pic_url, timeout=2)
if resp.status_code != 200: if resp.status_code != 200:
usable_proxy = None
if resp.status_code == 403:
raise ValueError('http status code 403')
time.sleep(10) time.sleep(10)
continue continue
usable_proxy = proxy
soup = BeautifulSoup(resp.content, 'html.parser') soup = BeautifulSoup(resp.content, 'html.parser')
jandan_pic_url = soup.find(class_='previous-comment-page')['href'] jandan_pic_url = soup.find(class_='previous-comment-page')['href']
page_count -= 1 page_count -= 1
@ -134,8 +142,10 @@ def toot(proxy=None):
while True: while True:
try: try:
logger.info('%s !!!! start', time.asctime()) logger.info('%s !!!! start', time.asctime())
proxy = get_proxy() if not usable_proxy:
toot(proxy=proxy['proxy']) proxy = get_proxy()
usable_proxy = proxy.get('proxy')
toot(proxy=usable_proxy)
time.sleep(20) time.sleep(20)
except Exception as ex: except Exception as ex:
logger.error(traceback.format_exc()) logger.error(traceback.format_exc())