import asyncio

import aiohttp
from bs4 import BeautifulSoup  # only needed if get_page_max below is re-enabled
# Cap the number of concurrent requests (coroutines, not threads)
sema = asyncio.Semaphore(100)
# Check whether a link opens normally
async def get_url(url):
    # conn = aiohttp.TCPConnector(limit_per_host=10)
    async with sema:
        # NOTE: aiohttp recommends reusing one ClientSession across many
        # requests; a per-call session works but pays connection-setup cost
        # every time (see the shared-session sketch below)
        async with aiohttp.ClientSession() as session:
            try:
                # timeout=None disables the total timeout for slow hosts
                async with session.get(url, timeout=None) as rep:
                    if rep.status == 200:
                        print('%s success' % url)
                    else:
                        print('%s fail' % url)
            except aiohttp.ClientError:
                # Treat connection-level errors (DNS failure, refused
                # connection, reset) as a failed link instead of crashing
                print('%s fail' % url)
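
# A shared-session variant (a minimal sketch, not part of the original script):
# the caller owns one ClientSession and passes it in, so connections are pooled
# across checks. The name check_url is hypothetical.
async def check_url(session, url):
    async with sema:
        async with session.get(url, timeout=None) as rep:
            print('%s %s' % (url, 'success' if rep.status == 200 else 'fail'))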
# Asynchronously fetch the maximum page number
# async def get_page_max(url):
#     async with aiohttp.ClientSession() as session:
#         async with session.get(url) as rep:
#             if rep.status == 200:
#                 page_soup = BeautifulSoup(await rep.text(), 'html.parser')
#                 page_max = page_soup.find('div', class_='thPages').find_all('a')[-3].text
#                 return page_max
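
# Example entry point (a sketch; the original script ends without one). The
# URL list here is hypothetical; substitute the links you actually want to
# check. Requires Python 3.10+, where a module-level asyncio.Semaphore binds
# lazily to the loop started by asyncio.run().
async def main():
    urls = ['https://example.com/page/%d' % i for i in range(1, 101)]
    # gather() schedules every check at once; sema caps concurrency at 100
    await asyncio.gather(*(get_url(url) for url in urls))

if __name__ == '__main__':
    asyncio.run(main())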