asyncio
- 单独使用socket,可以实现协程的IO操作(需要字节封装Http)
import asyncio
async def fetch_async(host, url='/'):
    """Fetch *url* from *host* over a raw TCP socket using asyncio streams.

    Opens a connection on port 80, writes a hand-built HTTP/1.0 GET
    request, reads the entire response, and prints it.

    NOTE: rewritten from the generator-based ``@asyncio.coroutine`` /
    ``yield from`` style, which was deprecated in Python 3.8 and removed
    in Python 3.11; ``async def`` / ``await`` is the supported form.
    """
    print(host, url)
    reader, writer = await asyncio.open_connection(host, 80)
    # HTTP/1.0 (no keep-alive) makes the server close the connection when
    # done, so reader.read() below returns the full body at EOF.
    request_header_content = """GET %s HTTP/1.0\r\nHost: %s\r\n\r\n""" % (url, host,)
    request_header_content = bytes(request_header_content, encoding='utf-8')
    writer.write(request_header_content)
    await writer.drain()
    text = await reader.read()
    print(host, url, text)
    writer.close()
# Create both request coroutines up front, then run them concurrently.
tasks = [
    fetch_async('www.cnblogs.com', '/wupeiqi/'),
    fetch_async('dig.chouti.com', '/pic/show?nid=4073644713430508&lid=10273091')
]


async def _run_all(coros):
    """Await every coroutine in *coros* concurrently and return the results."""
    return await asyncio.gather(*coros)


# asyncio.run() creates, runs and closes the event loop for us.  The manual
# get_event_loop()/run_until_complete()/close() sequence is deprecated since
# Python 3.10 when no loop is already running.
results = asyncio.run(_run_all(tasks))
- 配合aiohttp,可以实现协程发送HTTP请求
import aiohttp
import asyncio
async def fetch_async(url):
    """GET *url* with aiohttp and print the response object.

    NOTE: rewritten from ``@asyncio.coroutine`` / ``yield from`` (removed
    in Python 3.11).  ``aiohttp.request`` is entered as an async context
    manager — the only supported usage in aiohttp 3.x — which also
    guarantees the connection is released even if printing raises.
    """
    print(url)
    async with aiohttp.request('GET', url) as response:
        # data = await response.read()
        # print(url, data)
        print(url, response)
tasks = [fetch_async('http://www.google.com/'), fetch_async('http://www.chouti.com/')]


async def _gather_all(coros):
    """Run every request coroutine concurrently and collect the results."""
    return await asyncio.gather(*coros)


# asyncio.run() manages the event loop's whole lifecycle; the manual
# get_event_loop()/run_until_complete()/close() dance is deprecated (3.10+).
results = asyncio.run(_gather_all(tasks))
- 配合requests,可以实现协程发送request请求
import asyncio
import requests
async def fetch_async(func, *args):
    """Run the blocking callable ``func(*args)`` in the default thread pool.

    Bridges a synchronous HTTP library (e.g. ``requests.get``) into asyncio
    by delegating the blocking call to an executor thread, awaiting the
    result, and printing the response's url and content.

    NOTE: rewritten from ``@asyncio.coroutine`` / ``yield from`` (removed in
    Python 3.11); ``get_running_loop()`` replaces ``get_event_loop()``,
    which is deprecated when called from inside a coroutine.
    """
    loop = asyncio.get_running_loop()
    # None -> use the loop's default ThreadPoolExecutor.
    future = loop.run_in_executor(None, func, *args)
    response = await future
    print(response.url, response.content)
tasks = [
    fetch_async(requests.get, 'http://www.cnblogs.com/wupeiqi/'),
    fetch_async(requests.get, 'http://dig.chouti.com/pic/show?nid=4073644713430508&lid=10273091')
]


async def _await_all(coros):
    """Await every fetch coroutine concurrently."""
    return await asyncio.gather(*coros)


# asyncio.run() replaces the deprecated manual event-loop management
# (get_event_loop / run_until_complete / close).
results = asyncio.run(_await_all(tasks))
gevent
- 使用gevent完成异步IO操作
import gevent
import requests
from gevent import monkey
# Patch the standard library (socket, ssl, time, ...) so that the blocking
# calls made by `requests` cooperatively yield to other greenlets.  Must run
# before any sockets are created or modules caching socket functions load.
monkey.patch_all()
def task(method, url, req_kwargs):
    """Issue a single HTTP request and print the final url plus the body."""
    print(method, url, req_kwargs)
    resp = requests.request(method=method, url=url, **req_kwargs)
    print(resp.url, resp.content)
# ##### 发送请求 #####
# gevent.joinall([
# gevent.spawn(task, method='get', url='https://www.python.org/', req_kwargs={}),
# gevent.spawn(task, method='get', url='https://www.yahoo.com/', req_kwargs={}),
# gevent.spawn(task, method='get', url='https://github.com/', req_kwargs={}),
# ])
# ##### 发送请求(协程池控制最大协程数量) #####
from gevent.pool import Pool
# Cap concurrency at five greenlets; pool.spawn blocks once the pool is full.
pool = Pool(5)
_targets = [
    ('get', 'https://www.python.org/'),
    ('get', 'https://www.yahoo.com/'),
    ('get', 'https://www.github.com/'),
]
# joinall blocks until every spawned greenlet has finished.
gevent.joinall([
    pool.spawn(task, method=m, url=u, req_kwargs={}) for m, u in _targets
])
joinall 会阻塞,直到参数列表中的所有 greenlet(协程)运行完毕
- 使用grequests完成IO操作
import grequests
# Build the (not yet sent) request objects first; grequests.map sends them.
_specs = [
    dict(url='http://httpbin.org/delay/1', timeout=0.001),
    dict(url='http://fakedomain/'),
    dict(url='http://httpbin.org/status/500'),
]
request_list = [grequests.get(**spec) for spec in _specs]

# ##### 执行并获取响应列表 #####
# At most 5 requests in flight at a time; failures yield None entries.
response_list = grequests.map(request_list, size=5)
print(response_list)
Twisted
#!/usr/bin/env python
# -*- coding:utf-8 -*-
from twisted.internet import defer
from twisted.web.client import getPage
from twisted.internet import reactor
def one_done(arg):
    """Per-request callback: fired with the downloaded page body of one url."""
    print(arg)
def all_done(arg):
    """DeferredList callback: every download finished, so stop the reactor.

    Stopping the reactor is what lets the blocking reactor.run() call at
    the bottom of the script return.
    """
    print('done')
    reactor.stop()
@defer.inlineCallbacks
def task(url):
    """Start downloading *url* and yield until the transfer completes.

    getPage fires the HTTP request immediately when called; one_done is
    attached as the per-request callback before the deferred is handed
    back to the inlineCallbacks trampoline.
    """
    d = getPage(bytes(url, encoding='utf8'))
    d.addCallback(one_done)
    yield d
url_list = [
    'http://www.cnblogs.com',
    'http://www.cnblogs.com',
    'http://www.cnblogs.com',
    'http://www.cnblogs.com',
]

# Each task() call has already fired its HTTP request by the time it
# returns; collect the per-url deferreds so we can wait on all of them.
defer_list = [task(url) for url in url_list]

# DeferredList fires once every deferred inside it has a result;
# all_done then stops the reactor so run() below can return.
d = defer.DeferredList(defer_list)
d.addBoth(all_done)
reactor.run()  # blocks until all_done() calls reactor.stop()
Tornado
与twisted类似
#!/usr/bin/env python
# -*- coding:utf-8 -*-
from tornado.httpclient import AsyncHTTPClient
from tornado.httpclient import HTTPRequest
from tornado import ioloop
# Number of HTTP requests still outstanding; set by func(), decremented by
# handle_response(), and used to decide when to stop the IOLoop.
COUNT = 0


def handle_response(response):
    """Completion callback for one fetch; stops the IOLoop after the last.

    Plays the role of twisted's DeferredList: instead of a deferred
    aggregate we count down the responses ourselves and stop the loop
    when the counter reaches zero.
    """
    global COUNT
    COUNT -= 1
    if not response.error:
        print(response.body)
    else:
        print("Error:", response.error)
    # Same idea as the twisted example's all_done:
    # ioloop.IOLoop.current().stop()
    if COUNT == 0:
        ioloop.IOLoop.current().stop()
def func():
    """Queue one asynchronous fetch per url; COUNT tracks how many are pending."""
    url_list = [
        'http://www.baidu.com',
        'http://www.bing.com',
    ]
    global COUNT
    COUNT = len(url_list)
    for url in url_list:
        print(url)
        # fetch() is non-blocking; handle_response runs on the IOLoop when
        # this transfer completes (same role as the twisted callbacks above).
        AsyncHTTPClient().fetch(HTTPRequest(url), handle_response)
# Schedule func() for the first loop iteration, then block in start()
# until handle_response() stops the loop after the final response arrives.
io_loop = ioloop.IOLoop.current()
io_loop.add_callback(func)
io_loop.start()