urllib 记录
import urllib.request

# --- GET without custom headers -------------------------------------------
# Plain urlopen(): no User-Agent is attached, so the server sees a bare
# Python client.
url = "http://www.baidu.com/"
# Use the response as a context manager so the connection is closed even if
# read()/decode() raises (the original leaked the response object).
with urllib.request.urlopen(url) as response:
    content = response.read().decode('utf-8')
print(content)

# --- GET with a User-Agent header -----------------------------------------
# Wrapping the URL in a Request lets us attach headers; a browser-like
# User-Agent string makes the request look like a normal browser visit.
url = 'https://www.baidu.com'
headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36"
}
request = urllib.request.Request(url=url, headers=headers)
with urllib.request.urlopen(request) as response:
    # Normalized codec name to 'utf-8' (original mixed 'utf8'/'utf-8').
    content = response.read().decode('utf-8')
print(content)
get请求的quote方法
url = 'https://www.baidu.com/s?tn=49055317_28_hao_pg&ie=utf-8&wd='
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36'
}
import urllib.parse

# quote() percent-encodes a single value (UTF-8 by default) so non-ASCII
# text such as Chinese can be embedded in a URL query string.
name = urllib.parse.quote('周杰伦')
print(name)  # -> %E5%91%A8%E6%9D%B0%E4%BC%A6

url = url + name
request = urllib.request.Request(url=url, headers=headers)
# Context manager closes the HTTP response (the original left it open).
with urllib.request.urlopen(request) as response:
    content = response.read().decode('utf-8')
print(content)
对字典数据进行urlencode编码就是将json数据转为urlencode编码
import urllib.parse

# urlencode() converts a mapping into an application/x-www-form-urlencoded
# query string, percent-encoding every key and value as UTF-8 — i.e. it
# turns dict-style data into the wd=...&sex=... form used in URLs.
query_params = {
    'wd': '周杰伦',
    'sex': '男'
}
encoded_query = urllib.parse.urlencode(query_params)
print(encoded_query)
get请求的urlencode方法使用实例
import urllib.parse
import urllib.request

# GET request whose query string is built with urlencode() from a dict,
# so multiple (possibly non-ASCII) parameters are encoded in one call.
base_url = "https://www.baidu.com/s?"
data = {
    'wd': '周杰伦',
    'sex': '男',
    'location': '中国台湾省'
}
# urlencode percent-encodes keys/values and joins them with '&'.
new_data = urllib.parse.urlencode(data)
url = base_url + new_data
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36'
}
request = urllib.request.Request(url=url, headers=headers)
# Context manager closes the HTTP response (the original left it open).
with urllib.request.urlopen(request) as response:
    content = response.read().decode('utf-8')
print(content)
urllib_post请求方法使用
获取百度翻译接口
百度翻译 sug 接口：POST https://fanyi.baidu.com/sug，表单参数 kw 为待翻译文本，返回 JSON。
urllib post请求百度翻译
import json
import urllib.request
import urllib.parse

# POST to Baidu Translate's "sug" (suggestion) endpoint; passing `data=`
# to Request makes urllib send a POST instead of a GET.
url = 'https://fanyi.baidu.com/sug'
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36'
}
data = {
    'kw': '我爱你'
}
# A POST body must be bytes: urlencode the form, then encode to UTF-8.
data = urllib.parse.urlencode(data).encode('utf-8')
request = urllib.request.Request(url=url, data=data, headers=headers)
# Context manager closes the HTTP response (the original left it open).
with urllib.request.urlopen(request) as response:
    content = response.read().decode('utf-8')
print(type(content))  # <class 'str'> — raw JSON text
content = json.loads(content)  # parse JSON text into Python objects
print(type(content))  # <class 'dict'>
print(content)
urllib post请求百度翻译之详细翻译
import urllib.request
import urllib.parse
import json  # hoisted from mid-script to the top of the snippet

# POST to Baidu Translate's full translation endpoint (v2transapi).
url = 'https://fanyi.baidu.com/v2transapi?from=zh&to=en'
headers = {
    # NOTE(review): for this endpoint the Cookie (a logged-in session) is the
    # credential that matters; it is account-specific and will expire.
    'Cookie': 'BIDUPSID=EEB732549A53EA0043F3E08B1641A2BD; PSTM=1668129337; BAIDUID=EEB732549A53EA00E6279CEFC52A05E2:FG=1; BDUSS=2JmblRGRk1mfmNMa2JJY0ZDNnRIcGxoTmYzaWNPVjFaOG13YUFJR3Zqdk1qdHhqSVFBQUFBJCQAAAAAAAAAAAEAAACn7Ch4Vm9saXRpb26yu8Lkz6YAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMwBtWPMAbVjT; BDUSS_BFESS=2JmblRGRk1mfmNMa2JJY0ZDNnRIcGxoTmYzaWNPVjFaOG13YUFJR3Zqdk1qdHhqSVFBQUFBJCQAAAAAAAAAAAEAAACn7Ch4Vm9saXRpb26yu8Lkz6YAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMwBtWPMAbVjT; APPGUIDE_10_0_2=1; REALTIME_TRANS_SWITCH=1; FANYI_WORD_SWITCH=1; HISTORY_SWITCH=1; SOUND_SPD_SWITCH=1; SOUND_PREFER_SWITCH=1; BAIDUID_BFESS=EEB732549A53EA00E6279CEFC52A05E2:FG=1; BA_HECTOR=85818l0k0480800h00052l671hsen7l1l; ZFY=soomrl29iucGWlulnuViVnKN4R2AYdRUqwhqjQGdu5E:C; BDORZ=FFFB88E999055A3F8A630C64834BD6D0; BDRCVFR[GHvuHTY4eos]=thN3igd4QH3uhuMuLf8mvqV; delPer=0; PSINO=6; H_PS_PSSID=; Hm_lvt_64ecd82404c51e03dc91cb9e8c025574=1673425168,1674032887; Hm_lpvt_64ecd82404c51e03dc91cb9e8c025574=1674032919; ab_sr=1.0.1_YTFiNGRkZWYzMGM3NmMwOTI4OWUwYTc0MDNlNzU2MzE1MDQzNDBkNDYyYTkzYzQzZGQ4MmZhYWU0MWU2MWFlMTNmNWEzMTVkMzI5YjEyMjA5YzE5MWEwOWI1MTZmYWZjMzczY2IzNjNmZTU3ZDZhMzg0ZGY4ZmJkNTYxNjQ1MjkwYjA1NmMyODNjODhmNzFmNmY1Mzk4NmQwOTkyMzc1M2JlZmQxNmMyYzQ5NWFmMDg1YjNhZTkxNTc1MzA2NTFi; Hm_lvt_246a5e7d3670cfba258184e42d902b31=1674032921; Hm_lpvt_246a5e7d3670cfba258184e42d902b31=1674032922',
}
data = {
    'from': 'zh',
    'to': 'en',
    'query': '我是谁',
    'transtype': 'realtime',
    'simple_means_flag': 3,
    # NOTE(review): 'sign' and 'token' look like session-specific values
    # computed by the site's JavaScript for this exact query/cookie pair —
    # presumably they must be regenerated for other inputs; confirm before reuse.
    'sign': '325815.7046',
    'token': '3255152326bd7f84caa845278f282c2c',
    'domain': 'common'
}
# A POST body must be bytes: urlencode the form, then encode to UTF-8.
data = urllib.parse.urlencode(data).encode('utf-8')
request = urllib.request.Request(url=url, data=data, headers=headers)
# Context manager closes the HTTP response (the original left it open).
with urllib.request.urlopen(request) as response:
    content = response.read().decode('utf-8')
print(content)
obj = json.loads(content)  # parse the JSON reply into Python objects
print(obj)