import requests
from fake_useragent import UserAgent
from bs4 import BeautifulSoup

# Fetch a proxy address from the proxy pool service
# (assumes the endpoint returns a bare "ip:port" string).
proxy_get = requests.get('http://193.112.219.93:5000/get')
proxy = {
    'http': 'http://' + proxy_get.text,
    'https': 'http://' + proxy_get.text,  # the target URL is https, so this key is needed for the proxy to apply
}

# Use a random Chrome user-agent so the request looks like it comes from a browser.
ua = UserAgent()
headers = {
    'user-agent': ua.chrome
}

# 51job search-results page for the keyword "python".
url = "https://search.51job.com/list/000000,000000,0000,00,9,99,python,2,1.html?lang=c&stype=&postchannel=0000&workyear=99&cotype=99&degreefrom=99&jobterm=99&companysize=99&providesalary=99&lonlat=0%2C0&radius=-1&ord_field=0&confirmdate=9&fromType=&dibiaoid=0&address=&line=&specialarea=00&from=&welfare="

html = requests.get(url=url, headers=headers, proxies=proxy)
# 51job pages are GBK-encoded, so decode explicitly before parsing with lxml.
soup = BeautifulSoup(html.content.decode('gbk'), 'lxml')