import requests
from fake_useragent import UserAgent
from bs4 import BeautifulSoup
# Generate a random User-Agent so the request resembles a normal browser.
ua = UserAgent()
# Request headers, including the randomized User-Agent.
headers = {
    'User-Agent': ua.random,
}
# Target page URL.
url = 'http://10.8.240.254/root/caserules'
# NOTE(review): hard-coded credential placeholders — load these from
# environment variables or a secrets store before real use.
username = 'your_username'
password = 'your_password'
# HTTP Basic auth credentials.
auth = (username, password)
# Issue the request. The timeout prevents the script from hanging
# forever on an unreachable or unresponsive host; network-level
# failures (DNS, refused connection, timeout) are reported instead of
# crashing with a traceback.
try:
    response = requests.get(url, headers=headers, auth=auth, timeout=10)
except requests.RequestException as exc:
    print('Request failed:', exc)
else:
    # Check the response status code.
    if response.status_code == 200:
        # Parse the page with BeautifulSoup.
        soup = BeautifulSoup(response.content, 'html.parser')
        # Data-extraction logic goes here.
        # Example: collect all <h2> headings and print their text.
        titles = soup.find_all('h2')
        for title in titles:
            print(title.text)
    else:
        print('Failed to retrieve the webpage. Status code:', response.status_code)