// Requires: using System; using System.IO; using System.Net;
private string RequestGet(string TheURL, string TheProxy)
{
    Uri uri = new Uri(TheURL);
    HttpWebRequest request = (HttpWebRequest)WebRequest.Create(uri);
    string page;
    try
    {
        request.KeepAlive = false;
        request.ProtocolVersion = HttpVersion.Version10;
        request.Method = "GET";
        request.ContentType = "application/x-www-form-urlencoded";
        // Use the supplied proxy if one was given, otherwise fall back to the default proxy.
        request.Proxy = string.IsNullOrEmpty(TheProxy)
            ? WebRequest.DefaultWebProxy
            : new WebProxy(TheProxy);
        // Allow automatic redirects from redirect headers.
        request.AllowAutoRedirect = true;
        // Follow a maximum of 10 automatic redirects.
        request.MaximumAutomaticRedirections = 10;
        // 60-second timeout for the request.
        request.Timeout = (int)new TimeSpan(0, 0, 60).TotalMilliseconds;
        // Give the crawler a browser-like name.
        request.UserAgent = "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)";
        //request.UserAgent = "Mozilla/3.0 (compatible; My Browser/1.0)";
        // Dispose of the response and its streams once the body has been read.
        using (HttpWebResponse response = (HttpWebResponse)request.GetResponse())
        using (Stream responseStream = response.GetResponseStream())
        using (StreamReader readStream = new StreamReader(responseStream, System.Text.Encoding.Default))
        {
            page = readStream.ReadToEnd();
        }
    }
    catch (Exception ee)
    {
        page = "Fail message : " + ee.Message;
    }
    return page;
}
This post shows an ASP.NET snippet that defines a RequestGet method for fetching a page's content over HTTP GET. The code configures the request's properties, such as the protocol version, request method, and timeout, handles automatic redirects and exceptions, and finally returns either the page content or an error message.
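For context, a minimal call site might look like the sketch below. The URL and proxy address are placeholders, not part of the original post; passing an empty proxy string falls back to the default proxy.

// Hypothetical usage: fetch a page through the default proxy and print it.
string html = RequestGet("http://www.example.com/", "");
Console.WriteLine(html);

// Or route the request through an explicit proxy (placeholder address).
string viaProxy = RequestGet("http://www.example.com/", "http://127.0.0.1:8080");
Console.WriteLine(viaProxy);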