2022-11-25 16:33

When writing a crawler you will often run into this situation: scraping a site works fine at first, but after a while the pages start coming back with errors such as 403 Forbidden. This happens when the server's anti-scraping mechanism kicks in: if a single IP address hits the server too many times within a short period, the server refuses to serve that IP and returns an error. An IP proxy makes your requests appear to the server as if they were coming from different IP addresses.
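
As a quick illustration of what such a block looks like from the client side, here is a small sketch using .NET's HttpWebRequest (the same API as the demo further down). It calls httpbin.org's /status/403 test endpoint purely as a stand-in for a site that has started refusing your IP; the endpoint choice and class name are just assumptions for demonstration:

using System;
using System.Net;

class ForbiddenCheck
{
    static void Main()
    {
        // Stand-in for a page that has begun rejecting this IP.
        var request = (HttpWebRequest)WebRequest.Create("http://httpbin.org/status/403");
        try
        {
            using (var response = (HttpWebResponse)request.GetResponse())
            {
                Console.WriteLine("Status: " + response.StatusCode);
            }
        }
        catch (WebException ex)
        {
            // HttpWebRequest reports non-success status codes as a WebException;
            // a blocked crawler typically lands here with 403 Forbidden.
            var resp = ex.Response as HttpWebResponse;
            if (resp != null)
            {
                Console.WriteLine("Blocked: " + (int)resp.StatusCode + " " + resp.StatusCode);
            }
        }
    }
}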

Many beginners don't know how to route their crawler traffic through a proxy IP, so here is a quick walkthrough of how to add a proxy to the HTTP client you are using. 亿牛云 offers its proxy IPs in two modes: an API mode, where you fetch high-quality proxy IPs from an API link, and a tunnel mode aimed at crawlers. Having compared the two, I recommend the tunnel proxy: it comes with its own IP pool and rotates IPs automatically, so you only configure the proxy address and credentials once and then crawl page by page following the vendor's demo. The example below shows how to set this up in C# with HttpWebRequest (the same idea applies to Python's requests via its proxies parameter):

// Namespaces required by this snippet
using System;
using System.IO;
using System.Net;
using System.Text;

// Target page to request
string targetUrl = "http://httpbin.org/ip";

// Proxy server (product site: www.16yun.cn)
string proxyHost = "http://t.16yun.cn";
string proxyPort = "31111";

// Proxy credentials
string proxyUser = "username";
string proxyPass = "password";

// Configure the proxy server
WebProxy proxy = new WebProxy(string.Format("{0}:{1}", proxyHost, proxyPort), true);

ServicePointManager.Expect100Continue = false;

var request = WebRequest.Create(targetUrl) as HttpWebRequest;

request.AllowAutoRedirect = true;
request.KeepAlive = true;
request.Method = "GET";
request.Proxy = proxy;

//request.Proxy.Credentials = CredentialCache.DefaultCredentials;
request.Proxy.Credentials = new System.Net.NetworkCredential(proxyUser, proxyPass);

// Optional: set a Proxy-Tunnel header (vendor-specific) to control IP switching
// Random ran = new Random();
// int tunnel = ran.Next(1, 10000);
// request.Headers.Add("Proxy-Tunnel", tunnel.ToString());

//request.Timeout = 20000;
//request.ServicePoint.ConnectionLimit = 512;
//request.UserAgent = "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.82 Safari/537.36";
//request.Headers.Add("Cache-Control", "max-age=0");
//request.Headers.Add("DNT", "1");

// Alternative: authenticate by sending the Proxy-Authorization header yourself
//String encoded = System.Convert.ToBase64String(System.Text.Encoding.GetEncoding("ISO-8859-1").GetBytes(proxyUser + ":" + proxyPass));
//request.Headers.Add("Proxy-Authorization", "Basic " + encoded);

// Read the response body; httpbin.org/ip echoes the IP the server saw
using (var response = request.GetResponse() as HttpWebResponse)
using (var sr = new StreamReader(response.GetResponseStream(), Encoding.UTF8))
{
    string htmlStr = sr.ReadToEnd();
    Console.WriteLine(htmlStr);
}
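
The demo above uses the older HttpWebRequest API. On newer .NET versions the same tunnel proxy can be configured through HttpClient and HttpClientHandler; the sketch below is a minimal equivalent that reuses the same placeholder host, port and credentials as the demo, so treat it as an untested outline rather than vendor-provided code:

using System;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;

class ProxyClientDemo
{
    static async Task Main()
    {
        // Same placeholder proxy endpoint and credentials as the demo above.
        var proxy = new WebProxy("http://t.16yun.cn:31111")
        {
            Credentials = new NetworkCredential("username", "password")
        };

        var handler = new HttpClientHandler
        {
            Proxy = proxy,
            UseProxy = true
        };

        using (var client = new HttpClient(handler))
        {
            // httpbin.org/ip echoes the caller's IP, so the response should show
            // one of the proxy pool's exit addresses rather than your own.
            string body = await client.GetStringAsync("http://httpbin.org/ip");
            Console.WriteLine(body);
        }
    }
}

Whichever client you use, the approach is the same: point it at the tunnel endpoint, supply the credentials, and let the vendor's pool handle IP rotation.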
