import requests
from requests.exceptions import ConnectionError
from pyquery import PyQuery as pq

base_headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.71 Safari/537.36',
    'Accept-Encoding': 'gzip, deflate, sdch',
    'Accept-Language': 'zh-CN,zh;q=0.8'
}

def get_page(url):
    headers = dict(base_headers)
    print('Getting', url)
    try:
        r = requests.get(url, headers=headers)
        print('Getting result', url, r.status_code)
        if r.status_code == 200:
            return r.text
    except ConnectionError:
        print('Crawling Failed', url)
    return None

# The Dao begets One: create the metaclass that collects the proxy crawlers
class ProxyMetaclass(type):
    """
    Metaclass that adds three attributes to ProxyGetter:
    __CrawlName__, __CrawlFunc__ and __CrawlFuncCount__,
    holding the crawler method names, the method objects and the number of crawlers.
    """
    # __new__ runs before __init__ and controls how the class itself is created
    # cls: the metaclass, supplied automatically by the interpreter
    # name: the name of the class being created
    # bases: the tuple of its base classes
    # attrs: the dict of its attributes and methods
    def __new__(cls, name, bases, attrs):
        count = 0
        # add two keys to attrs, each holding a list
        attrs['__CrawlFunc__'] = []
        attrs['__CrawlName__'] = []
        for k, v in attrs.items():
            if 'crawl_' in k:
                attrs['__CrawlName__'].append(k)  # collect each crawler method name
                attrs['__CrawlFunc__'].append(v)  # collect each crawler method object
                print(k, v)
                # print(attrs['__CrawlName__'])
                count += 1
        for k in attrs['__CrawlName__']:
            # drop the original method entries from the class dict
            attrs.pop(k)
        attrs['__CrawlFuncCount__'] = count
        print(attrs)
        return type.__new__(cls, name, bases, attrs)

# One begets Two: create the proxy-getter class
class ProxyGetter(object, metaclass=ProxyMetaclass):
    def get_raw_proxies(self, site):
        proxies = []
        print('Site', site)
        for func in self.__CrawlFunc__:
            if func.__name__ == site:
                this_page_proxies = func(self)
                for proxy in this_page_proxies:
                    print('Getting', proxy, 'from', site)
                    proxies.append(proxy)
        print(proxies)
        return proxies

    def crawl_daili66(self, page_count=4):
        start_url = 'http://www.66ip.cn/{}.html'
        urls = [start_url.format(page) for page in range(1, page_count + 1)]  # format works like %s-style formatting
        for url in urls:
            print('Crawling', url)
            html = get_page(url)
            if html:
                doc = pq(html)
                trs = doc('.containerbox table tr:gt(0)').items()
                for tr in trs:
                    ip = tr.find('td:nth-child(1)').text()
                    port = tr.find('td:nth-child(2)').text()
                    yield ':'.join([ip, port])
    '''
    def crawl_proxy360(self):
        start_url = 'http://www.proxy360.cn/Region/China'
        print('Crawling', start_url)
        html = get_page(start_url)
        if html:
            doc = pq(html)
            lines = doc('div[name="list_proxy_ip"]').items()
            for line in lines:
                ip = line.find('.tbBottomLine:nth-child(1)').text()
                port = line.find('.tbBottomLine:nth-child(2)').text()
                yield ':'.join([ip, port])
    '''
    def crawl_goubanjia(self):
        start_url = 'http://www.goubanjia.com/free/gngn/index.shtml'
        html = get_page(start_url)
        if html:
            doc = pq(html)
            tds = doc('td.ip').items()
            for td in tds:
                td.find('p').remove()
                yield td.text().replace(' ', '')

if __name__ == '__main__':
    # Two begets Three: instantiate ProxyGetter
    crawler = ProxyGetter()
    print(crawler.__CrawlName__)
    # Three begets the ten thousand things
    for site_label in range(crawler.__CrawlFuncCount__):
        site = crawler.__CrawlName__[site_label]  # site_label is an index into the list
        myProxies = crawler.get_raw_proxies(site)
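
To see the metaclass trick on its own, here is a minimal, self-contained sketch of the same collection idea (ToyMeta and ToyGetter are illustrative names, not part of the script above): any method whose name starts with crawl_ is recorded in __CrawlName__ and __CrawlFunc__ at class-creation time, so the class can enumerate its own crawlers without hard-coding them.

# Minimal sketch of the collection mechanism; ToyMeta/ToyGetter are made-up names.
class ToyMeta(type):
    def __new__(cls, name, bases, attrs):
        attrs['__CrawlFunc__'] = []
        attrs['__CrawlName__'] = []
        for k, v in list(attrs.items()):
            if k.startswith('crawl_'):
                attrs['__CrawlName__'].append(k)
                attrs['__CrawlFunc__'].append(v)
        attrs['__CrawlFuncCount__'] = len(attrs['__CrawlName__'])
        return type.__new__(cls, name, bases, attrs)

class ToyGetter(metaclass=ToyMeta):
    def crawl_a(self):
        yield 'proxy-from-a'

    def crawl_b(self):
        yield 'proxy-from-b'

g = ToyGetter()
print(g.__CrawlName__)       # ['crawl_a', 'crawl_b']
print(g.__CrawlFuncCount__)  # 2
for func in g.__CrawlFunc__:
    print(list(func(g)))     # each crawler is called through the stored function object

The real ProxyGetter works the same way, except its metaclass also pops the collected method entries out of the class dict and prints what it gathered.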

Run results

D:\pythontest>python proxy_ip.py
crawl_goubanjia <function ProxyGetter.crawl_goubanjia at 0x00000000035D2510>
crawl_daili66 <function ProxyGetter.crawl_daili66 at 0x00000000035D2488>
{'__qualname__': 'ProxyGetter', '__module__': '__main__', '__CrawlName__': ['crawl_goubanjia', 'crawl_daili66'], '__CrawlFunc__': [<function ProxyGetter.crawl_goubanjia at 0x00000000035D2510>, <function ProxyGetter.crawl_daili66 at 0x00000000035D2488>], 'get_raw_proxies': <function ProxyGetter.get_raw_proxies at 0x00000000035D2400>, '__CrawlFuncCount__': 2}
['crawl_goubanjia', 'crawl_daili66']
Site crawl_goubanjia
Getting http://www.goubanjia.com/free/gngn/index.shtml
Getting result http://www.goubanjia.com/free/gngn/index.shtml 403
[]
Site crawl_daili66
Crawling=== http://www.66ip.cn/1.html
Getting http://www.66ip.cn/1.html
Getting result http://www.66ip.cn/1.html 200
Getting 123.163.97.198: from crawl_daili66
Getting 36.249.109.21: from crawl_daili66
Getting 163.204.245.52: from crawl_daili66
Getting 222.189.247.207: from crawl_daili66
Getting 87.250.218.12: from crawl_daili66
Getting 118.172.176.61: from crawl_daili66
Getting 134.119.214.206: from crawl_daili66
Getting 110.74.208.154: from crawl_daili66
Crawling=== http://www.66ip.cn/2.html
Getting http://www.66ip.cn/2.html
Getting result http://www.66ip.cn/2.html 200
Getting 120.234.138.102: from crawl_daili66
Getting 110.86.136.127: from crawl_daili66
Getting 59.57.38.197: from crawl_daili66
Getting 202.62.86.94: from crawl_daili66
Getting 210.22.176.146: from crawl_daili66
Getting 180.183.136.212: from crawl_daili66
Getting 183.87.153.98: from crawl_daili66
Getting 222.124.2.186: from crawl_daili66
Getting 123.169.126.9: from crawl_daili66
Getting 123.169.126.93: from crawl_daili66
Getting 158.255.249.58: from crawl_daili66
Getting 1.198.72.242: from crawl_daili66
Crawling=== http://www.66ip.cn/3.html
Getting http://www.66ip.cn/3.html
Getting result http://www.66ip.cn/3.html 200
Getting 163.204.246.10: from crawl_daili66
Getting 186.159.112.6: from crawl_daili66
Getting 163.204.246.102: from crawl_daili66
Getting 88.87.72.72: from crawl_daili66
Getting 193.169.118.6: from crawl_daili66
Getting 196.216.220.204: from crawl_daili66
Getting 185.109.62.124: from crawl_daili66
Getting 1.193.246.78: from crawl_daili66
Getting 188.131.239.119: from crawl_daili66
Getting 1.10.188.93: from crawl_daili66
Getting 182.116.237.203: from crawl_daili66
Getting 139.99.223.230: from crawl_daili66
Crawling=== http://www.66ip.cn/4.html
Getting http://www.66ip.cn/4.html
Getting result http://www.66ip.cn/4.html 200
Getting 163.204.246.232: from crawl_daili66
Getting 117.28.96.105: from crawl_daili66
Getting 202.29.220.34: from crawl_daili66
Getting 123.169.114.80: from crawl_daili66
Getting 115.42.34.3: from crawl_daili66
Getting 41.84.131.78: from crawl_daili66
Getting 123.163.96.207: from crawl_daili66
Getting 182.35.83.12: from crawl_daili66
Getting 191.241.226.230: from crawl_daili66
Getting 202.138.236.35: from crawl_daili66
Getting 194.1.193.226: from crawl_daili66
Getting 202.158.77.122: from crawl_daili66

['123.163.97.198:9999', '36.249.109.21:9999', '163.204.245.52:9999', '222.189.247.207:9999', '87.250.218.12:44168',
'118.172.176.61:8080', '134.119.214.206:1080', '110.74.208.154:21776', '120.234.138.102:53779', '110.86.136.127:9999',
'59.57.38.197:9999', '202.62.86.94:83', '210.22.176.146:37299', '180.183.136.212:8080', '183.87.153.98:49602',
'222.124.2.186:8080', '123.169.126.9:3', '123.169.126.93:9999', '158.255.249.58:50100', '1.198.72.242:9999',
'163.204.246.10:2', '186.159.112.6:53281', '163.204.246.102:9999', '88.87.72.72:8080', '193.169.118.6:53281',
'185.109.62.124:808', '1.193.246.78:9999', '188.131.239.119:8118', '1.10.188.93:34871', '182.116.237.203:9999',
'139.99.223.230:8080', '163.204.246.232:9999', '117.28.96.105:9999', '202.29.220.34:38961', '123.169.114.80:9999',
'115.42.34.3:8080', '41.84.131.78:53281', '123.163.96.207:9999', '182.35.83.12:9999', '191.241.226.230:53281',
'202.138.236.35:56413', '194.1.193.226:35646','196.216.220.204:36739', '202.158.77.122:47284']


It seems only one of the proxy sites can actually be crawled for data.
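
Since most free proxies are dead on arrival, a natural next step is to verify the collected addresses before using them. A minimal sketch, assuming an httpbin.org test URL and a 5-second timeout (both arbitrary choices, not part of the original script):

import requests

def check_proxy(proxy, test_url='http://httpbin.org/ip', timeout=5):
    """Return True if the given ip:port proxy can fetch test_url."""
    proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy}
    try:
        r = requests.get(test_url, proxies=proxies, timeout=timeout)
        return r.status_code == 200
    except requests.RequestException:
        return False

# e.g. keep only the proxies that actually respond:
# working = [p for p in myProxies if check_proxy(p)]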
