Python3爬虫之代理的使用
2017-09-08 07:16
465 查看
"""Gevent-based proxy harvester + image crawler for jandan.net.

Two OS processes share one multiprocessing.Queue of verified proxies:

* ``chack_run`` scrapes candidate ``ip:port`` strings from www.ip.cn and
  verifies each one concurrently with gevent greenlets.
* ``crawl_run`` consumes verified proxies to fetch gallery pages and saves
  every linked image into a local ``meizi/`` directory.

NOTE(review): the original file imported requests/gevent twice and called
``monkey.patch_all()`` twice; the imports are consolidated here. Function
names ``chack``/``chack_run`` (sic, presumably "check") are kept unchanged
for backward compatibility.
"""
import os
import random
import re
import time
from multiprocessing import Process, Queue

# monkey.patch_all() must run before the sockets used by requests are created.
from gevent import monkey
monkey.patch_all()

import gevent
import requests
from lxml import etree

# Pool of real-world User-Agent strings; one is picked at random per request
# to make the crawler look less like a bot.
USER_AGENTS = [
    "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; AcooBrowser; .NET CLR 1.1.4322; .NET CLR 2.0.50727)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Acoo Browser; SLCC1; .NET CLR 2.0.50727; Media Center PC 5.0; .NET CLR 3.0.04506)",
    "Mozilla/4.0 (compatible; MSIE 7.0; AOL 9.5; AOLBuild 4337.35; Windows NT 5.1; .NET CLR 1.1.4322; .NET CLR 2.0.50727)",
    "Mozilla/5.0 (Windows; U; MSIE 9.0; Windows NT 9.0; en-US)",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 2.0.50727; Media Center PC 6.0)",
    "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 1.0.3705; .NET CLR 1.1.4322)",
    "Mozilla/4.0 (compatible; MSIE 7.0b; Windows NT 5.2; .NET CLR 1.1.4322; .NET CLR 2.0.50727; InfoPath.2; .NET CLR 3.0.04506.30)",
    "Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN) AppleWebKit/523.15 (KHTML, like Gecko, Safari/419.3) Arora/0.3 (Change: 287 c9dfb30)",
    "Mozilla/5.0 (X11; U; Linux; en-US) AppleWebKit/527+ (KHTML, like Gecko, Safari/419.3) Arora/0.6",
    "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.2pre) Gecko/20070215 K-Ninja/2.1.1",
    "Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9) Gecko/20080705 Firefox/3.0 Kapiko/3.0",
    "Mozilla/5.0 (X11; Linux i686; U;) Gecko/20070322 Kazehakase/0.4.5",
    "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.8) Gecko Fedora/1.9.0.8-1.fc10 Kazehakase/0.5.6",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/535.20 (KHTML, like Gecko) Chrome/19.0.1036.7 Safari/535.20",
    "Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; fr) Presto/2.9.168 Version/11.52",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.11 TaoBrowser/2.0 Safari/536.11",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; LBBROWSER)",
    "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E; LBBROWSER)",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.84 Safari/535.11 LBBROWSER",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; QQBrowser/7.0.3698.400)",
    "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SV1; QQDownload 732; .NET4.0C; .NET4.0E; 360SE)",
    "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.89 Safari/537.1",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.89 Safari/537.1",
    "Mozilla/5.0 (iPad; U; CPU OS 4_2_1 like Mac OS X; zh-cn) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8C148 Safari/6533.18.5",
    "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:2.0b13pre) Gecko/20110307 Firefox/4.0b13pre",
    "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:16.0) Gecko/20100101 Firefox/16.0",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11",
    "Mozilla/5.0 (X11; U; Linux x86_64; zh-CN; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10",
]


def ua():
    """Return a requests ``headers`` dict with a random User-Agent."""
    return {'User-Agent': random.choice(USER_AGENTS)}


def chack(proxy, q):
    """Verify one ``'ip:port'`` proxy string by fetching jandan.net through it.

    A proxy that answers with an OK status is pushed onto *q* as a
    requests-style ``proxies`` dict; anything else is silently discarded
    (best-effort filtering — most scraped proxies are dead).

    :param proxy: candidate proxy, formatted ``"ip:port"``.
    :param q: multiprocessing.Queue receiving working proxies dicts.
    """
    try:
        # split(':', 1) so a malformed extra colon cannot raise IndexError.
        ip, port = proxy.split(':', 1)
        proxies = {
            "http": "http://%s:%s" % (ip, port),
            "https": "https://%s:%s" % (ip, port),
        }
        resp = requests.get('http://jandan.net/', headers=ua(),
                            proxies=proxies, timeout=5)
        if resp.ok:
            q.put(proxies)
            print(q.qsize())
    except Exception:
        # Deliberate best-effort: a failing proxy is just dropped.
        pass


def chack_run(q):
    """Scrape candidate ``ip:port`` pairs and verify them all concurrently.

    :param q: multiprocessing.Queue that ``chack`` fills with good proxies.
    """
    page = requests.get('http://www.ip.cn/').text
    # ip:port, with look-arounds so we never match inside a longer dotted
    # number (e.g. a version string).
    reip = re.compile(r'(?<![\.\d])(?:\d{1,3}\.){3}\d{1,3}(?![\.\d]):\d{2,5}')
    tasks = reip.findall(page)
    # BUG FIX: the original printed the compiled regex object itself
    # ('%s' % reip); report the actual matches found instead.
    print('\033[46mfound %d candidate proxies\033[0m' % len(tasks))
    gevent.joinall([gevent.spawn(chack, task, q) for task in tasks])


def crawl(url, q):
    """Fetch *url* through a verified proxy and save every image it links.

    Blocks (polling every 20 s) until at least one verified proxy is
    available on *q*. A proxy that successfully serves the page is returned
    to the queue for reuse; one that fails is not re-queued.

    :param url: gallery page to scrape for ``<img src>`` links.
    :param q: shared queue of verified requests ``proxies`` dicts.
    """
    print('crawl')
    headers = ua()
    while q.empty():
        print('ip proxy is empty')
        time.sleep(20)
    proxies = q.get()
    try:
        html = requests.get(url, headers=headers, proxies=proxies,
                            timeout=5).content
        sel = etree.HTML(html)
        q.put(proxies)  # proxy worked — hand it back for other greenlets
        for png in sel.xpath('//img/@src'):
            # jandan uses protocol-relative '//...' links; force http.
            if 'http:' not in png:
                png = 'http:' + png
            data = requests.get(png, headers=headers).content
            # exist_ok avoids the exists()/mkdir() race of the original.
            os.makedirs('meizi', exist_ok=True)
            name = png.split('/')[-1]
            print(name)
            with open(os.path.join('meizi', name), 'wb') as fh:
                fh.write(data)
    except Exception:
        # Best-effort: skip pages/images that fail; don't kill the greenlet.
        pass


def crawl_run(q):
    """Spawn one ``crawl`` greenlet per jandan.net gallery page (0..2298).

    :param q: shared queue of verified proxies consumed by ``crawl``.
    """
    urls = ('http://jandan.net/ooxx/page-%s#comments' % n
            for n in range(2299))
    gevent.joinall([gevent.spawn(crawl, u, q) for u in urls])


if __name__ == "__main__":
    q = Queue()
    p1 = Process(target=chack_run, args=(q,))
    p2 = Process(target=crawl_run, args=(q,))
    p1.start()
    p2.start()
    # Wait for both workers so the parent doesn't exit immediately
    # (the original never joined its children).
    p1.join()
    p2.join()
相关文章推荐
- [python]新手写爬虫v2.5(使用代理的异步爬虫)
- 使用PYTHON3写了一个简单爬虫, 通过公司代理爬取ppt素材
- python3实现网络爬虫(7)-- 使用ip代理抓取网页
- 【python爬虫】python使用代理爬虫例子
- Python:爬虫使用代理防封IP:HTTP Error 403: Forbidden
- python爬虫之Scrapy 使用代理配置
- Python 爬虫入门(二)—— IP代理使用 - 不剃头的一休哥 - 博客园
- Python开发中爬虫使用代理proxy抓取网页的方法示例
- Python 爬虫之使用代理ip
- Python3 爬虫使用User Agent和代理IP隐藏身份
- Python爬虫——4.6使用requests和正则表达式、随机代理爬取淘宝网商品信息
- python使用代理爬虫例子
- python3爬虫(8):异常处理以及代理的使用
- Python爬虫之爬取——使用代理
- python爬虫之Scrapy 使用代理配置
- python爬虫初学(二)——使用代理
- 讲解Python的Scrapy爬虫框架使用代理进行采集的方法
- 【Python3.6爬虫学习记录】(六)urllib详细使用方法(header,代理,超时,认证,异常处理)
- 讲解Python的Scrapy爬虫框架使用代理进行采集的方法
- python爬虫之Scrapy 使用代理配置