
[Python] Scraping the NSFOCUS Vulnerability List into MySQL with Email Notification

2017-09-25 16:04
 
Security has been getting more and more attention at my company lately, so every day I visit the sites below to check whether any application or operating system has had a new vulnerability disclosed. For a junior ops guy, that's fairly tedious.
Well, I've been learning web scraping recently, so why not put it to use right away? After a quick look I decided to start with the NSFOCUS site.

CNCERT/CC (National Internet Emergency Center): http://www.cert.org.cn/publish/main/index.html
CNVD (National Information Security Vulnerability Sharing Platform): http://www.cnvd.org.cn/
CNNVD (China National Vulnerability Database of Information Security): http://www.cnnvd.org.cn/index.html
Seebug: https://www.seebug.org/
NSFOCUS: http://www.nsfocus.net/index.php?act=sec_bug
 
China B.S.A navigation site: http://www.hackerdaohang.com/
 
Alright, enough talk, let's get started.
Prepare the Linux environment: MySQL and Scrapy.
 
Part 1: Create the project and analyze the URL
# scrapy startproject lmsafe
# cd lmsafe
# scrapy genspider -t basic safe nsfocus.net
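
For reference, after those two commands the project layout looks roughly like this (only the files edited below are shown):

lmsafe/
    scrapy.cfg
    lmsafe/
        items.py
        pipelines.py
        settings.py
        spiders/
            safe.py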
 
items.py

# -*- coding: utf-8 -*-
 
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html  
import scrapy
 
 
class LmsafeItem(scrapy.Item):
    # define the fields for your item here like:
    # name = scrapy.Field()
    date = scrapy.Field()
    title = scrapy.Field()
    link = scrapy.Field()
 
safe.py

# -*- coding: utf-8 -*-
import scrapy
from lmsafe.items import LmsafeItem
from scrapy.http import Request
import time,random
 
class SafeSpider(scrapy.Spider):
    name = 'safe'
    allowed_domains = ['nsfocus.net']
    #start_urls = ['http://nsfocus.net/']
  
    def start_requests(self):
      ua = {'User-Agent':'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36','Connection':'keep-alive'}
      yield Request('http://www.nsfocus.net/index.php?act=sec_bug&type_id=&os=&keyword=&page=1',headers=ua)
    def parse(self, response):
      it =  LmsafeItem()
      it['date'] = response.xpath('//ul[@class="vul_list"]/li/span/text()').extract()
      it['title'] = response.xpath('//ul[@class="vul_list"]/li/a/text()').extract()
      it['link'] = response.xpath('//ul[@class="vul_list"]/li/a/@href').extract()
      yield it
      for i in range(2,16):
        url = "http://www.nsfocus.net/index.php?act=sec_bug&type_id=&os=&keyword=&page="+str(i)         
# 这个 url的话看了下就是
page变化而已
        sj = random.randint(1,5)
        time.sleep(sj)   #设置时间间隔,避免造成服务器过多压力
        yield Request(url,callback=self.parse)   #回调函数
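
Before running the whole crawl, the three XPath selectors can be sanity-checked in scrapy shell (a quick check I'd suggest, not part of the original post); if the three lists come back with the same length, the index-based pairing done in the pipeline below lines up correctly:

# scrapy shell "http://www.nsfocus.net/index.php?act=sec_bug&type_id=&os=&keyword=&page=1"
>>> response.xpath('//ul[@class="vul_list"]/li/span/text()').extract()[:3]    # dates
>>> response.xpath('//ul[@class="vul_list"]/li/a/text()').extract()[:3]       # titles
>>> response.xpath('//ul[@class="vul_list"]/li/a/@href').extract()[:3]        # relative links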
 
settings.py        # this must be set for the pipeline to be used

ITEM_PIPELINES = {
    'lmsafe.pipelines.LmsafePipeline': 300,
}
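
As an alternative to the time.sleep() calls inside the spider, Scrapy's own throttling settings can do the same job; a minimal sketch (the values are my own choice, not from the original post):

# settings.py: let Scrapy space out the requests instead of sleeping in parse()
DOWNLOAD_DELAY = 3                  # base delay between requests, in seconds
RANDOMIZE_DOWNLOAD_DELAY = True     # jitter the delay (0.5x to 1.5x), similar to the random sleep above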
 
MySQL setup

create database mzh character set utf8 collate utf8_bin;
use mzh;
create table safe(sj date,title char(200) primary key,link char(200) unique) default character set utf8;
desc safe;
 
+-------+-----------+------+-----+---------+-------+
| Field | Type      | Null | Key | Default | Extra |
+-------+-----------+------+-----+---------+-------+
| sj    | date      | YES  |     | NULL    |       |
| title | char(200) | NO   | PRI |         |       |
| link  | char(200) | YES  | UNI | NULL    |       |
+-------+-----------+------+-----+---------+-------+
 
 
pipelines.py

# -*- coding: utf-8 -*-
 
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import pymysql
 
class LmsafePipeline(object):
    def process_item(self, item, spider):
      conn = pymysql.connect(host='127.0.0.1', user='root', passwd='redhat', db='mzh',port=3306,charset='utf8')
      for i in range(len(item['date'])):
        date = item['date'][i]
        title = item['title'][i]
        link = "http://www.nsfocus.net"+item['link'][i]
        sql='insert into safe(sj,title,link) values("%s","%s","%s");'%(date,title,link)
        try:
          conn.query(sql)
          conn.commit()
        except Exception as e:
          pass   # duplicate rows violate the primary key on title, so re-crawled items are skipped
      conn.close()
      return item
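
One thing to be aware of: the pipeline builds the INSERT with Python string formatting, so a title containing a double quote would break the statement. A hedged alternative (my own sketch, not the original code) is to let pymysql do the quoting with a parameterized execute(); the helper below could replace the body of the for loop:

import pymysql

def insert_vuln(conn, date, title, link):
    # same insert as above, but parameterized so pymysql handles quoting/escaping
    sql = 'insert into safe(sj, title, link) values (%s, %s, %s)'
    try:
        with conn.cursor() as cur:
            cur.execute(sql, (date, title, link))
        conn.commit()
    except pymysql.err.IntegrityError:
        pass   # duplicate title (primary key): already stored, skip it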
 
# cd /data/lmsafe/ && scrapy crawl safe --nolog     # test run; if it fails, remove --nolog to see the errors
# Check in MySQL; if the characters display incorrectly, run: set character_set_results=utf8;

mysql> use mzh;
Reading table information for completion of table and column names
You can turn off this feature to get a quicker startup with -A
 
Database changed
mysql> set character_set_results=utf8;
Query OK, 0 rows affected (0.20 sec)
 
mysql> select * from safe
    -> ;
+------------+------------------------------------------------------------------------+-------------------------------------+
| sj         | title                                                                  | link                                |
+------------+------------------------------------------------------------------------+-------------------------------------+
| 2017-09-25 | Samba中间人攻击安全功能绕过漏洞(CVE-2017-12151)                        | http://www.nsfocus.net/vulndb/37690 |
| 2017-09-25 | Samba中间人攻击安全功能绕过漏洞(CVE-2017-12150)                        | http://www.nsfocus.net/vulndb/37689 |
| 2017-09-22 | Cisco UCS Central Software命令注入漏洞(CVE-2017-12255)                 | http://www.nsfocus.net/vulndb/37688 |
| 2017-09-22 | Cisco Wide Area Application Services远程拒绝服务漏洞(CVE-2017-12250)   | http://www.nsfocus.net/vulndb/37687 |
| 2017-09-22 | Cisco多个产品拒绝服务漏洞(CVE-2017-6720)                               | http://www.nsfocus.net/vulndb/37686 |
| 2017-09-22 | Cisco FindIT DLL预加载漏洞(CVE-2017-12252)                             | http://www.nsfocus.net/vulndb/37685 |
| 2017-09-22 | Cisco AsyncOS Software远程拒绝服务漏洞(CVE-2017-12215)                 | http://www.nsfocus.net/vulndb/37684 |
| 2017-09-22 | Cisco Unified Customer Voice Portal远程权限提升漏洞(CVE-2017-12214)    | http://www.nsfocus.net/vulndb/37683 |
| 2017-09-21 | Cisco Unified Intelligence Center跨站请求伪造漏洞(CVE-2017-12254)      | http://www.nsfocus.net/vulndb/37682 |
| 2017-09-21 | Cisco Unified Intelligence Center跨站请求伪造漏洞(CVE-2017-12253)      | http://www.nsfocus.net/vulndb/37681 |
| 2017-09-21 | GraphicsMagick拒绝服务漏洞(CVE-2017-14504)                             | http://www.nsfocus.net/vulndb/37680 |
| 2017-09-21 | Cisco Unified Intelligence Center跨站脚本漏洞(CVE-2017-12248)          | http://www.nsfocus.net/vulndb/37679 |
| 2017-09-21 | ImageMagick拒绝服务漏洞(CVE-2017-14532)                                | http://www.nsfocus.net/vulndb/37678 |
| 2017-09-21 | ImageMagick拒绝服务漏洞(CVE-2017-14528)                                | http://www.nsfocus.net/vulndb/37677 |
| 2017-09-21 | ImageMagick拒绝服务漏洞(CVE-2017-14505)                                | http://www.nsfocus.net/vulndb/37676 |
| 2017-09-21 | ImageMagick拒绝服务漏洞(CVE-2017-14531)                                | http://www.nsfocus.net/vulndb/37675 |
| 2017-09-21 | ImageMagick拒绝服务漏洞(CVE-2017-14533)                                | http://www.nsfocus.net/vulndb/37674 |
| 2017-09-21 | Apache Subversion远程代码执行漏洞(CVE-2017-9800)                       | http://www.nsfocus.net/vulndb/37673 |
| 2017-09-20 | Apache Tomcat远程信息泄露漏洞(CVE-2017-12616)                          | http://www.nsfocus.net/vulndb/37672 |
| 2017-09-20 | Apache Tomcat远程代码执行漏洞(CVE-2017-12615)                          | http://www.nsfocus.net/vulndb/37671 |
 
 
Part 2: Generate the Excel file and send the email
Export the data from MySQL into an Excel file and send it by email; I figured I'd just write a script for that.
Loophole.py

#!/usr/bin/python3
# -*- coding: utf-8 -*-
from email.header import Header
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.base import MIMEBase
from email.mime.image import MIMEImage
from email.utils import formatdate
import os.path
import sys
import mimetypes
import pymysql,xlsxwriter,datetime
 
 
def Excel(today,tomonth):
  begin = 2
  colour = '#660099'
  title = [u'发布时间',u'漏洞名称',u'详细URL']
 
  workbook = xlsxwriter.Workbook(u'%s绿盟漏洞发布.xlsx'%(today))
  worksheet = workbook.add_worksheet(tomonth)
 
  worksheet.set_column('A:A', 20)
  worksheet.set_column('B:B', 90)
  worksheet.set_column('C:C', 50)
  format_title = workbook.add_format()
  format_title.set_bold(1)
  format_title.set_font_color('white')
  format_title.set_bg_color(colour)
 
  worksheet.write_row('A1',title,format_title)
 
  conn = pymysql.connect(host='127.0.0.1',db='mzh',port=3306,user='root',passwd='redhat',charset='utf8')
  cur = conn.cursor()
 
  if int(tomonth) < 10:
    tomonth = tomonth.strip('0')   # drop the leading zero so it matches month(sj), e.g. '09' -> '9'
  sql = 'select DATE_FORMAT(sj,"%Y-%m-%d"),title,link from safe where month(sj) = ' + tomonth + ' order by sj desc;'
  cur.execute(sql)
 
  try:
    for i in cur.fetchall():
      worksheet.write('A%s'%str(begin), i[0])
      worksheet.write('B%s'%str(begin), i[1])
      worksheet.write('C%s'%str(begin), i[2])
      begin+=1
  except Exception as e:
    return False
 
  conn.close()
  workbook.close()
  return True
 
def Mail(ReplyTo,To,file_name,tomonth):
  tday=datetime.date.today().strftime('%Y/%m/%d')
  # usage: mail.py <1: sender/reply-to address, e.g. 10000@qq.com> <2: recipients, multiple separated by ;> <3: file to attach>
  From = "%s<1596179xxxx@139.com>" % Header("大帅比","utf-8")
  server = smtplib.SMTP("smtp.139.com",25)
  server.login("1596179xxxx@139.com","password")   # only needed when the SMTP server requires authentication
 
  # build a MIMEMultipart object as the root container
  main_msg = MIMEMultipart()
 
  # build a MIMEText object as the message body and attach it to the root container
  text_msg = MIMEText(u"绿盟 %s月最新安全漏洞报告"%(tomonth),_charset="utf-8")          
  main_msg.attach(text_msg)
 
  # build the file attachment object and attach it to the root container
  ctype,encoding = mimetypes.guess_type(file_name)
  if ctype is None or encoding is not None:
    ctype='application/octet-stream'
  maintype,subtype = ctype.split('/',1)
  file_msg=MIMEImage(open(file_name,'rb').read(),subtype)
  #file_msg1=MIMEImage(open(file_name1,'rb').read(),subtype)
 
  # set the attachment headers
  basename = os.path.basename(file_name)
  file_msg.add_header('Content-Disposition','attachment', filename = basename)   # set the attachment filename
  main_msg.attach(file_msg)

  #basename1 = os.path.basename(file_name1)
  #file_msg1.add_header('Content-Disposition','attachment', filename = basename1)   # set the attachment filename
  #main_msg.attach(file_msg1)

  # set the headers on the root container
  main_msg['From'] = From
  main_msg['Reply-to'] = ReplyTo
  #main_msg['To'] = To
  main_msg['Subject'] = u"[每日更新] %s绿盟安全漏洞报告" % tday
  main_msg['Date'] = formatdate()
  #main_msg['Bcc'] = To 
  # get the full formatted message text
  fullText = main_msg.as_string()
 
  # send the mail via SMTP
  try: 
    server.sendmail(From, To.split(';'), fullText)
  finally: 
    server.quit()
 
if __name__ == '__main__':
  today = datetime.date.today().strftime('%Y-%m-%d')
  tomonth = datetime.date.today().strftime('%m')
  ReplyTo = '1596179xxxx@139.com'
  To = "6025xxxxx@qq.com"
  file_name = u"/data/shell/%s绿盟漏洞发布.xlsx"%(today)
  if Excel(today,tomonth):
    Mail(ReplyTo,To,file_name,tomonth)
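
It's worth a manual run before handing the script to cron. One caveat: Excel() writes the .xlsx to the current working directory, while file_name points at /data/shell/, so run it from that directory (or change one of the two paths):

# cd /data/shell/ && python3 Loophole.py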
 
 
Set up the scheduled tasks:
crontab -e

30 8 * * * cd /data/lmsafe/ && scrapy crawl safe --nolog
1 9 * * * /usr/bin/python3 /data/shell/Loophole.py
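
Optionally, to avoid losing errors from the cron runs, redirect the output of both jobs to log files (the log paths below are just my choice); once that's in place it also makes sense to drop --nolog from the crawl job:

30 8 * * * cd /data/lmsafe/ && scrapy crawl safe >> /var/log/lmsafe_crawl.log 2>&1
1 9 * * * /usr/bin/python3 /data/shell/Loophole.py >> /var/log/lmsafe_mail.log 2>&1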
 
Result screenshot: (image omitted)

To wrap up: I feel like I've forgotten most of the Python basics I once learned and had to keep digging through my old notes. Comments and suggestions from readers are very welcome.
Finally, a line I read somewhere: in the world of programming, every repetitive, procedural task can be handed off to automation. I find that pretty motivating.