Write the target domains into domains.txt (collect the vendor's second-level domains).
result.txt holds the C segments looked up for every URL in domains.txt.
result2.txt is result.txt after deduplication.
GitHub: https://github.com/lufeirider/cscan
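For reference, domains.txt takes one target per line; entries with or without an http:// prefix both work, since the scheme gets stripped during initialization. A hypothetical example:

www.example.com
http://mail.example.com
bbs.example.com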
Below are the C-segment results for Lenovo:
223.202.25.1-223.202.25.254 124.127.169.1-124.127.169.254 23.47.143.1-23.47.143.254 64.26.251.1-64.26.251.254 23.42.178.1-23.42.178.254 223.202.19.1-223.202.19.254 199.119.127.1-199.119.127.254 219.142.122.1-219.142.122.254 205.216.62.1-205.216.62.254 66.147.244.1-66.147.244.254 12.130.131.1-12.130.131.254 121.199.167.1-121.199.167.254 103.30.232.1-103.30.232.254 182.118.84.1-182.118.84.254 223.203.219.1-223.203.219.254 64.88.179.1-64.88.179.254 209.167.231.1-209.167.231.254 208.74.204.1-208.74.204.254 69.168.101.1-69.168.101.254 69.168.97.1-69.168.97.254 106.37.234.1-106.37.234.254 115.29.16.1-115.29.16.254 203.196.120.1-203.196.120.254 58.247.171.1-58.247.171.254 54.215.241.1-54.215.241.254 208.115.47.1-208.115.47.254 211.100.14.1-211.100.14.254 202.96.27.1-202.96.27.254 219.141.216.1-219.141.216.254 12.11.224.1-12.11.224.254 66.117.30.1-66.117.30.254 223.202.27.1-223.202.27.254 114.113.233.1-114.113.233.254 198.7.31.1-198.7.31.254 59.61.88.1-59.61.88.254 123.127.211.1-123.127.211.254 58.20.164.1-58.20.164.254 58.20.132.1-58.20.132.254 139.219.9.1-139.219.9.254 222.73.233.1-222.73.233.254 223.202.62.1-223.202.62.254 123.125.148.1-123.125.148.254 202.85.217.1-202.85.217.254 121.199.251.1-121.199.251.254 121.199.24.1-121.199.24.254 223.203.218.1-223.203.218.254 42.121.0.1-42.121.0.254 42.121.138.1-42.121.138.254 66.192.69.1-66.192.69.254 52.88.225.1-52.88.225.254 162.216.170.1-162.216.170.254 194.168.19.1-194.168.19.254 67.222.128.1-67.222.128.254 10.38.64.1-10.38.64.254 50.19.223.1-50.19.223.254 123.159.202.1-123.159.202.254 66.161.21.1-66.161.21.254 10.96.160.1-10.96.160.254 69.4.236.1-69.4.236.254 69.7.171.1-69.7.171.254 95.154.206.1-95.154.206.254 50.57.139.1-50.57.139.254 23.53.201.1-23.53.201.254 23.253.122.1-23.253.122.254 210.51.191.1-210.51.191.254 54.231.0.1-54.231.0.254 114.247.140.1-114.247.140.254 123.103.15.1-123.103.15.254 117.104.161.1-117.104.161.254 202.107.203.1-202.107.203.254 103.30.235.1-103.30.235.254 54.183.218.1-54.183.218.254 211.100.36.1-211.100.36.254 42.121.254.1-42.121.254.254 123.150.177.1-123.150.177.254 10.96.144.1-10.96.144.254 23.10.2.1-23.10.2.254 43.255.224.1-43.255.224.254 184.26.200.1-184.26.200.254 104.98.45.1-104.98.45.254 54.227.254.1-54.227.254.254 66.172.15.1-66.172.15.254 117.104.160.1-117.104.160.254 162.243.138.1-162.243.138.254 103.244.58.1-103.244.58.254 10.96.0.1-10.96.0.254 70.32.76.1-70.32.76.254 59.120.128.1-59.120.128.254 12.130.158.1-12.130.158.254 116.213.92.1-116.213.92.254
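The full script follows (Python 2; it shells out to nslookup, so that binary must be on PATH):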
# coding: utf-8
import os
import re
import Queue
import threading
q=Queue.Queue()
class getCSgement:
    # Initialization: strip the scheme (and any trailing slash) from the URL
    def __init__(self, url):
        if "http" in url:
            pattern = re.compile(r'(?<=//).+(?<!/)')
            match = pattern.search(url)
            try:
                url = match.group()
            except:
                print "regex error"
            self.url = url
        else:
            self.url = url
    # Resolve the host and derive its C segment
    def cSgment(self):
        lookStr = self.nsLookUp(self.url)
        listIp = self.fetIp(lookStr)
        if len(listIp) == 0:
            return "networkbad"
        if self.checkCdn(listIp):
            strIp = ""
            for i in listIp:
                strIp = strIp + i + ","
            return strIp[:-1] + " (may be using a CDN)"
        return self.makeCSeg(listIp)
    # Query the domain via nslookup against 8.8.8.8
    def nsLookUp(self, url):
        cmd = 'nslookup %s 8.8.8.8' % url
        handle = os.popen(cmd, 'r')
        result = handle.read()
        return result
    # Extract IPs from the nslookup output, dropping the DNS server's own 8.8.8.8
    def fetIp(self, result):
        ips = re.findall(r'(?<![\.\d])(?:\d{1,3}\.){3}\d{1,3}(?![\.\d])', result)
        return [ip for ip in ips if ip != "8.8.8.8"]

    # More than one resolved IP usually means a CDN sits in front
    def checkCdn(self, ips):
        if len(ips) > 1:
            return True
        return False
    # Turn a single IP into its C segment, e.g. 1.2.3.4 -> 1.2.3.1-1.2.3.254
    def makeCSeg(self, ips):
        if not self.checkCdn(ips):
            ipStr = "".join(ips)
            end = ipStr.rfind(".")
            return ipStr[0:end + 1] + "1-" + ipStr[0:end + 1] + "254"
# Worker: pull URLs off the queue and scan them
def scaner():
    while not q.empty():
        url = q.get()
        t = getCSgement(url)
        result = t.cSgment()
        if "networkbad" not in result:
            print url + ":" + result
            if "CDN" not in result:
                writeFile("result.txt", result + "\n")
        else:
            # Retry once in case the lookup failed transiently
            t = getCSgement(url)
            result2 = t.cSgment()
            if "networkbad" not in result2:
                print url + ":" + result2
                if "CDN" not in result2:
                    writeFile("result.txt", result2 + "\n")
            else:
                print url + ": unreachable, or the network is unstable"
    if q.empty():
        delRep()
# Append a record to a file
def writeFile(filename, context):
    f = open(filename, "a+")
    f.write(context)
    f.close()
# Deduplicate result.txt into result2.txt, preserving order
def delRep():
    buff = []
    for ln in open('result.txt'):
        if ln in buff:
            continue
        buff.append(ln)
    with open('result2.txt', 'w') as handle:
        handle.writelines(buff)
# Reset the result files from any previous run
def isExist():
    if not os.path.exists(r'result.txt'):
        f = open('result.txt', 'w')
        f.close()
    else:
        os.remove('result.txt')
    if os.path.exists(r'result2.txt'):
        os.remove('result2.txt')
if __name__ == "__main__":
    isExist()
    # Read the target domains into the queue
    lines = open("domains.txt", "r")
    for line in lines:
        line = line.rstrip()
        q.put(line)
    # Start the worker threads
    for i in range(3):
        t = threading.Thread(target=scaner)
        t.start()
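A sketch of a typical run, assuming the script is saved as cscan.py alongside domains.txt (the output below is illustrative, not real results):

$ python cscan.py
www.example.com:1.2.3.1-1.2.3.254
mail.example.com:5.6.7.8,9.10.11.12 (may be using a CDN)

Segments that do not look CDN-fronted are appended to result.txt; result2.txt is written once the queue drains.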
Original post: http://llufei.blog.51cto.com/11390668/1758606