Implementing an IP-to-Domain Reverse Lookup Script


This script scrapes reverse-lookup results from three IP-to-domain lookup websites: ip138, Aizhan (愛站), and Chinaz (中國站長).

It uses the fake_useragent module to randomize the User-Agent header.

Install it with: pip3 install fake-useragent
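The header dictionaries in the listing below lost their values when this post was copied, so here is only a sketch of the usual fake_useragent pattern for building request headers (the exact headers the author used, including the Referer mentioned for Chinaz, are unknown):

from fake_useragent import UserAgent

ua = UserAgent()
print(ua.random)   # a random browser User-Agent string
print(ua.chrome)   # a random Chrome User-Agent string

# the kind of headers dict passed to requests.get() in the script below
headers = {'User-Agent': ua.random}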

The script is as follows:

import re, time, requests
from fake_useragent import UserAgent

ua = UserAgent()

# NOTE: the header values were lost when this post was copied; a random
# User-Agent from fake_useragent is assumed for each site.

# ip138
headers_ip138 = {'User-Agent': ua.random}

# Aizhan
headers_aizhan = {'User-Agent': ua.random}

# Chinaz (a Referer header was removed for this one)
headers_chinaz = {'User-Agent': ua.random}

def ip138_spider(ip):
    # NOTE: the ip138 lookup URL and the HTML-tag parts of the regexes were
    # stripped when this post was copied; the placeholders are kept as-is.
    ip138_url = '' + str(ip) + '/'
    ip138_r = requests.get(url=ip138_url, headers=headers_ip138, timeout=3).text
    ip138_address = re.findall(r"", ip138_r)  # geolocation (pattern lost in this copy)
    # result = re.findall(r"(.*?)", ip138_r)
    if '暫無結果' in ip138_r:  # "no results yet" marker on the page
        print('[+]ip:{}'.format(ip))
        print('Location: {}'.format(ip138_address[0]))
        print('No binding records found!')
    else:
        print('[+]ip:{}'.format(ip))
        print('Location: {}'.format(ip138_address[0]))
        result_time = re.findall(r"""class="date">(.*?)""", ip138_r)  # binding date
        result_site = re.findall(r"", ip138_r)  # bound domains (pattern lost in this copy)
        print('Binding records:')
        for i, j in enumerate(result_time):
            print('{}-----{}'.format(j, result_site[i]))
    print('-' * 25)

def chinaz_spider(ip):
    # NOTE: the Chinaz query URL and the tag parts of the regexes were stripped
    # when this post was copied; the placeholders are kept as-is.
    chinaz_url = '' + str(ip) + '&page='
    chinaz_re = requests.get(chinaz_url, headers=headers_chinaz).text
    # print(chinaz_re)
    if '沒有找到任何站點' in chinaz_re:  # "no sites found" marker on the page
        print('No sites found for this IP')
    else:
        # total number of sites that have resolved to this IP (pattern lost in this copy)
        chinaz_nums = re.findall(r'''(.*?)''', chinaz_re)
        print('[+]{} sites have resolved to this IP'.format(chinaz_nums[0]))
        if int(chinaz_nums[0]) > 20:
            # 20 results per page, so round up to get the page count
            pages = (int(chinaz_nums[0]) + 19) // 20
            for page in range(1, pages + 1):
                chinaz_page_url = chinaz_url + str(page)
                # print(chinaz_page_url)
                chinaz_page_r = requests.get(url=chinaz_page_url, headers=headers_chinaz, timeout=2).text
                # print(chinaz_page_r)
                # pull the domains resolved to this IP on this page
                chinaz_domains = re.findall(r'''\'\)\" target=_blank>(.*?)''', chinaz_page_r)
                # print(chinaz_domains)
                for chinaz_domain in chinaz_domains:
                    print(chinaz_domain)
                time.sleep(0.5)
        else:
            chinaz_address = re.findall(r'''\[(.*?)\]''', chinaz_re)  # location of the IP
            print('[+]Located in: {}'.format(chinaz_address[0]))
            chinaz_domains = re.findall(r'''\'\)\" target=_blank>(.*?)''', chinaz_re)
            # print(chinaz_domains)
            for chinaz_domain in chinaz_domains:
                print(chinaz_domain)
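# A quick check of the page-count arithmetic used above (and again in
# aizhan_spider below), assuming 20 results per page as the loops do:
#   (20 + 19) // 20 == 1 page
#   (21 + 19) // 20 == 2 pages
#   (45 + 19) // 20 == 3 pages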

def aizhan_spider(ip):
    # NOTE: the Aizhan URL and the leading parts of the regexes were stripped
    # when this post was copied; the placeholders are kept as-is.
    aizhan_url = '' + str(ip) + '/'
    aizhan_r = requests.get(url=aizhan_url, headers=headers_aizhan, timeout=2).text
    # print(aizhan_r)
    # 1. pull the IP's geolocation (pattern lost in this copy)
    aizhan_address = re.findall(r'''(.*?)''', aizhan_r)
    # print(aizhan_address)
    # print('[+]Location: {}'.format(aizhan_address[0]))  # e.g. 中國浙江
    # 2. pull how many domains have resolved to this IP (pattern lost in this copy)
    aizhan_nums = re.findall(r'''(.*?)''', aizhan_r)
    print('[+]This IP has {} resolved domains'.format(aizhan_nums[0]))
    if int(aizhan_nums[0]) > 0:
        if int(aizhan_nums[0]) > 20:
            # 20 results per page, so round up to get the page count
            pages = (int(aizhan_nums[0]) + 19) // 20
            # print('{} pages of results for this IP'.format(pages))
            for page in range(1, pages + 1):
                aizhan_page_url = aizhan_url + str(page) + '/'
                # print(aizhan_page_url)
                aizhan_page_r = requests.get(url=aizhan_page_url, headers=headers_aizhan, timeout=2).text
                # pull the domains resolved to this IP on this page
                aizhan_domains = re.findall(r'''rel="nofollow" target="_blank">(.*?)''', aizhan_page_r)
                # print(aizhan_domains)
                # print(len(aizhan_domains))
                for aizhan_domain in aizhan_domains:
                    print(aizhan_domain)
                time.sleep(0.5)
        else:
            # pull the domains resolved to this IP
            aizhan_domains = re.findall(r'''rel="nofollow" target="_blank">(.*?)''', aizhan_r)
            # print(aizhan_domains)
            # print(len(aizhan_domains))
            for aizhan_domain in aizhan_domains:
                print(aizhan_domain)
    else:
        print('0 domains resolve to this IP')

if __name__ == '__main__':
    ip = '220.181.38.148'
    # ip = '61.136.101.79'
    ip138_spider(ip)
    aizhan_spider(ip)
    chinaz_spider(ip)
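To run the same lookups against several addresses, the __main__ block could be swapped for a small loop like the sketch below (the ips.txt filename, with one IP per line, is hypothetical):

# read targets from a file and query all three sources for each one
with open('ips.txt') as f:
    for line in f:
        ip = line.strip()
        if not ip:
            continue
        ip138_spider(ip)
        aizhan_spider(ip)
        chinaz_spider(ip)
        time.sleep(1)  # small pause between targets to avoid hammering the sites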
