import whois
from concurrent.futures import ThreadPoolExecutor
import json


class SearchDomain(object):
    """Check candidate domains against WHOIS to see whether they are still unregistered."""

    def __init__(self):
        super(SearchDomain, self).__init__()

    def crawl(self, domain: str) -> bool:
        """Return True if the domain appears to be unregistered."""
        try:
            whois.whois(domain)
            # A successful lookup means the domain is already taken.
            return False
        except Exception as e:
            # An unregistered domain makes the lookup raise an error whose
            # message contains "No match".
            if "No match" in str(e):
                return True
            return False

    def run(self):
        # Placeholder candidate list: the original snippet loops over range(100)
        # without showing how each domain name is built.
        domains = ["example{}.com".format(i) for i in range(100)]
        results = {}
        with ThreadPoolExecutor(max_workers=10) as pool:
            futures = {d: pool.submit(self.crawl, d) for d in domains}
            for d, future in futures.items():
                results[d] = future.result()
        # Persist the availability map once all lookups have finished.
        with open("res/res.json", "w", encoding="utf8") as file:
            json.dump(results, file, ensure_ascii=False, indent=2)


if __name__ == '__main__':
    ss = SearchDomain()
    ss.run()
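A quick way to sanity-check the class on single names, assuming the code above is in the same file (both domain names here are purely illustrative examples, not from the original script):

    checker = SearchDomain()
    print(checker.crawl("google.com"))             # False: the domain is registered
    print(checker.crawl("example-abc123456.com"))  # likely True if nobody owns it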