searchdomain.py

import whois
from concurrent.futures import ThreadPoolExecutor
import logging
import argparse
from . import db


class SearchDomain(object):
    """Search for available domains and save the results."""

    def __init__(self, debug=False, export_all=False):
        '''
        Initialize the searcher.
        debug       enable debug logging
        export_all  export every domain checked; by default only available domains are exported
        '''
        super(SearchDomain, self).__init__()
        self.export_all = export_all
        parser = argparse.ArgumentParser(description='Search available domains')
        parser.add_argument(
            "--input", help="set input domain list file, e.g. domain.txt", type=str, default="domain.txt")
        parser.add_argument(
            "--output", help="set output domain result list file, e.g. result.txt", type=str, default="result.txt")
        args = parser.parse_args()
        if args.input:
            self.input = args.input
        if args.output:
            self.output = args.output
        if debug:
            logging.basicConfig(level=logging.DEBUG)

    def crawl(self, domain: str, index: int) -> None:
        '''
        Check whether a single domain is available and save the result.
        :param domain: the domain to check
        :param index: position of the domain in the input file, used for logging
        '''
        res = False
        try:
            whois.whois(domain)
            # A successful whois lookup means the domain is already registered.
            logging.info(str(index) + ": searching domain: " + domain + " is unavailable.")
        except Exception as e:
            # An unregistered domain is reported as an error whose message
            # starts with "No match".
            if str(e).startswith("No match"):
                res = True
                logging.info(str(index) + ": searching domain: " + domain + " is available.")
            else:
                logging.error(e)
        if self.export_all or res:
            self.saveRes(domain, res)

    def saveRes(self, domain: str, res: bool):
        """Save one result to the output file."""
        # db.Mysql().save()
        db.File().save(self.output, domain + " " + str(res))

    def run(self):
        '''Read the input file and check every domain in a thread pool.'''
        with open(self.input, "r", encoding="utf8", errors="ignore") as file:
            pool = ThreadPoolExecutor(max_workers=5)
            index = 0
            for line in file.readlines():
                index = index + 1
                pool.submit(self.crawl, line.strip(), index)


if __name__ == '__main__':
    sd = SearchDomain()
    sd.run()
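
For reference, a rough sketch of the expected input and output, assuming domain.txt lists one domain per line (the domains below are hypothetical). saveRes writes each result as the domain followed by True or False, and by default only available domains are written; pass export_all=True to keep every result:

    domain.txt:
        example.com
        some-unregistered-name-12345.com

    result.txt:
        some-unregistered-name-12345.com True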