# searchdomain.py
import whois
from concurrent.futures import ThreadPoolExecutor
import os
import sys
import re
import json
import logging
import argparse
from . import db
  10. class SearchDomain(object):
  11. """search avaliable domain and save result"""
  12. def __init__(self, debug=False):
  13. super(SearchDomain, self).__init__()
  14. parser = argparse.ArgumentParser(description='Demo of argparse')
  15. parser.add_argument(
  16. "--input", help="set input domain list file,eg: domain.txt", type=str, default="domain.txt")
  17. parser.add_argument(
  18. "--output", help="set output domain result list file,eg: result.txt", type=str, default="result.txt")
  19. args = parser.parse_args()
  20. if args.input:
  21. self.input = args.input
  22. if args.output:
  23. self.output = args.output
  24. if debug == True:
  25. logging.basicConfig(level=logging.DEBUG)
  26. def crawl(self, domain: str) -> None:
  27. '''
  28. 检测域名是否可用
  29. :params domain 域名:
  30. :return true or false'''
  31. res = False
  32. try:
  33. whi = whois.whois(domain)
  34. res = False
  35. except Exception as e:
  36. if(str(e).index("No match") == 0):
  37. res = True
  38. else:
  39. res = False
  40. self.saveRes(domain, res)
  41. def saveRes(self, domain: str, res: bool):
  42. # db.Mysql().save()
  43. db.File().save(self.output, domain + " " + str(res))
  44. def run(self):
  45. '''begin search domain'''
  46. with open(self.input, "r", encoding="utf8", errors="ignore") as file:
  47. pool = ThreadPoolExecutor(max_workers=10)
  48. for line in file.readlines():
  49. pool.submit(self.crawl, line.strip())
  50. if __name__ == '__main__':
  51. sd = SearchDomain()
  52. sd.run()