lqg 2 years ago
parent commit e49dd3241e
12 changed files with 236 additions and 35 deletions
  1. .gitignore (+135 −0)
  2. README.md (+4 −1)
  3. bin/searchdomain (+8 −0)
  4. conf/conf.yaml (+0 −0)
  5. docker-compose.yml (+1 −4)
  6. domain.txt (+3 −0)
  7. main.py (+1 −2)
  8. searchdomain/config.py (+15 −2)
  9. searchdomain/db.py (+7 −5)
  10. searchdomain/domain_notify.py (+13 −1)
  11. searchdomain/searchdomain.py (+37 −20)
  12. test/search.py (+12 −0)

+ 135 - 0
.gitignore

@@ -0,0 +1,135 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+#  Usually these files are written by a python script from a template
+#  before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+#   However, in case of collaboration, if having platform-specific dependencies or dependencies
+#   having no cross-platform support, pipenv may install dependencies that don't work, or not
+#   install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# filter (project-specific)
+/Download/
+readconf.py
+static/uploads
+.flaskenv

+ 4 - 1
README.md

@@ -6,7 +6,7 @@
 
 
 ```
-virtualvenv .venv
+virtualenv .venv
 pip install -r requirements.txt
 ```
 
@@ -20,6 +20,9 @@ python generateDomain.py
 python main.py
 ```
 
+## windows
+
+Command-line mode:
 
 ## docker
 

+ 8 - 0
bin/searchdomain

@@ -0,0 +1,8 @@
+#!/bin/bash
+# @Contact :   liuyuqi.gov@msn.cn
+# @Time    :   2022/12/14 21:03:32
+# @License :   (C)Copyright 2022 liuyuqi.
+# @Desc    :   
+###############################################################################
+
+cd "$(dirname "$0")/.." && python main.py

+ 0 - 0
conf/conf.yaml


+ 1 - 4
docker-compose.yml

@@ -2,7 +2,4 @@ version: '3.4'
 
 services:
   searchdomain:
-    image: searchdomain
-    build:
-      context: .
-      dockerfile: ./Dockerfile
+    image: jianboy/searchdomain

+ 3 - 0
domain.txt

@@ -0,0 +1,3 @@
+baidu.com
+sin.com
+baiduasdff44343.com
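
domain.txt is the default input read by SearchDomain: one candidate domain per line. The generateDomain.py mentioned in the README is not part of this commit; the following is only a hypothetical sketch of a generator producing that one-domain-per-line format:

```python
# Hypothetical sketch: write candidate .com domains, one per line,
# in the format searchdomain expects (see domain.txt above).
import itertools
import string

with open("domain.txt", "w", encoding="utf-8") as f:
    for chars in itertools.product(string.ascii_lowercase, repeat=3):
        f.write("".join(chars) + ".com\n")
```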

+ 1 - 2
main.py

@@ -10,7 +10,6 @@
 
 from searchdomain import SearchDomain
 
-
 if __name__== "__main__":
-    searchdomain = SearchDomain()
+    searchdomain = SearchDomain(debug=False)
     searchdomain.run()

+ 15 - 2
searchdomain/config.py

@@ -3,5 +3,18 @@ class Config(object):
     def __init__(self):
         pass
 
-if __name__ == "__main__":
-    pass
+class DevelopmentConfig(Config):
+
+    def __init__(self):
+        pass
+
+class ProductionConfig(Config):
+    
+    def __init__(self):
+        pass
+
+config = {
+    "default": DevelopmentConfig,
+    "develop": DevelopmentConfig,
+    "production": ProductionConfig
+}
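
The config mapping selects a Config subclass by name. A minimal consumption sketch; the "ENV" variable name and the fallback to "default" are assumptions, not shown in this commit:

```python
# Hypothetical usage sketch: pick a Config subclass from the mapping.
import os
from searchdomain.config import config

cfg_cls = config.get(os.environ.get("ENV", "default"), config["default"])
cfg = cfg_cls()
```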

+ 7 - 5
searchdomain/db.py

@@ -1,5 +1,5 @@
 import os,sys,re,json
-import mysql
+# import mysql
 
 class Db():
     '''Db interface'''
@@ -11,8 +11,11 @@ class Db():
 
 class File(Db):
     '''Save results to a file'''
-    def save(domain: str, res: bool):
-        return super().save(res)
+
+    def save(self, filePath: str, res: str):
+        # super().save(res)
+        with open(filePath, 'a+', encoding='utf-8') as file:
+            file.write(res + "\n")
 
 class Mysql(Db):
     '''Save results to a MySQL database'''
@@ -22,5 +25,4 @@ class Mysql(Db):
 class Sqlite(Db):
     '''Save results to SQLite'''
     def save(self, domain: str, res: bool):
-        return super().save(res)
-
+        return super().save(res)
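
File.save appends one line per call, so results accumulate across a run. A usage sketch; the file name and result strings are illustrative:

```python
from searchdomain.db import File

f = File()
f.save("result.txt", "baidu.com    False")            # registered
f.save("result.txt", "baiduasdff44343.com    True")   # available
```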

+ 13 - 1
searchdomain/domain_notify.py

@@ -2,7 +2,8 @@ import whois
 from concurrent.futures import ThreadPoolExecutor
 import os,sys,re,json
 # import requests
-
+import time, datetime
+from .push import EmailPush
 class DomainNotify(object):
     """域名到期推送"""
     def __init__(self):
@@ -16,6 +17,13 @@ class DomainNotify(object):
         res=False
         try:
             whi = whois.whois(domain)
+            expirationDate = whi.expiration_date
+            notifyDate = str(expirationDate - datetime.timedelta(days=1))
+            today = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
+            days = DomainNotify.getDays(notifyDate, today)
+            if days <= 3:
+                push = EmailPush()
+                push.push()
             res= False
         except Exception as e:
             if str(e).startswith("No match"):
@@ -25,6 +33,10 @@ class DomainNotify(object):
         self.saveRes(domain,res)
         self.notify(domain,res)
     
+    @staticmethod
+    def getDays(notifyDay: str, today: str) -> int:
+        '''Days between two "%Y-%m-%d %H:%M:%S" timestamps (notifyDay - today).'''
+        fmt = "%Y-%m-%d %H:%M:%S"
+        return (datetime.datetime.strptime(notifyDay, fmt)
+                - datetime.datetime.strptime(today, fmt)).days
+
     def notify(self, domain, res):
         '''Push the result'''
         pass
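
One caveat worth noting here: python-whois may return expiration_date either as a single datetime or as a list of datetimes, depending on the registrar. A hedged normalization sketch (the helper name and the choice of the first list entry are assumptions):

```python
# Sketch: normalize whois expiration_date before doing date arithmetic.
import datetime

def normalize_expiration(value):
    if isinstance(value, list):
        value = value[0]  # assumption: take the first reported date
    if isinstance(value, datetime.datetime):
        return value
    return None  # some registrars return a string or nothing
```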

+ 37 - 20
searchdomain/searchdomain.py

@@ -1,41 +1,58 @@
 import whois
 from concurrent.futures import ThreadPoolExecutor
-import os,sys,re,json
-# import requests
+import os
+import sys
+import re
+import json
 import logging
+import argparse
+from . import db
+
+
 class SearchDomain(object):
     """search avaliable domain and save result"""
 
-    def __init__(self):
-        super(SearchDomain,self).__init__()
-    
-    def crawl(self, domain:str)->None:
+    def __init__(self, debug=False):
+        super(SearchDomain, self).__init__()
+        parser = argparse.ArgumentParser(description='search available domains')
+        parser.add_argument(
+            "--input", help="set input domain list file,eg: domain.txt", type=str, default="domain.txt")
+        parser.add_argument(
+            "--output", help="set output domain result list file,eg: result.txt", type=str, default="result.txt")
+        args = parser.parse_args()
+        if args.input:
+            self.input = args.input
+        if args.output:
+            self.output = args.output
+        if debug:
+            logging.basicConfig(level=logging.DEBUG)
+
+    def crawl(self, domain: str) -> None:
         '''
         Check whether a domain is available and save the result.
         :param domain: domain name to check
         :return: None; the result is written via saveRes'''
-        res=False
+        res = False
         try:
             whi = whois.whois(domain)
-            res= False
+            res = False
         except Exception as e:
-            if(str(e).index("No match")==0):
-                res= True
+            if str(e).startswith("No match"):
+                res = True
             else:
-                res= False
-        self.saveRes(domain,res)
+                res = False
+        self.saveRes(domain, res)
 
-    def saveRes(damin:str, res:bool):
-        # mysql.save()
-        # file.save()
-        pass
+    def saveRes(self, domain: str, res: bool):
+        # db.Mysql().save()
+        db.File().save(self.output, domain + "    " + str(res))
 
     def run(self):
         '''begin search domain'''
-        with open("res/res.json","w",encoding="utf8") as file:
-            pool=ThreadPoolExecutor(max_workers=10)
-            for i in range(100):
-                pool.submit(self.crawl, domain)
+        with open(self.input, "r", encoding="utf8", errors="ignore") as file:
+            pool = ThreadPoolExecutor(max_workers=10)
+            for line in file.readlines():
+                pool.submit(self.crawl, line.strip())
 
 if __name__ == '__main__':
     sd = SearchDomain()
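
SearchDomain parses --input and --output from sys.argv inside __init__, so the flags are normally supplied on the command line (python main.py --input domain.txt --output result.txt). A programmatic usage sketch; overriding sys.argv is only one way to drive the argparse-based constructor:

```python
# Usage sketch: feed the argparse flags defined in SearchDomain.__init__.
import sys

sys.argv = ["main.py", "--input", "domain.txt", "--output", "result.txt"]
from searchdomain import SearchDomain

sd = SearchDomain(debug=True)  # debug=True enables logging.DEBUG
sd.run()                       # one whois lookup per input line, 10 workers
```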

+ 12 - 0
test/search.py

@@ -0,0 +1,12 @@
+import whois
+
+res = False
+try:
+    whi = whois.whois("sin.com")
+    res = False
+except Exception as e:
+    if str(e).startswith("No match"):
+        res = True
+    else:
+        res = False
+print(res)
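
The test exercises the same availability heuristic used by SearchDomain.crawl: a successful whois lookup means the domain is registered, and an exception whose text starts with "No match" means it is free. A sketch of that check wrapped as a reusable helper (the function name is illustrative):

```python
# Sketch: the availability check from test/search.py as a function.
import whois

def is_available(domain: str) -> bool:
    try:
        whois.whois(domain)
        return False  # lookup succeeded: domain is registered
    except Exception as e:
        # python-whois raises with "No match..." for unregistered domains
        return str(e).startswith("No match")

print(is_available("sin.com"))
print(is_available("baiduasdff44343.com"))
```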