
set static folder

liuyuqi-dellpc, 7 months ago
parent commit c8d667c1a1
3 changed files with 53 additions and 48 deletions
1. crawl_xiaohua/__init__.py (+7 -38)
2. crawl_xiaohua/crawl_xiaohua.py (+39 -10)
3. docs/Development.md (+7 -0)

+ 7 - 38
crawl_xiaohua/__init__.py

@@ -6,44 +6,13 @@
 @License :   Copyright © 2017-2022 liuyuqi. All Rights Reserved.
 @Desc    :   main function
 '''
-import time
-import sys
-import re
-import os
-from crawl_xiaohua.crawl_xiaohua import CrawlXiaohua
-from crawl_xiaohua.extractor.mzsock import Mzsock
-from crawl_xiaohua.extractor.xiaohuar import Xiaohuar
-from flask import Flask
-
 
-def server(config: str, argv=None):
-    ''' web server mode '''
-    if argv is None:
-        argv = sys.argv
-    else:
-        sys.argv.extend(argv)
-    app = Flask(__name__)
-    app.run()
+from crawl_xiaohua.crawl_xiaohua import CrawlXiaohua
 
+def server(config: str, argv=None):
+    crawl_xiaohua = CrawlXiaohua()
+    crawl_xiaohua.server(config=config, argv=argv)
 
-def run(extractor: str, cmd: str, argv=None):
-    ''' shell mode '''
-    if argv is None:
-        argv = sys.argv
-    if extractor == 'xiaohua':
-        crawl = CrawlXiaohua()
-        if cmd == 'duanzi':
-            crawl.crawlDuanzi()
-        else:
-            crawl.crawl()
-    elif extractor == 'xiaohuar':
-        crawl = Xiaohuar()
-        crawl.run()
-    elif extractor == 'mzsock':
-        crawl = Mzsock()
-        categroy_urls = crawl.get_categroy_url()
-        urllist = crawl.get_urllist(categroy_urls)
-        contentlist = crawl.get_contentlist(urllist)
-        crawl.get_content(contentlist)
-    else:
-        print('unknown extractor: %s' % extractor)
+def run(extractor: str, cmd: str, argv=None):
+    crawl_xiaohua = CrawlXiaohua()
+    crawl_xiaohua.run(extractor, cmd, argv=argv)
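
After this change, `crawl_xiaohua/__init__.py` keeps only thin wrappers that construct a `CrawlXiaohua` and delegate to its `server()` / `run()` methods, so external callers keep the same entry points. A minimal usage sketch (the config path is an assumption, not part of this commit):

```python
# Hypothetical caller of the refactored entry points; the real CLI wiring
# (e.g. in main.py) is not shown in this commit.
from crawl_xiaohua import run, server

run('xiaohua', 'crawl')         # shell mode: delegates to CrawlXiaohua.run()
# server('conf/config.json')    # web server mode; config path is assumed
```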

+ 39 - 10
crawl_xiaohua/crawl_xiaohua.py

@@ -7,15 +7,11 @@
 @Desc    :   crawl xiaohua.com
 '''
 
-from contextlib import closing
-import os
-import random
-import time
-from crawl_xiaohua.libs.json_conf import JsonConf
-import requests
-from crawl_xiaohua import api
-import bs4
-import pandas as pd
+import sys
+from crawl_xiaohua.extractor.mzsock import Mzsock
+from crawl_xiaohua.extractor.xiaohuar import Xiaohuar
 from flask import Flask
 
 class CrawlXiaohua():
@@ -24,4 +20,37 @@ class CrawlXiaohua():
     def __init__(self):
         ''' init '''
         pass
-    
+        
+    def server(self, config: str, argv=None):
+        ''' web server mode '''
+        if argv is None:
+            argv = sys.argv
+        else:
+            sys.argv.extend(argv)
+        app = Flask(__name__,
+                    static_folder='web/static',
+                    template_folder='web/templates')
+        app.run()
+
+
+    def run(self, extractor: str, cmd: str, argv=None):
+        ''' shell mode '''
+        if argv is None:
+            argv = sys.argv
+        if extractor == 'xiaohua':
+            crawl = CrawlXiaohua()
+            if cmd == 'duanzi':
+                crawl.crawlDuanzi()
+            else:
+                crawl.crawl()
+        elif extractor == 'xiaohuar':
+            crawl = Xiaohuar()
+            crawl.run()
+        elif extractor == 'mzsock':
+            crawl = Mzsock()
+            categroy_urls = crawl.get_categroy_url()
+            urllist = crawl.get_urllist(categroy_urls)
+            contentlist = crawl.get_contentlist(urllist)
+            crawl.get_content(contentlist)
+        else:
+            print('unknown extractor: %s' % extractor)
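
The change behind the commit title is the `Flask(...)` call above: the app now serves assets from `web/static` and renders templates from `web/templates` instead of Flask's default `static/` and `templates/` folders. A standalone sketch of the same pattern, assuming a `web/` directory next to the module:

```python
from flask import Flask

# Relative folder paths are resolved against the application's root path.
app = Flask(__name__,
            static_folder='web/static',        # files served under /static/<filename>
            template_folder='web/templates')   # searched by render_template()

if __name__ == '__main__':
    app.run()
```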

+ 7 - 0
docs/Development.md

@@ -47,3 +47,10 @@ pip install pyinstaller
 pyinstaller -F -c -i launch200.ico main.py
 ```
 
+
+**Additional packaging:**
+```
+python setup.py bdist_wheel
+
+python setup.py install
+```
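
Both commands assume a `setup.py` at the repository root, which is not part of this commit. A hypothetical minimal sketch of such a file, with name, version, and dependencies given only as placeholders:

```python
# Hypothetical setup.py sketch; metadata and dependencies are assumptions.
from setuptools import setup, find_packages

setup(
    name='crawl_xiaohua',
    version='0.1.0',
    packages=find_packages(),
    install_requires=['flask', 'requests'],  # assumed runtime dependencies
)
```

The wheel built by `bdist_wheel` ends up under `dist/` and can be installed with `pip install dist/<wheel-file>.whl` as an alternative to `setup.py install`.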