update
This commit is contained in:
96
scripts/main.py
Normal file
96
scripts/main.py
Normal file
@@ -0,0 +1,96 @@
|
||||
|
||||
from flask import Flask, jsonify
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from nasa1 import scrape as scrape_nasa1
|
||||
from nasa2 import scrape as scrape_nasa2
|
||||
from esa import scrape as scrape_esa
|
||||
from threading import Lock
|
||||
|
||||
app = Flask(__name__)
# Shared thread pool used to run scrapers in the background (default worker count).
executor = ThreadPoolExecutor()
lock = Lock()  # guards updates to scrape_running across concurrent request threads

scrape_running = False  # True while any scraper job is in progress
|
||||
|
||||
def run_nasa1():
    """Execute the first NASA scraper, then clear the global busy flag."""
    global scrape_running
    try:
        scrape_nasa1()
    finally:
        # Release the busy flag even if the scrape raised.
        scrape_running = False
|
||||
|
||||
def run_nasa2():
    """Execute the second NASA scraper, then clear the global busy flag."""
    global scrape_running
    try:
        scrape_nasa2()
    finally:
        # Release the busy flag even if the scrape raised.
        scrape_running = False
|
||||
|
||||
def run_esa():
    """Execute the ESA scraper, then clear the global busy flag."""
    global scrape_running
    try:
        scrape_esa()
    finally:
        # Release the busy flag even if the scrape raised.
        scrape_running = False
|
||||
def run_all():
    """Run every scraper (NASA #1, NASA #2, ESA) sequentially, then clear the busy flag.

    Bug fix: the original body only called ``scrape_esa()``, so the
    ``/scrape/all`` endpoint never actually scraped the NASA sources.
    """
    global scrape_running
    try:
        scrape_nasa1()
        scrape_nasa2()
        scrape_esa()
    finally:
        # Always release the busy flag, even if one of the scrapes raised.
        scrape_running = False
|
||||
@app.route('/scrape/nasa', methods=['GET'])
def nasa():
    """Start the NASA #1 scraper in the background.

    Returns immediately with a JSON status message; the scrape itself runs
    on the shared executor and resets ``scrape_running`` when it finishes.

    Bug fix: the busy-flag check is now performed while holding ``lock``.
    The original read ``scrape_running`` *before* acquiring the lock
    (check-then-act race), so two concurrent requests could both start a
    scrape.
    """
    global scrape_running
    with lock:
        if scrape_running:
            return jsonify({"message": "NASA scraping is already in progress."})
        scrape_running = True
        executor.submit(run_nasa1)  # run the scraper asynchronously
    return jsonify({"message": "NASA scraping started."})
|
||||
|
||||
@app.route('/scrape/nasa2', methods=['GET'])
def nasa2():
    """Start the NASA #2 scraper in the background.

    Returns immediately with a JSON status message; the scrape itself runs
    on the shared executor and resets ``scrape_running`` when it finishes.

    Bug fix: the busy-flag check is now performed while holding ``lock``
    to close the check-then-act race present in the original (the flag was
    read before the lock was acquired).
    """
    global scrape_running
    with lock:
        if scrape_running:
            return jsonify({"message": "NASA scraping is already in progress."})
        scrape_running = True
        executor.submit(run_nasa2)  # run the scraper asynchronously
    return jsonify({"message": "NASA scraping started."})
|
||||
|
||||
|
||||
@app.route('/scrape/esa', methods=['GET'])
def esa():
    """Start the ESA scraper in the background.

    Returns immediately with a JSON status message.

    Bug fixes:
    - The original submitted ``scrape_esa`` directly instead of the
      ``run_esa`` wrapper, so ``scrape_running`` was never reset after an
      ESA scrape and every subsequent request was refused forever.
    - The busy-flag check is now performed while holding ``lock`` to close
      the check-then-act race (the flag was read before the lock).
    """
    global scrape_running
    with lock:
        if scrape_running:
            return jsonify({"message": "ESA scraping is already in progress."})
        scrape_running = True
        executor.submit(run_esa)  # wrapper resets scrape_running when done
    return jsonify({"message": "ESA scraping started."})
|
||||
|
||||
|
||||
@app.route('/scrape/all', methods=['GET'])
def scrape_all():
    """Start all scrapers (NASA #1, NASA #2, ESA) in the background.

    Returns immediately with a JSON status message.

    Bug fixes:
    - The original called ``executor.submit(scrape_all)`` — submitting this
      Flask view function to itself instead of the ``run_all`` worker, so
      no scraper ever ran and the flag was never reset.
    - The busy-flag check is now performed while holding ``lock`` to close
      the check-then-act race (the flag was read before the lock).
    """
    global scrape_running
    with lock:
        if scrape_running:
            return jsonify({"message": "All scraping is already in progress."})
        scrape_running = True
        executor.submit(run_all)  # worker resets scrape_running when done
    return jsonify({"message": "All scraping started."})
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # NOTE(review): use_reloader=False — presumably disabled so the debug
    # reloader does not start a second process that would duplicate the
    # module-level executor/lock state; confirm with the author.
    app.run(debug=True, use_reloader=False)
|
||||
Reference in New Issue
Block a user