stock-info-crawler/app/runner.py
MH Hung e89567643b feat(openinsider): add OpenInsider insider-trading crawler with multi-symbol support and daily scheduling
- Add app/crawlers/openinsider.py; source: http://openinsider.com/search?q={symbol}

- Multi-symbol support: follow several tickers at once via SYMBOLS=PLTR,NVDA,... or a single one via SYMBOL (see the .env sketch below)

- runner: multi-instance scheduling and startup; /check triggers every crawler in turn

- API: /info, /stats, /check, /notify_test return multi-crawler responses

- config/base: add RUN_DAILY_AT for a fixed daily run time; falls back to CHECK_INTERVAL when unset

- notifications: add send_custom_email, send_text_webhook, send_text_discord

- Update README and .env.template; .env now sets CRAWLER_TYPE=openinsider

- Remove the quiver_insiders crawler and its related settings

BREAKING CHANGE: CRAWLER_TYPE=quiver_insiders is no longer supported; use openinsider instead.
2025-09-04 22:32:29 +08:00
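
For reference, a minimal .env sketch using the variables named in this commit (the variable names come from the message and the code below; the values are illustrative, and .env.template in the repo remains the authoritative list):

CRAWLER_TYPE=openinsider
SYMBOLS=PLTR,NVDA
# Run once a day at a fixed time (HH:MM, as accepted by schedule's .at())...
RUN_DAILY_AT=08:30
# ...or, when RUN_DAILY_AT is unset, poll on an interval in seconds instead
# CHECK_INTERVAL=3600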


from __future__ import annotations

import os
import threading
import time

import schedule

from app.config import load_config, setup_logging
from app.crawlers.barrons import BarronsCrawler
from app.crawlers.openinsider import OpenInsiderCrawler
from app.api.server import create_app


def start():
    # Load configuration and set up logging
    config = load_config()
    logger = setup_logging(config.log_level, config.log_dir)

    # Select crawler via env var
    crawler_type = (os.getenv('CRAWLER_TYPE') or 'barrons').lower()
    crawlers = []
    if crawler_type in ('openinsider', 'open_insider'):
        symbols_raw = os.getenv('SYMBOLS') or os.getenv('SYMBOL', 'PLTR')
        symbols = [s.strip().upper() for s in symbols_raw.split(',') if s.strip()]
        logger.info(f"Using OpenInsider insider-trading crawler, symbols={symbols}")
        for sym in symbols:
            crawlers.append(OpenInsiderCrawler(config, logger, symbol=sym))
    else:
        logger.info("Using Barron's stock-picks crawler")
        crawlers.append(BarronsCrawler(config, logger))

    # Create and start the API in a background thread
    app = create_app(crawlers if len(crawlers) > 1 else crawlers[0])

    def run_api():
        app.run(host='0.0.0.0', port=8080, debug=False)

    flask_thread = threading.Thread(target=run_api, daemon=True)
    flask_thread.start()

    # Schedule checks for each crawler and run the loop (blocking)
    if getattr(config, 'run_daily_at', None):
        for c in crawlers:
            schedule.every().day.at(config.run_daily_at).do(c.run_check)
        logger.info(f"🚀 Crawlers started, checking daily at {config.run_daily_at}: {[getattr(c, 'symbol', c.name) for c in crawlers]}")
    else:
        for c in crawlers:
            schedule.every(config.check_interval).seconds.do(c.run_check)
        logger.info(f"🚀 Crawlers started, checking every {config.check_interval} seconds: {[getattr(c, 'symbol', c.name) for c in crawlers]}")

    # Initial run for each crawler
    for c in crawlers:
        c.run_check()
        # Mark the first check done so ALWAYS_NOTIFY_ON_STARTUP logic only applies afterwards
        try:
            c._first_check_done = True
        except Exception:
            pass

    # Main loop
    try:
        while True:
            schedule.run_pending()
            time.sleep(1)
    except KeyboardInterrupt:
        logger.info("Stop signal received, shutting down...")