Files
stock-info-crawler/app/config.py
MH Hung e89567643b feat(openinsider): 新增 OpenInsider 內部人交易爬蟲,支援多標的與每日排程
- 新增 app/crawlers/openinsider.py,來源 http://openinsider.com/search?q={symbol}

- 支援多標的:以 SYMBOLS=PLTR,NVDA,... 同時追多檔(或使用 SYMBOL 單一)

- runner: 多實例排程與啟動;/check 會依序觸發全部爬蟲

- API: /info、/stats、/check、/notify_test 支援多爬蟲回應

- config/base: 新增 RUN_DAILY_AT 每日固定時間;未設定則用 CHECK_INTERVAL

- notifications: 新增 send_custom_email、send_text_webhook、send_text_discord

- README 與 .env.template 更新;.env 改為 CRAWLER_TYPE=openinsider

- 移除 quiver_insiders 爬蟲與相關設定

BREAKING CHANGE: 不再支援 CRAWLER_TYPE=quiver_insiders;請改用 openinsider。
2025-09-04 22:32:29 +08:00

99 lines
2.9 KiB
Python

import os
import logging
from dataclasses import dataclass
@dataclass
class EmailConfig:
    """SMTP settings used for sending notification emails.

    Populated from EMAIL_* environment variables by ``load_email_config``.
    """

    smtp_server: str    # SMTP host name or IP address
    smtp_port: int      # connection port, e.g. 465 (ssl) / 587 (starttls) / 25
    smtp_security: str  # 'ssl' | 'starttls' | 'none'
    from_email: str     # sender address (EMAIL_FROM)
    to_email: str       # recipient address (EMAIL_TO)
    username: str       # SMTP auth user
    password: str       # SMTP auth password
@dataclass
class AppConfig:
    """Top-level application configuration assembled by ``load_config``.

    All values originate from environment variables (with defaults) except
    ``email``, which is a nested :class:`EmailConfig` or ``None`` when the
    mandatory EMAIL_* variables are absent.
    """

    check_interval: int             # polling interval in seconds (CHECK_INTERVAL)
    log_level: str                  # logging level name, e.g. 'INFO'
    always_notify_on_startup: bool  # send a notification on startup regardless of changes
    webhook_url: str | None         # generic webhook target, optional
    discord_webhook: str | None     # Discord webhook target, optional
    data_dir: str                   # directory for persisted crawler data
    log_dir: str                    # directory for log files
    email: EmailConfig | None       # SMTP settings, or None when not configured
    run_daily_at: str | None        # fixed daily run time "HH:MM"; None -> use check_interval
def _resolve_dir(env_key: str, default_subdir: str) -> str:
# Prefer explicit env var
val = os.getenv(env_key)
if val:
return val
# Prefer Docker paths if present
docker_path = f"/app/{default_subdir}"
if os.path.isdir(docker_path):
return docker_path
# Fallback to local ./subdir
return os.path.join(os.getcwd(), default_subdir)
def load_email_config() -> EmailConfig | None:
    """Build an :class:`EmailConfig` from EMAIL_* environment variables.

    Returns ``None`` unless every mandatory variable (server, from, to,
    username, password) is present and non-empty. The port defaults to the
    conventional port for the selected security mode when EMAIL_SMTP_PORT
    is not set.
    """
    mandatory = (
        'EMAIL_SMTP_SERVER', 'EMAIL_FROM', 'EMAIL_TO', 'EMAIL_USERNAME', 'EMAIL_PASSWORD'
    )
    if any(not os.getenv(name) for name in mandatory):
        return None
    security = os.getenv('EMAIL_SMTP_SECURITY', 'starttls').lower()
    # Conventional defaults: 465 for implicit SSL, 587 for STARTTLS, 25 otherwise.
    if security == 'ssl':
        fallback_port = 465
    elif security == 'starttls':
        fallback_port = 587
    else:
        fallback_port = 25
    return EmailConfig(
        smtp_server=os.getenv('EMAIL_SMTP_SERVER', ''),
        smtp_port=int(os.getenv('EMAIL_SMTP_PORT', fallback_port)),
        smtp_security=security,
        from_email=os.getenv('EMAIL_FROM', ''),
        to_email=os.getenv('EMAIL_TO', ''),
        username=os.getenv('EMAIL_USERNAME', ''),
        password=os.getenv('EMAIL_PASSWORD', ''),
    )
def setup_logging(level: str, log_dir: str) -> logging.Logger:
    """Configure root logging to both a file and the console.

    Args:
        level: Log level name (case-insensitive, e.g. 'info'); unknown
            names fall back to INFO.
        log_dir: Directory that will hold 'crawler.log'; created if missing.

    Returns:
        A logger bound to this module's name.
    """
    os.makedirs(log_dir, exist_ok=True)
    logging.basicConfig(
        level=getattr(logging, level.upper(), logging.INFO),
        format='%(asctime)s - %(levelname)s - %(message)s',
        handlers=[
            # Explicit UTF-8: this app logs non-ASCII text, and the default
            # locale encoding (e.g. cp950 on Windows) can raise UnicodeEncodeError.
            logging.FileHandler(os.path.join(log_dir, 'crawler.log'), encoding='utf-8'),
            logging.StreamHandler(),
        ],
    )
    return logging.getLogger(__name__)
def load_config() -> AppConfig:
    """Assemble the application configuration from environment variables.

    Every field has a sensible default so the app can start with an empty
    environment; email settings are delegated to ``load_email_config`` and
    directories to ``_resolve_dir``.
    """
    # '1'/'true'/'yes' (case-insensitive) enable the startup notification.
    notify_on_startup = os.getenv('ALWAYS_NOTIFY_ON_STARTUP', 'false').lower() in ('1', 'true', 'yes')
    return AppConfig(
        check_interval=int(os.getenv('CHECK_INTERVAL', 300)),
        log_level=os.getenv('LOG_LEVEL', 'INFO'),
        always_notify_on_startup=notify_on_startup,
        webhook_url=os.getenv('WEBHOOK_URL'),
        discord_webhook=os.getenv('DISCORD_WEBHOOK'),
        data_dir=_resolve_dir('DATA_DIR', 'data'),
        log_dir=_resolve_dir('LOG_DIR', 'logs'),
        email=load_email_config(),
        run_daily_at=os.getenv('RUN_DAILY_AT'),  # e.g. "12:00"; None -> interval mode
    )