refactor: modularize project structure and separate API from crawlers
- Introduce app/ package with config, services (storage, notifications), API server, and crawler modules
- Add BaseCrawler and BarronsCrawler; extract notifications and storage
- Keep enhanced_crawler.py as a back-compat entry delegating to app.runner (see the sketch after this list)
- Add a template crawler for future sites
- Update README with the new structure and usage
- Extend .env.template with DATA_DIR/LOG_DIR options
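
As a rough illustration only (the actual enhanced_crawler.py and app/config.py contents are not shown in this commit excerpt), the back-compat entry and the new .env options might be wired up along these lines; the main() entry point and the default directory values are assumptions, not part of this diff:

    # enhanced_crawler.py -- hypothetical back-compat shim; assumes app.runner exposes main()
    from app.runner import main

    if __name__ == '__main__':
        main()

    # app/config.py -- hypothetical reading of the new .env options
    import os

    DATA_DIR = os.getenv('DATA_DIR', 'data')  # assumed default
    LOG_DIR = os.getenv('LOG_DIR', 'logs')    # assumed default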
app/services/storage.py (new file, 23 lines)
@@ -0,0 +1,23 @@
+import json
+import os
+from typing import Any, Dict
+
+
+def data_file_path(data_dir: str, name: str) -> str:
+    os.makedirs(data_dir, exist_ok=True)
+    return os.path.join(data_dir, name)
+
+
+def load_json(path: str) -> Dict[str, Any]:
+    try:
+        with open(path, 'r', encoding='utf-8') as f:
+            return json.load(f)
+    except FileNotFoundError:
+        return {'last_update': None, 'stock_picks': [], 'stats': {}}
+
+
+def save_json(path: str, data: Dict[str, Any]) -> None:
+    os.makedirs(os.path.dirname(path), exist_ok=True)
+    with open(path, 'w', encoding='utf-8') as f:
+        json.dump(data, f, ensure_ascii=False, indent=2)
+
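
For reference, a crawler might call these helpers roughly as follows; this is an illustrative sketch only, the call site and file name are placeholders, and the 'data' directory would normally come from the DATA_DIR setting:

    from app.services.storage import data_file_path, load_json, save_json

    # hypothetical usage inside a crawler run
    path = data_file_path('data', 'barrons_picks.json')  # placeholder data dir and file name
    state = load_json(path)                              # returns the default skeleton if the file is missing
    state['last_update'] = '2024-01-01T00:00:00Z'        # placeholder timestamp
    save_json(path, state)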