refactor: modularize project structure and separate API from crawlers
- Introduce app/ package with config, services (storage, notifications), API server, and crawler modules
- Add BaseCrawler and BarronsCrawler; extract notification and storage services
- Keep enhanced_crawler.py as a backward-compatible entry point delegating to app.runner
- Add a template crawler for future sites
- Update README with the new structure and usage
- Extend .env.template with DATA_DIR/LOG_DIR options
This commit is contained in:
29
app/runner.py
Normal file
29
app/runner.py
Normal file
@@ -0,0 +1,29 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import threading
|
||||
|
||||
from app.config import load_config, setup_logging
|
||||
from app.crawlers.barrons import BarronsCrawler
|
||||
from app.api.server import create_app
|
||||
|
||||
|
||||
def start(host: str = '0.0.0.0', port: int = 8080) -> None:
    """Bootstrap the application: config, logging, API server, and crawler.

    Serves the Flask API from a background daemon thread, then runs the
    crawler loop in the main thread (blocking until the crawler exits).

    Args:
        host: Interface the API server binds to. Defaults to all interfaces.
        port: TCP port for the API server.
    """
    # Load configuration and set up logging first so everything below can log.
    config = load_config()
    logger = setup_logging(config.log_level, config.log_dir)

    # Create the crawler instance; it is shared with the API server below.
    crawler = BarronsCrawler(config, logger)

    # Build the Flask app around the crawler and run it in a daemon thread
    # so it never blocks the crawler loop and dies with the process.
    app = create_app(crawler)

    def run_api():
        # debug=False: Flask's reloader cannot run outside the main thread.
        app.run(host=host, port=port, debug=False)

    flask_thread = threading.Thread(target=run_api, daemon=True)
    flask_thread.start()

    # Run crawler loop (blocking) in the main thread.
    crawler.run()
|
||||
|
Reference in New Issue
Block a user