- Introduce app/ package with config, services (storage, notifications), API server, and crawler modules
- Add BaseCrawler and BarronsCrawler; extract notifications and storage into services
- Keep enhanced_crawler.py as a back-compat entry point delegating to app.runner
- Add a template crawler for future sites
- Update README with the new structure and usage
- Extend .env.template with DATA_DIR/LOG_DIR options
30 lines
705 B
Python
30 lines
705 B
Python
from __future__ import annotations
|
|
|
|
import threading
|
|
|
|
from app.config import load_config, setup_logging
|
|
from app.crawlers.barrons import BarronsCrawler
|
|
from app.api.server import create_app
|
|
|
|
|
|
def start(host: str = '0.0.0.0', port: int = 8080) -> None:
    """Boot the application: config, logging, crawler, and background API.

    The Flask API runs in a daemon thread so it is torn down with the
    process, while the crawler loop owns the main thread (blocking call).

    Args:
        host: Interface the API server binds to. Defaults to all interfaces.
        port: TCP port for the API server.
    """
    # Load configuration and set up logging first so both the crawler
    # and the API are built against the same config/logger.
    config = load_config()
    logger = setup_logging(config.log_level, config.log_dir)

    # Create the crawler instance; create_app() receives it, so it must
    # exist before the API server is built.
    crawler = BarronsCrawler(config, logger)

    # Build the Flask app and serve it from a background daemon thread.
    app = create_app(crawler)

    def run_api() -> None:
        # debug=False: Flask's debugger/reloader must not run off the
        # main thread.
        app.run(host=host, port=port, debug=False)

    flask_thread = threading.Thread(target=run_api, daemon=True, name='api-server')
    flask_thread.start()

    # Run the crawler loop (blocking) — this keeps the process alive;
    # when it returns/raises, the daemon API thread dies with it.
    crawler.run()