mirror of
https://github.com/NanmiCoder/MediaCrawler.git
synced 2026-05-08 19:47:40 +08:00
refactor: 抽离应用 runner 并优化退出清理
- 新增 tools/app_runner.py 统一信号/取消/清理超时逻辑 - main.py 精简为业务入口与资源清理实现 - CDPBrowserManager 不再覆盖已有 SIGINT/SIGTERM 处理器
This commit is contained in:
133
main.py
133
main.py
@@ -19,9 +19,7 @@
|
||||
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
import signal
|
||||
from typing import Optional
|
||||
from typing import Optional, Type
|
||||
|
||||
import cmd_arg
|
||||
import config
|
||||
@@ -39,7 +37,7 @@ from var import crawler_type_var
|
||||
|
||||
|
||||
class CrawlerFactory:
|
||||
CRAWLERS = {
|
||||
CRAWLERS: dict[str, Type[AbstractCrawler]] = {
|
||||
"xhs": XiaoHongShuCrawler,
|
||||
"dy": DouYinCrawler,
|
||||
"ks": KuaishouCrawler,
|
||||
@@ -53,115 +51,96 @@ class CrawlerFactory:
|
||||
def create_crawler(platform: str) -> AbstractCrawler:
    """Instantiate the crawler class registered for *platform*.

    Args:
        platform: Platform key, e.g. "xhs", "dy", "ks" (must be a key of
            CrawlerFactory.CRAWLERS).

    Returns:
        A fresh AbstractCrawler instance for the requested platform.

    Raises:
        ValueError: if *platform* is not a registered platform; the message
            lists the supported keys so the user can self-correct.
    """
    crawler_class = CrawlerFactory.CRAWLERS.get(platform)
    if not crawler_class:
        supported = ", ".join(sorted(CrawlerFactory.CRAWLERS))
        raise ValueError(f"Invalid media platform: {platform!r}. Supported: {supported}")
    return crawler_class()
|
||||
|
||||
|
||||
crawler: Optional[AbstractCrawler] = None
|
||||
|
||||
|
||||
# persist-1<persist1@126.com>
|
||||
# 原因:增加 --init_db 功能,用于数据库初始化。
|
||||
# 副作用:无
|
||||
# 回滚策略:还原此文件。
|
||||
async def main():
|
||||
# Init crawler
|
||||
def _flush_excel_if_needed() -> None:
    """Flush buffered Excel rows to disk when the excel store is active.

    No-op unless config.SAVE_DATA_OPTION == "excel".  Best-effort: any
    failure is reported but never propagated, so cleanup paths that call
    this cannot be aborted by a storage error.
    """
    if config.SAVE_DATA_OPTION != "excel":
        return

    try:
        # Imported lazily so the excel dependency is only loaded when used.
        from store.excel_store_base import ExcelStoreBase

        ExcelStoreBase.flush_all()
        print("[Main] Excel files saved successfully")
    except Exception as e:
        print(f"[Main] Error flushing Excel data: {e}")
|
||||
|
||||
|
||||
async def _generate_wordcloud_if_needed() -> None:
    """Generate a word cloud from crawled comments after a run completes.

    Only runs for the JSON save mode with ENABLE_GET_WORDCLOUD turned on.
    Best-effort: errors are printed, never raised.
    """
    if config.SAVE_DATA_OPTION != "json" or not config.ENABLE_GET_WORDCLOUD:
        return

    try:
        file_writer = AsyncFileWriter(
            platform=config.PLATFORM,
            crawler_type=crawler_type_var.get(),
        )
        await file_writer.generate_wordcloud_from_comments()
    except Exception as e:
        print(f"[Main] Error generating wordcloud: {e}")
|
||||
|
||||
|
||||
async def main() -> None:
    """Program entry point: parse CLI args, then either init the DB or run a crawl.

    Side effects: assigns the module-level `crawler` global so that the
    cleanup/interrupt handlers can reach the active crawler instance.
    """
    global crawler

    # parse cmd
    args = await cmd_arg.parse_cmd()

    # init db: `--init_db <schema>` initializes the database and exits
    # without starting a crawl.
    if args.init_db:
        await db.init_db(args.init_db)
        print(f"Database {args.init_db} initialized successfully.")
        return

    crawler = CrawlerFactory.create_crawler(platform=config.PLATFORM)
    await crawler.start()

    # Post-run persistence steps (each is a guarded no-op for other modes).
    _flush_excel_if_needed()
    await _generate_wordcloud_if_needed()
|
||||
|
||||
|
||||
async def async_cleanup() -> None:
    """Async resource teardown: browser (CDP or standard context) and database.

    Safe to call whether or not a crawler was ever created; every step is
    best-effort so one failing resource does not block the others.
    """
    global crawler
    if crawler:
        # CDP-managed browser takes priority; force=True kills the browser
        # process even if the connection is already gone.
        if getattr(crawler, "cdp_manager", None):
            try:
                await crawler.cdp_manager.cleanup(force=True)
            except Exception as e:
                # Only report unexpected errors; "closed"/"disconnected"
                # just mean the browser already went away.
                error_msg = str(e).lower()
                if "closed" not in error_msg and "disconnected" not in error_msg:
                    print(f"[Main] 清理CDP浏览器时出错: {e}")

        # Standard browser context cleanup (non-CDP mode only).
        elif getattr(crawler, "browser_context", None):
            try:
                await crawler.browser_context.close()
            except Exception as e:
                error_msg = str(e).lower()
                if "closed" not in error_msg and "disconnected" not in error_msg:
                    print(f"[Main] 关闭浏览器上下文时出错: {e}")

    # Close database connections for the db-backed save modes.
    if config.SAVE_DATA_OPTION in ("db", "sqlite"):
        await db.close()
||||
|
||||
def cleanup() -> None:
    """Synchronous cleanup wrapper around async_cleanup().

    Spins up a fresh event loop because this may be called from a signal
    handler or a `finally` block after the main loop has stopped.
    Errors are reported, never raised.
    """
    try:
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        loop.run_until_complete(async_cleanup())
        loop.close()
    except Exception as e:
        print(f"[Main] 清理时出错: {e}")
|
||||
|
||||
|
||||
def signal_handler(signum, _frame) -> None:
    """Signal handler for SIGINT/SIGTERM: clean up resources, then exit 0.

    Args:
        signum: Number of the received signal (echoed to the user).
        _frame: Current stack frame (unused, required by the signal API).
    """
    print(f"\n[Main] 收到中断信号 {signum},正在清理资源...")
    cleanup()
    sys.exit(0)
|
||||
|
||||
if __name__ == "__main__":
    # Delegate signal handling, cancellation, and cleanup-timeout logic to
    # the shared application runner (tools/app_runner.py).
    from tools.app_runner import run

    def _force_stop() -> None:
        """First-interrupt hook: hard-kill the CDP browser process, if any.

        Best-effort and synchronous — called while the loop may still be
        shutting down, so all failures are swallowed.
        """
        c = crawler
        if not c:
            return
        cdp_manager = getattr(c, "cdp_manager", None)
        launcher = getattr(cdp_manager, "launcher", None)
        if not launcher:
            return
        try:
            launcher.cleanup()
        except Exception:
            pass

    run(main, async_cleanup, cleanup_timeout_seconds=15.0, on_first_interrupt=_force_stop)
|
||||
|
||||
Reference in New Issue
Block a user