mirror of
https://github.com/NanmiCoder/MediaCrawler.git
synced 2026-04-21 19:27:40 +08:00
feat: 快手视频评论爬取done;数据保存到DB、CSV done
This commit is contained in:
@@ -1,16 +1,16 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import asyncio
|
||||
import json
|
||||
from typing import Any, Callable, Dict, Optional
|
||||
from urllib.parse import urlencode
|
||||
from typing import Dict, Optional
|
||||
|
||||
import httpx
|
||||
from playwright.async_api import BrowserContext, Page
|
||||
|
||||
from tools import utils
|
||||
|
||||
from .graphql import KuaiShouGraphQL
|
||||
from .exception import DataFetchError, IPBlockError
|
||||
from .graphql import KuaiShouGraphQL
|
||||
|
||||
|
||||
class KuaiShouClient:
|
||||
@@ -31,7 +31,7 @@ class KuaiShouClient:
|
||||
self.cookie_dict = cookie_dict
|
||||
self.graphql = KuaiShouGraphQL()
|
||||
|
||||
async def request(self, method, url, **kwargs) -> Dict:
|
||||
async def request(self, method, url, **kwargs) -> Any:
|
||||
async with httpx.AsyncClient(proxies=self.proxies) as client:
|
||||
response = await client.request(
|
||||
method, url, timeout=self.timeout,
|
||||
@@ -89,7 +89,6 @@ class KuaiShouClient:
|
||||
}
|
||||
return await self.post("", post_data)
|
||||
|
||||
|
||||
async def get_video_info(self, photo_id: str) -> Dict:
|
||||
"""
|
||||
Kuaishou web video detail api
|
||||
@@ -105,3 +104,71 @@ class KuaiShouClient:
|
||||
"query": self.graphql.get("video_detail")
|
||||
}
|
||||
return await self.post("", post_data)
|
||||
|
||||
async def get_video_comments(self, photo_id: str, pcursor: str = "") -> Dict:
    """
    Fetch one page of first-level comments for a Kuaishou video.

    :param photo_id: id of the video whose comments are requested
    :param pcursor: pagination cursor returned by the previous page; "" for the first page
    :return: raw response dict from the GraphQL comment-list endpoint
    """
    # Build the GraphQL request; the query text is loaded via the bundled
    # KuaiShouGraphQL helper.
    variables = {
        "photoId": photo_id,
        "pcursor": pcursor,
    }
    payload = {
        "operationName": "commentListQuery",
        "variables": variables,
        "query": self.graphql.get("comment_list"),
    }
    # The GraphQL endpoint lives at the client's base URL, hence the empty uri.
    return await self.post("", payload)
|
||||
|
||||
async def get_video_sub_comments(
        self, note_id: str,
        root_comment_id: str,
        num: int = 30, cursor: str = ""
):
    """
    Fetch one page of replies (sub comments) under a root comment.

    NOTE(review): the endpoint path and the "note" terminology look copied
    from the xiaohongshu client — confirm this URI is actually served by the
    Kuaishou host before relying on it.

    :param note_id: id of the note/video the root comment belongs to
    :param root_comment_id: id of the parent comment
    :param num: requested page size; per the original docs the server returns
        at most 30 even when a larger value is passed
    :param cursor: cursor from the previous page, defaults to "" (first page)
    :return: response dict, e.g. {"has_more": true, "cursor": "...",
        "comments": [...], "user_id": "...", "time": ...}
    """
    query_params = {
        "note_id": note_id,
        "root_comment_id": root_comment_id,
        "num": num,
        "cursor": cursor,
    }
    return await self.get("/api/sns/web/v2/comment/sub/page", query_params)
|
||||
|
||||
async def get_video_all_comments(self, photo_id: str, crawl_interval: float = 1.0, is_fetch_sub_comments=False,
                                 callback: Optional[Callable] = None, ):
    """
    Page through every first-level comment of a video.

    :param photo_id: id of the video to crawl
    :param crawl_interval: seconds to sleep between page requests (politeness delay)
    :param is_fetch_sub_comments: when True, first-level comments are NOT
        accumulated in the return value (sub-comment fetching itself is still a TODO)
    :param callback: optional async callable invoked as ``callback(photo_id, comments)``
        once per fetched page
    :return: list of root-comment dicts; empty when is_fetch_sub_comments is True
    """
    collected = []
    cursor = ""
    # The server marks the last page with the literal cursor value "no_more".
    while cursor != "no_more":
        page = await self.get_video_comments(photo_id, cursor)
        comment_list = page.get("visionCommentList", {})
        cursor = comment_list.get("pcursor", "")
        root_comments = comment_list.get("rootComments", [])

        # Hand each page to the caller before throttling.
        if callback:
            await callback(photo_id, root_comments)

        await asyncio.sleep(crawl_interval)
        if not is_fetch_sub_comments:
            collected.extend(root_comments)
            continue
        # todo handle get sub comments
    return collected
|
||||
|
||||
Reference in New Issue
Block a user