Mirror of https://github.com/NanmiCoder/MediaCrawler.git (synced 2026-02-06 23:21:33 +08:00)
refactor: xhs extractor
@@ -26,6 +26,7 @@ from html import unescape
 from .exception import DataFetchError, IPBlockError
 from .field import SearchNoteType, SearchSortType
 from .help import get_search_id, sign
+from .extractor import XiaoHongShuExtractor


 class XiaoHongShuClient(AbstractApiClient):
@@ -50,6 +51,7 @@ class XiaoHongShuClient(AbstractApiClient):
         self.NOTE_ABNORMAL_CODE = -510001
         self.playwright_page = playwright_page
         self.cookie_dict = cookie_dict
+        self._extractor = XiaoHongShuExtractor()

     async def _pre_headers(self, url: str, data=None) -> Dict:
         """
@@ -61,7 +63,9 @@ class XiaoHongShuClient(AbstractApiClient):
         Returns:

         """
-        encrypt_params = await self.playwright_page.evaluate("([url, data]) => window._webmsxyw(url,data)", [url, data])
+        encrypt_params = await self.playwright_page.evaluate(
+            "([url, data]) => window._webmsxyw(url,data)", [url, data]
+        )
         local_storage = await self.playwright_page.evaluate("() => window.localStorage")
         signs = sign(
             a1=self.cookie_dict.get("a1", ""),
@@ -128,7 +132,9 @@ class XiaoHongShuClient(AbstractApiClient):
         if isinstance(params, dict):
             final_uri = f"{uri}?" f"{urlencode(params)}"
         headers = await self._pre_headers(final_uri)
-        return await self.request(method="GET", url=f"{self._host}{final_uri}", headers=headers)
+        return await self.request(
+            method="GET", url=f"{self._host}{final_uri}", headers=headers
+        )

     async def post(self, uri: str, data: dict, **kwargs) -> Dict:
         """
@@ -156,12 +162,18 @@ class XiaoHongShuClient(AbstractApiClient):
                 response = await client.request("GET", url, timeout=self.timeout)
                 response.raise_for_status()
                 if not response.reason_phrase == "OK":
-                    utils.logger.error(f"[XiaoHongShuClient.get_note_media] request {url} err, res:{response.text}")
+                    utils.logger.error(
+                        f"[XiaoHongShuClient.get_note_media] request {url} err, res:{response.text}"
+                    )
                     return None
                 else:
                     return response.content
-        except httpx.HTTPError as exc:  # something went wrong in the httpx.request call: a connection error, client error, server error, or non-2xx status code
-            utils.logger.error(f"[DouYinClient.get_aweme_media] {exc.__class__.__name__} for {exc.request.url} - {exc}")  # keep the original exception class name so developers can debug
+        except (
+            httpx.HTTPError
+        ) as exc:  # something went wrong in the httpx.request call: a connection error, client error, server error, or non-2xx status code
+            utils.logger.error(
+                f"[DouYinClient.get_aweme_media] {exc.__class__.__name__} for {exc.request.url} - {exc}"
+            )  # keep the original exception class name so developers can debug
             return None

     async def pong(self) -> bool:
@@ -178,7 +190,9 @@ class XiaoHongShuClient(AbstractApiClient):
             if note_card.get("items"):
                 ping_flag = True
         except Exception as e:
-            utils.logger.error(f"[XiaoHongShuClient.pong] Ping xhs failed: {e}, and try to login again...")
+            utils.logger.error(
+                f"[XiaoHongShuClient.pong] Ping xhs failed: {e}, and try to login again..."
+            )
             ping_flag = False
         return ping_flag

@@ -249,9 +263,7 @@ class XiaoHongShuClient(AbstractApiClient):
         data = {
             "source_note_id": note_id,
             "image_formats": ["jpg", "webp", "avif"],
-            "extra": {
-                "need_body_topic": 1
-            },
+            "extra": {"need_body_topic": 1},
             "xsec_source": xsec_source,
             "xsec_token": xsec_token,
         }
@@ -261,7 +273,9 @@ class XiaoHongShuClient(AbstractApiClient):
             res_dict: Dict = res["items"][0]["note_card"]
             return res_dict
         # when crawling too frequently, some notes may return results while others come back empty
-        utils.logger.error(f"[XiaoHongShuClient.get_note_by_id] get note id:{note_id} empty and res:{res}")
+        utils.logger.error(
+            f"[XiaoHongShuClient.get_note_by_id] get note id:{note_id} empty and res:{res}"
+        )
         return dict()

     async def get_note_comments(
@@ -345,15 +359,19 @@ class XiaoHongShuClient(AbstractApiClient):
         comments_has_more = True
         comments_cursor = ""
         while comments_has_more and len(result) < max_count:
-            comments_res = await self.get_note_comments(note_id=note_id, xsec_token=xsec_token, cursor=comments_cursor)
+            comments_res = await self.get_note_comments(
+                note_id=note_id, xsec_token=xsec_token, cursor=comments_cursor
+            )
             comments_has_more = comments_res.get("has_more", False)
             comments_cursor = comments_res.get("cursor", "")
             if "comments" not in comments_res:
-                utils.logger.info(f"[XiaoHongShuClient.get_note_all_comments] No 'comments' key found in response: {comments_res}")
+                utils.logger.info(
+                    f"[XiaoHongShuClient.get_note_all_comments] No 'comments' key found in response: {comments_res}"
+                )
                 break
             comments = comments_res["comments"]
             if len(result) + len(comments) > max_count:
-                comments = comments[:max_count - len(result)]
+                comments = comments[: max_count - len(result)]
             if callback:
                 await callback(note_id, comments)
             await asyncio.sleep(crawl_interval)
@@ -386,7 +404,9 @@ class XiaoHongShuClient(AbstractApiClient):

         """
         if not config.ENABLE_GET_SUB_COMMENTS:
-            utils.logger.info(f"[XiaoHongShuCrawler.get_comments_all_sub_comments] Crawling sub_comment mode is not enabled")
+            utils.logger.info(
+                f"[XiaoHongShuCrawler.get_comments_all_sub_comments] Crawling sub_comment mode is not enabled"
+            )
             return []

         result = []
@@ -413,12 +433,16 @@ class XiaoHongShuClient(AbstractApiClient):
                 )

                 if comments_res is None:
-                    utils.logger.info(f"[XiaoHongShuClient.get_comments_all_sub_comments] No response found for note_id: {note_id}")
+                    utils.logger.info(
+                        f"[XiaoHongShuClient.get_comments_all_sub_comments] No response found for note_id: {note_id}"
+                    )
                     continue
                 sub_comment_has_more = comments_res.get("has_more", False)
                 sub_comment_cursor = comments_res.get("cursor", "")
                 if "comments" not in comments_res:
-                    utils.logger.info(f"[XiaoHongShuClient.get_comments_all_sub_comments] No 'comments' key found in response: {comments_res}")
+                    utils.logger.info(
+                        f"[XiaoHongShuClient.get_comments_all_sub_comments] No 'comments' key found in response: {comments_res}"
+                    )
                     break
                 comments = comments_res["comments"]
                 if callback:
@@ -434,16 +458,10 @@ class XiaoHongShuClient(AbstractApiClient):
         eg: https://www.xiaohongshu.com/user/profile/59d8cb33de5fb4696bf17217
         """
         uri = f"/user/profile/{user_id}"
-        html_content = await self.request("GET", self._domain + uri, return_response=True, headers=self.headers)
-        match = re.search(r"<script>window.__INITIAL_STATE__=(.+)<\/script>", html_content, re.M)
-
-        if match is None:
-            return {}
-
-        info = json.loads(match.group(1).replace(":undefined", ":null"), strict=False)
-        if info is None:
-            return {}
-        return info.get("user").get("userPageData")
+        html_content = await self.request(
+            "GET", self._domain + uri, return_response=True, headers=self.headers
+        )
+        return self._extractor.extract_creator_info_from_html(html_content)

     async def get_notes_by_creator(
         self,
@@ -492,17 +510,23 @@ class XiaoHongShuClient(AbstractApiClient):
         while notes_has_more and len(result) < config.CRAWLER_MAX_NOTES_COUNT:
             notes_res = await self.get_notes_by_creator(user_id, notes_cursor)
             if not notes_res:
-                utils.logger.error(f"[XiaoHongShuClient.get_notes_by_creator] The current creator may have been banned by xhs, so they cannot access the data.")
+                utils.logger.error(
+                    f"[XiaoHongShuClient.get_notes_by_creator] The current creator may have been banned by xhs, so they cannot access the data."
+                )
                 break

             notes_has_more = notes_res.get("has_more", False)
             notes_cursor = notes_res.get("cursor", "")
             if "notes" not in notes_res:
-                utils.logger.info(f"[XiaoHongShuClient.get_all_notes_by_creator] No 'notes' key found in response: {notes_res}")
+                utils.logger.info(
+                    f"[XiaoHongShuClient.get_all_notes_by_creator] No 'notes' key found in response: {notes_res}"
+                )
                 break

             notes = notes_res["notes"]
-            utils.logger.info(f"[XiaoHongShuClient.get_all_notes_by_creator] got user_id:{user_id} notes len : {len(notes)}")
+            utils.logger.info(
+                f"[XiaoHongShuClient.get_all_notes_by_creator] got user_id:{user_id} notes len : {len(notes)}"
+            )

             remaining = config.CRAWLER_MAX_NOTES_COUNT - len(result)
             if remaining <= 0:
@@ -515,7 +539,9 @@ class XiaoHongShuClient(AbstractApiClient):
             result.extend(notes_to_add)
             await asyncio.sleep(crawl_interval)

-        utils.logger.info(f"[XiaoHongShuClient.get_all_notes_by_creator] Finished getting notes for user {user_id}, total: {len(result)}")
+        utils.logger.info(
+            f"[XiaoHongShuClient.get_all_notes_by_creator] Finished getting notes for user {user_id}, total: {len(result)}"
+        )
         return result

     async def get_note_short_url(self, note_id: str) -> Dict:
@@ -552,41 +578,17 @@ class XiaoHongShuClient(AbstractApiClient):
         Returns:

         """

-        def camel_to_underscore(key):
-            return re.sub(r"(?<!^)(?=[A-Z])", "_", key).lower()
-
-        def transform_json_keys(json_data):
-            data_dict = json.loads(json_data)
-            dict_new = {}
-            for key, value in data_dict.items():
-                new_key = camel_to_underscore(key)
-                if not value:
-                    dict_new[new_key] = value
-                elif isinstance(value, dict):
-                    dict_new[new_key] = transform_json_keys(json.dumps(value))
-                elif isinstance(value, list):
-                    dict_new[new_key] = [(transform_json_keys(json.dumps(item)) if (item and isinstance(item, dict)) else item) for item in value]
-                else:
-                    dict_new[new_key] = value
-            return dict_new
-
-        url = ("https://www.xiaohongshu.com/explore/" + note_id + f"?xsec_token={xsec_token}&xsec_source={xsec_source}")
+        url = (
+            "https://www.xiaohongshu.com/explore/"
+            + note_id
+            + f"?xsec_token={xsec_token}&xsec_source={xsec_source}"
+        )
         copy_headers = self.headers.copy()
         if not enable_cookie:
             del copy_headers["Cookie"]

-        html = await self.request(method="GET", url=url, return_response=True, headers=copy_headers)
-
-        def get_note_dict(html):
-            state = re.findall(r"window.__INITIAL_STATE__=({.*})</script>", html)[0].replace("undefined", '""')
-
-            if state != "{}":
-                note_dict = transform_json_keys(state)
-                return note_dict["note"]["note_detail_map"][note_id]["note"]
-            return {}
-
-        try:
-            return get_note_dict(html)
-        except:
-            return None
+        html = await self.request(
+            method="GET", url=url, return_response=True, headers=copy_headers
+        )
+        return self._extractor.extract_note_detail_from_html(note_id, html)
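The extractor module itself is outside this diff, but the deleted client code shows exactly what moved: both removed blocks parse the server-rendered window.__INITIAL_STATE__ payload out of XHS HTML. Below is a minimal sketch of what XiaoHongShuExtractor presumably centralizes, reconstructed from the lines removed above. The two public method names match the calls in the diff; the internals (helper names, regexes copied from the old code, error handling) are assumptions, not the module's actual source.

# Hypothetical reconstruction of .extractor, based only on the logic this
# commit deletes from XiaoHongShuClient, not on the real module.
import json
import re
from typing import Dict, Optional


class XiaoHongShuExtractor:
    """Pulls creator/note data out of server-rendered Xiaohongshu HTML."""

    # Same pattern the old get_creator_info used to locate the SSR state blob.
    _INITIAL_STATE_RE = re.compile(
        r"<script>window.__INITIAL_STATE__=(.+)</script>", re.M
    )

    @staticmethod
    def _camel_to_underscore(key: str) -> str:
        # "noteDetailMap" -> "note_detail_map"
        return re.sub(r"(?<!^)(?=[A-Z])", "_", key).lower()

    def _transform_json_keys(self, json_data: str) -> Dict:
        # Recursively snake_cases every key, mirroring the removed inline
        # helper (including its json.dumps round-trip for nested values).
        data_dict = json.loads(json_data)
        dict_new = {}
        for key, value in data_dict.items():
            new_key = self._camel_to_underscore(key)
            if not value:
                dict_new[new_key] = value
            elif isinstance(value, dict):
                dict_new[new_key] = self._transform_json_keys(json.dumps(value))
            elif isinstance(value, list):
                dict_new[new_key] = [
                    self._transform_json_keys(json.dumps(item))
                    if item and isinstance(item, dict)
                    else item
                    for item in value
                ]
            else:
                dict_new[new_key] = value
        return dict_new

    def extract_creator_info_from_html(self, html_content: str) -> Dict:
        # Old get_creator_info body: extract the state blob, neutralize the
        # JS literal `undefined`, and return user.userPageData.
        match = self._INITIAL_STATE_RE.search(html_content)
        if match is None:
            return {}
        info = json.loads(match.group(1).replace(":undefined", ":null"), strict=False)
        if not info:
            return {}
        return info.get("user", {}).get("userPageData", {})

    def extract_note_detail_from_html(self, note_id: str, html: str) -> Optional[Dict]:
        # Old get_note_dict helper: snake_case the blob, then index into
        # note.note_detail_map[note_id].note; None on any parse failure.
        try:
            state = re.findall(r"window.__INITIAL_STATE__=({.*})</script>", html)[
                0
            ].replace("undefined", '""')
            if state == "{}":
                return {}
            note_dict = self._transform_json_keys(state)
            return note_dict["note"]["note_detail_map"][note_id]["note"]
        except Exception:
            return None

Moving the parsing behind these two narrow methods leaves XiaoHongShuClient responsible only for transport and signing, and gives the brittle HTML-scraping logic a single place to change when XHS reshapes its page payload.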