mirror of
https://github.com/NanmiCoder/MediaCrawler.git
synced 2026-03-19 17:57:38 +08:00
fix: disable SSL verification for proxy/VPN environments
Add verify=False to all httpx.AsyncClient calls across the bilibili, weibo, and zhihu clients, as well as crawler_util. This fixes SSL certificate validation errors when running behind a corporate proxy or VPN. Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -68,7 +68,7 @@ class BilibiliClient(AbstractApiClient, ProxyRefreshMixin):
|
||||
# Check if proxy has expired before each request
|
||||
await self._refresh_proxy_if_expired()
|
||||
|
||||
-                async with httpx.AsyncClient(proxy=self.proxy) as client:
|
||||
+                async with httpx.AsyncClient(proxy=self.proxy, verify=False) as client:
|
||||
response = await client.request(method, url, timeout=self.timeout, **kwargs)
|
||||
try:
|
||||
data: Dict = response.json()
|
||||
@@ -222,7 +222,7 @@ class BilibiliClient(AbstractApiClient, ProxyRefreshMixin):
|
||||
|
||||
async def get_video_media(self, url: str) -> Union[bytes, None]:
|
||||
# Follow CDN 302 redirects and treat any 2xx as success (some endpoints return 206)
|
||||
-                async with httpx.AsyncClient(proxy=self.proxy, follow_redirects=True) as client:
|
||||
+                async with httpx.AsyncClient(proxy=self.proxy, follow_redirects=True, verify=False) as client:
|
||||
try:
|
||||
response = await client.request("GET", url, timeout=self.timeout, headers=self.headers)
|
||||
response.raise_for_status()
|
||||
|
||||
@@ -73,7 +73,7 @@ class WeiboClient(ProxyRefreshMixin):
|
||||
await self._refresh_proxy_if_expired()
|
||||
|
||||
enable_return_response = kwargs.pop("return_response", False)
|
||||
-                async with httpx.AsyncClient(proxy=self.proxy) as client:
|
||||
+                async with httpx.AsyncClient(proxy=self.proxy, verify=False) as client:
|
||||
response = await client.request(method, url, timeout=self.timeout, **kwargs)
|
||||
|
||||
if enable_return_response:
|
||||
@@ -261,7 +261,7 @@ class WeiboClient(ProxyRefreshMixin):
|
||||
:return:
|
||||
"""
|
||||
url = f"{self._host}/detail/{note_id}"
|
||||
-                async with httpx.AsyncClient(proxy=self.proxy) as client:
|
||||
+                async with httpx.AsyncClient(proxy=self.proxy, verify=False) as client:
|
||||
response = await client.request("GET", url, timeout=self.timeout, headers=self.headers)
|
||||
if response.status_code != 200:
|
||||
raise DataFetchError(f"get weibo detail err: {response.text}")
|
||||
@@ -291,7 +291,7 @@ class WeiboClient(ProxyRefreshMixin):
|
||||
# Since Weibo images are accessed through i1.wp.com, we need to concatenate the URL
|
||||
final_uri = (f"{self._image_agent_host}"
|
||||
f"{image_url}")
|
||||
-                async with httpx.AsyncClient(proxy=self.proxy) as client:
|
||||
+                async with httpx.AsyncClient(proxy=self.proxy, verify=False) as client:
|
||||
try:
|
||||
response = await client.request("GET", final_uri, timeout=self.timeout)
|
||||
response.raise_for_status()
|
||||
|
||||
@@ -98,7 +98,7 @@ class ZhiHuClient(AbstractApiClient, ProxyRefreshMixin):
|
||||
# return response.text
|
||||
return_response = kwargs.pop('return_response', False)
|
||||
|
||||
-                async with httpx.AsyncClient(proxy=self.proxy) as client:
|
||||
+                async with httpx.AsyncClient(proxy=self.proxy, verify=False) as client:
|
||||
response = await client.request(method, url, timeout=self.timeout, **kwargs)
|
||||
|
||||
if response.status_code != 200:
|
||||
|
||||
Reference in New Issue
Block a user