Mirror of https://github.com/NanmiCoder/MediaCrawler.git (synced 2026-02-06 23:21:33 +08:00)
fix(store): fix the bug where improper use of 'crawler_type_var' led to abnormal csv/json output file names
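For context: the diff imports crawler_type_var from var and reads it with .get(), which suggests it is a contextvars.ContextVar set once when the crawler starts. The sketch below shows that pattern; everything except the name crawler_type_var and the .get()/.set() calls is an assumption for illustration (the helper and class names are hypothetical).

# Minimal sketch of the ContextVar pattern the fix relies on (assumed, not taken from the repo).
from contextvars import ContextVar

crawler_type_var: ContextVar[str] = ContextVar("crawler_type", default="")

def start_crawl(crawler_type: str) -> None:
    # Set once at startup, e.g. "search", "detail" or "creator".
    crawler_type_var.set(crawler_type)

class DemoCsvStore:
    def __init__(self, **kwargs):
        # Before the fix: kwargs.get("crawler_type") is None whenever the store
        # is instantiated without keyword arguments.
        # After the fix: the value set at startup is read from the context variable.
        self.crawler_type = crawler_type_var.get()

start_crawl("search")
print(DemoCsvStore().crawler_type)  # -> "search"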
@@ -50,7 +50,7 @@ def calculate_number_of_files(file_store_path: str) -> int:
 class KuaishouCsvStoreImplement(AbstractStore):
     def __init__(self, **kwargs):
         super().__init__(**kwargs)
-        self.writer = AsyncFileWriter(platform="kuaishou", crawler_type=kwargs.get("crawler_type"))
+        self.writer = AsyncFileWriter(platform="kuaishou", crawler_type=crawler_type_var.get())

     async def store_content(self, content_item: Dict):
         """
@@ -127,7 +127,7 @@ class KuaishouDbStoreImplement(AbstractStore):
 class KuaishouJsonStoreImplement(AbstractStore):
     def __init__(self, **kwargs):
         super().__init__(**kwargs)
-        self.writer = AsyncFileWriter(platform="kuaishou", crawler_type=kwargs.get("crawler_type"))
+        self.writer = AsyncFileWriter(platform="kuaishou", crawler_type=crawler_type_var.get())

     async def store_content(self, content_item: Dict):
         """
@@ -51,7 +51,7 @@ def calculate_number_of_files(file_store_path: str) -> int:
 class TieBaCsvStoreImplement(AbstractStore):
     def __init__(self, **kwargs):
         super().__init__(**kwargs)
-        self.writer = AsyncFileWriter(platform="tieba", crawler_type=kwargs.get("crawler_type"))
+        self.writer = AsyncFileWriter(platform="tieba", crawler_type=crawler_type_var.get())

     async def store_content(self, content_item: Dict):
         """
@@ -149,7 +149,7 @@ class TieBaDbStoreImplement(AbstractStore):
 class TieBaJsonStoreImplement(AbstractStore):
     def __init__(self, **kwargs):
         super().__init__(**kwargs)
-        self.writer = AsyncFileWriter(platform="tieba", crawler_type=kwargs.get("crawler_type"))
+        self.writer = AsyncFileWriter(platform="tieba", crawler_type=crawler_type_var.get())

     async def store_content(self, content_item: Dict):
         """
@@ -51,7 +51,7 @@ def calculate_number_of_files(file_store_path: str) -> int:
 class WeiboCsvStoreImplement(AbstractStore):
     def __init__(self, **kwargs):
         super().__init__(**kwargs)
-        self.writer = AsyncFileWriter(platform="weibo", crawler_type=kwargs.get("crawler_type"))
+        self.writer = AsyncFileWriter(platform="weibo", crawler_type=crawler_type_var.get())

     async def store_content(self, content_item: Dict):
         """
@@ -171,7 +171,7 @@ class WeiboDbStoreImplement(AbstractStore):
 class WeiboJsonStoreImplement(AbstractStore):
     def __init__(self, **kwargs):
         super().__init__(**kwargs)
-        self.writer = AsyncFileWriter(platform="weibo", crawler_type=kwargs.get("crawler_type"))
+        self.writer = AsyncFileWriter(platform="weibo", crawler_type=crawler_type_var.get())

     async def store_content(self, content_item: Dict):
         """
@@ -17,12 +17,12 @@ from database.models import XhsNote, XhsNoteComment, XhsCreator

 from tools.async_file_writer import AsyncFileWriter
 from tools.time_util import get_current_timestamp
-
+from var import crawler_type_var

 class XhsCsvStoreImplement(AbstractStore):
     def __init__(self, **kwargs):
         super().__init__(**kwargs)
-        self.writer = AsyncFileWriter(platform="xhs", crawler_type=kwargs.get("crawler_type"))
+        self.writer = AsyncFileWriter(platform="xhs", crawler_type=crawler_type_var.get())

     async def store_content(self, content_item: Dict):
         """
@@ -51,7 +51,7 @@ class XhsCsvStoreImplement(AbstractStore):
 class XhsJsonStoreImplement(AbstractStore):
     def __init__(self, **kwargs):
         super().__init__(**kwargs)
-        self.writer = AsyncFileWriter(platform="xhs", crawler_type=kwargs.get("crawler_type"))
+        self.writer = AsyncFileWriter(platform="xhs", crawler_type=crawler_type_var.get())

     async def store_content(self, content_item: Dict):
         """
@@ -50,7 +50,7 @@ def calculate_number_of_files(file_store_path: str) -> int:
 class ZhihuCsvStoreImplement(AbstractStore):
     def __init__(self, **kwargs):
         super().__init__(**kwargs)
-        self.writer = AsyncFileWriter(platform="zhihu", crawler_type=kwargs.get("crawler_type"))
+        self.writer = AsyncFileWriter(platform="zhihu", crawler_type=crawler_type_var.get())

     async def store_content(self, content_item: Dict):
         """
@@ -148,7 +148,7 @@ class ZhihuDbStoreImplement(AbstractStore):
 class ZhihuJsonStoreImplement(AbstractStore):
     def __init__(self, **kwargs):
         super().__init__(**kwargs)
-        self.writer = AsyncFileWriter(platform="zhihu", crawler_type=kwargs.get("crawler_type"))
+        self.writer = AsyncFileWriter(platform="zhihu", crawler_type=crawler_type_var.get())

     async def store_content(self, content_item: Dict):
         """
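The AsyncFileWriter internals are not part of this diff; as a rough illustration of the symptom named in the commit message, assume the writer derives the csv/json path from platform and crawler_type. The make_save_path helper below is hypothetical and only shows why a missing crawler type degenerates the file name, and why reading the context variable restores it.

# Hypothetical naming scheme; the real AsyncFileWriter may build paths differently.
import os
from contextvars import ContextVar

crawler_type_var: ContextVar[str] = ContextVar("crawler_type", default="")

def make_save_path(platform: str, crawler_type, item_type: str = "contents") -> str:
    # File name combines platform, crawler type and item type.
    return os.path.join("data", platform, f"{crawler_type}_{item_type}.csv")

# Before the fix: the store is created without kwargs, so kwargs.get("crawler_type")
# is None and the name degenerates.
print(make_save_path("xhs", None))                    # data/xhs/None_contents.csv

# After the fix: the value set at startup is read from the context variable.
crawler_type_var.set("search")
print(make_save_path("xhs", crawler_type_var.get()))  # data/xhs/search_contents.csv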