Mirror of https://github.com/Evil0ctal/Douyin_TikTok_Download_API.git
Synced 2025-04-20 11:35:01 +08:00

Merge pull request #503 from Lynxiayel/main
Commit a7f87dff00
@@ -74,7 +74,8 @@ class TikTokAPPCrawler:
                 "Cookie": tiktok_config["headers"]["Cookie"],
                 "x-ladon": "Hello From Evil0ctal!",
             },
-            "proxies": {"http://": None, "https://": None},
+            "proxies": {"http://": tiktok_config["proxies"]["http"],
+                        "https://": tiktok_config["proxies"]["https"]}
         }
         return kwargs
@@ -89,7 +89,8 @@ class TikTokWebCrawler:
                 "Referer": tiktok_config["headers"]["Referer"],
                 "Cookie": tiktok_config["headers"]["Cookie"],
             },
-            "proxies": {"http://": None, "https://": None},
+            "proxies": {"http://": tiktok_config["proxies"]["http"],
+                        "https://": tiktok_config["proxies"]["https"]}
         }
         return kwargs
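
The two hunks above make the same change in TikTokAPPCrawler and TikTokWebCrawler: the request kwargs no longer hard-code a proxy-less client but forward whatever proxy is configured for TikTok. A minimal sketch of the effect, assuming tiktok_config is a plain dict whose "proxies" section is keyed by "http"/"https" (the config layout and example values here are illustrative assumptions, not the project's actual config):

# Minimal sketch, not the project's actual config: tiktok_config is assumed
# to be a dict with a "proxies" section keyed by "http"/"https".
tiktok_config = {
    "headers": {
        "User-Agent": "Mozilla/5.0 ...",        # placeholder
        "Referer": "https://www.tiktok.com/",   # placeholder
        "Cookie": "sessionid=...",              # placeholder
    },
    "proxies": {
        "http": "http://127.0.0.1:7890",        # assumed example proxy; None disables it
        "https": "http://127.0.0.1:7890",
    },
}

kwargs = {
    "headers": {
        "Cookie": tiktok_config["headers"]["Cookie"],
        "x-ladon": "Hello From Evil0ctal!",
    },
    "proxies": {"http://": tiktok_config["proxies"]["http"],
                "https://": tiktok_config["proxies"]["https"]},
}

# Before this commit kwargs["proxies"] was always {"http://": None, "https://": None};
# now it mirrors the configured values, so a proxy set in the config is actually used.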
@@ -133,7 +134,7 @@ class TikTokWebCrawler:
         kwargs = await self.get_tiktok_headers()
         # proxies = {"http://": 'http://43.159.29.191:24144', "https://": 'http://43.159.29.191:24144'}
         # Create a base crawler
-        base_crawler = BaseCrawler(proxies=None, crawler_headers=kwargs["headers"])
+        base_crawler = BaseCrawler(proxies=kwargs["proxies"], crawler_headers=kwargs["headers"])
         async with base_crawler as crawler:
             # Create the BaseModel parameters for a user's posts
             params = UserPost(secUid=secUid, cursor=cursor, count=count, coverFormat=coverFormat)
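
The remaining hunks wire those kwargs into the request layer: BaseCrawler now receives kwargs["proxies"] instead of None. The scheme-keyed mapping ({"http://": ..., "https://": ...}) is the proxies format accepted by pre-0.28 httpx clients, which BaseCrawler presumably wraps; the sketch below shows how such a mapping is typically consumed (fetch_with_proxies is a hypothetical stand-in, not part of the repository):

import asyncio

import httpx


async def fetch_with_proxies(url: str, proxies: dict, headers: dict) -> int:
    # httpx (pre-0.28) accepts a scheme-keyed proxies mapping like the one built
    # in get_tiktok_headers; a value of None means "no proxy for this scheme".
    async with httpx.AsyncClient(proxies=proxies, headers=headers) as client:
        response = await client.get(url)
        return response.status_code


if __name__ == "__main__":
    status = asyncio.run(fetch_with_proxies(
        "https://www.tiktok.com/",
        proxies={"http://": None, "https://": None},   # or the configured proxy URLs
        headers={"User-Agent": "Mozilla/5.0 ..."},
    ))
    print(status)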
@@ -216,7 +217,7 @@ class TikTokWebCrawler:
         kwargs = await self.get_tiktok_headers()
         # proxies = {"http://": 'http://43.159.18.174:25263', "https://": 'http://43.159.18.174:25263'}
         # Create a base crawler
-        base_crawler = BaseCrawler(proxies=None, crawler_headers=kwargs["headers"])
+        base_crawler = BaseCrawler(proxies=kwargs["proxies"], crawler_headers=kwargs["headers"])
         async with base_crawler as crawler:
             # Create the BaseModel parameters for a post's comments
             params = PostComment(aweme_id=aweme_id, cursor=cursor, count=count, current_region=current_region)
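
For context, the UserPost and PostComment objects constructed in the changed methods are BaseModel request parameters; a rough Pydantic-style sketch with field names taken from the hunks above (types and default values are assumptions):

from pydantic import BaseModel


class UserPost(BaseModel):
    secUid: str
    cursor: int = 0
    count: int = 35
    coverFormat: int = 2


class PostComment(BaseModel):
    aweme_id: str
    cursor: int = 0
    count: int = 20
    current_region: str = ""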