caiwx86 2024-10-29 00:53:16 +08:00
parent eb19dd5cf9
commit c3de8413bf
3 changed files with 3 additions and 3 deletions


@@ -907,7 +907,7 @@ class oldUtils:
     def new_files(self, files, folder, suffix="CBZ", result_type="new"):
         result_files = self.old_files(files=files, folder=folder, suffix=suffix, result_type=result_type)
         new_files = []
-        if result_files == None:
+        if len(result_files) == 0:
             if isinstance(files, str): new_files.append(ComicPath.chinese_convert(ComicPath.fix_file_name(files)))
             else:
                 for file in files: new_files.append(ComicPath.chinese_convert(ComicPath.fix_file_name(file)))
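
The new check assumes old_files() returns a list (possibly empty) rather than None; if it could still return None, len(None) would raise a TypeError. A minimal sketch of the difference between the two conditions, using a hypothetical stub in place of self.old_files:

def old_files_stub(found):
    # Hypothetical stand-in for oldUtils.old_files: assumed to return an
    # empty list when nothing matches, never None.
    return ["a.cbz"] if found else []

result_files = old_files_stub(found=False)

print(result_files == None)    # False: an empty list is not None, so the old branch was skipped
print(len(result_files) == 0)  # True: the fallback branch now runs as intended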


@@ -54,7 +54,7 @@ RETRY_HTTP_CODES = [408, 401, 504, 110, 500, 502, 503, 522, 524, 429]
 CONCURRENT_REQUESTS_PER_DOMAIN = 16
 CONCURRENT_REQUESTS_PER_IP = 16
 PROXY_LIST = [
-    "http://127.0.0.1:7890",
+    # "http://127.0.0.1:7890",
     # "http://proxy.local:20172",
 ]
 # Disable cookies (enabled by default)
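
PROXY_LIST is not a built-in Scrapy setting, so some downloader middleware in the project must read it; with every entry commented out the list is empty and requests go out directly. A minimal sketch of such a middleware, assuming this wiring (the class name and random-choice strategy are illustrative, not the project's actual code):

import random

class RandomProxyMiddleware:
    def __init__(self, proxies):
        self.proxies = proxies

    @classmethod
    def from_crawler(cls, crawler):
        # Read the custom PROXY_LIST setting from settings.py.
        return cls(crawler.settings.getlist("PROXY_LIST"))

    def process_request(self, request, spider):
        # Attach a proxy only when the list is non-empty; otherwise the
        # request is sent without one.
        if self.proxies:
            request.meta["proxy"] = random.choice(self.proxies)

To take effect, a middleware like this would be registered under DOWNLOADER_MIDDLEWARES in the same settings file.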


@@ -9,7 +9,7 @@ from Comics._utils.utils import oldUtils
 class RmComicSpider(scrapy.Spider):
     name = 'rm_comic'
-    allowed_domains = ['roum18.xyz']
+    allowed_domains = ['rouman5.com']
     main_url = 'https://'+allowed_domains[0]
     start_urls = main_url+"/books"
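
For reference, Scrapy's default start_requests() iterates over start_urls and expects an iterable of URL strings. A reduced sketch of how these attributes fit together after the domain change (the class below is hypothetical and only illustrates the setup, with start_urls written as a list):

import scrapy

class RmComicSketchSpider(scrapy.Spider):
    name = 'rm_comic_sketch'
    allowed_domains = ['rouman5.com']
    main_url = 'https://' + allowed_domains[0]
    # A list, since the default start_requests() loops over start_urls.
    start_urls = [main_url + '/books']

    def parse(self, response):
        # Placeholder: the real parsing logic is not part of this diff.
        self.logger.info("Fetched %s", response.url)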