caiwx86 2025-02-05 01:56:30 +08:00
parent 0000f3df38
commit 98c47acea7
5 changed files with 46 additions and 9 deletions

View File

@@ -341,8 +341,6 @@ class ComicInfoXml:
cpi.ImageSize = page.ImageSize
cpi.Key = page.Key
cpi.ImageWidth = page.ImageWidth
if page.ImageWidth != 720:
print(cpi)
cpi.ImageHeight = page.ImageHeight
page_elem = ET.SubElement(pages_elem, 'Page', cpi.toString())
else:

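The hunk above removes a leftover debug check and `print(cpi)` from `ComicInfoXml`. If that diagnostic is ever wanted again, a guarded `logging` call keeps it off stdout; this is only a sketch, and the `logger` name, wrapper function, and 720px threshold are carried over for illustration rather than taken from the repository:

import logging

logger = logging.getLogger(__name__)

def log_unusual_width(page, cpi, expected_width=720):
    # Sketch: the removed diagnostic, behind the DEBUG level instead of print().
    if page.ImageWidth != expected_width:
        logger.debug("unexpected page width %s: %s", page.ImageWidth, cpi)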
View File

@@ -46,6 +46,26 @@ class DirectoryNaming:
path = os.path.join(path, f"{manga_info.title}.jpg")
return Path(path)
@classmethod
def get_unique_ordered(cls, a, b):
"""_summary_
Args:
a (_type_): a 去重
b (_type_): _description_
Returns:
_type_: _description_
"""
b_set = set(b)
seen = set()
unique = []
for item in a:
if item not in b_set and item not in seen:
seen.add(item)
unique.append(item)
return unique
class FileNaming:
"""文件命名策略类"""
PREFIX_SCRAMBLE = "scramble="

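The new `get_unique_ordered` returns the items of `a` that do not appear in `b`, keeping their original order and dropping duplicates within `a`. A standalone sketch of the same behavior (the sample file names are made up):

def get_unique_ordered(a, b):
    # Mirror of DirectoryNaming.get_unique_ordered: items of a not in b, deduplicated, order kept.
    b_set = set(b)
    seen = set()
    unique = []
    for item in a:
        if item not in b_set and item not in seen:
            seen.add(item)
            unique.append(item)
    return unique

print(get_unique_ordered(["a.jpg", "b.jpg", "a.jpg", "c.jpg"], ["b.jpg"]))
# -> ['a.jpg', 'c.jpg']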
View File

@@ -1,6 +1,6 @@
import asyncio
import aiohttp, json
import base64,hashlib,os,shutil,os.path,math
import base64,hashlib,os,shutil,os.path,math,time
from PIL import Image
import logging,time,os,shutil,re,xmlschema
from pathlib import Path
@@ -376,6 +376,7 @@ class CBZUtils:
return False
if self._zip_compression(source_dir=chapter_dir, target_file=self.cbz_path, remove=False):
logger.info(f"章节 {chapter_dir.name} 打包完成: {self.cbz_path}")
time.sleep(0.5)
if clear_chapter:
try:
shutil.rmtree(chapter_dir)

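The `time.sleep(0.5)` added above sits between packaging and cleanup, presumably to give the just-written archive a moment to be released before the chapter directory is deleted. A retry-based alternative is sketched below; `remove_dir_with_retry` and its parameters are hypothetical and not part of this commit:

import shutil
import time

def remove_dir_with_retry(path, attempts=3, delay=0.5):
    # Sketch: retry rmtree briefly instead of relying on a single fixed sleep,
    # for the case where a file handle is still held right after packaging.
    for _ in range(attempts):
        try:
            shutil.rmtree(path)
            return True
        except OSError:
            time.sleep(delay)
    return False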
View File

@@ -140,22 +140,41 @@ class BaseSite(ABC):
list_cbz = list(FileNaming().get_filenames_optimized(cbz_dir, ext_filter=[".CBZ"]))
list_cover = await self.update_covers(manga_info)
# Used to collect every CBZ's Cover.jpg path
list_file_img = []
for cbz_path in list_cbz:
first_cover_path = str(cbz_path).split(".")[0]+".jpg"
if len(list_cover) == 1:
if FileNaming().file_update_by_date(first_cover_path, day=30):
if FileNaming().file_update_by_date(first_cover_path, day=30) or not os.path.exists(first_cover_path):
shutil.copy(list_cover[0].path, first_cover_path)
list_file_img.append(first_cover_path)
logger.info(f"{list_cover[0].path} ==> {first_cover_path} 已复制")
else:
list_file_img.append(first_cover_path)
continue
cover_count = 1
for cover in list_cover:
cover_path = cover.path
if os.path.exists(first_cover_path): os.remove(first_cover_path)
new_cover_path = FileNaming().cover_format_path(str(cbz_path).split(".")[0]+".jpg", count=cover_count)
if FileNaming().file_update_by_date(new_cover_path, day=30):
if FileNaming().file_update_by_date(new_cover_path, day=30) or not os.path.exists(new_cover_path):
shutil.copy(cover_path, new_cover_path)
list_file_img.append(new_cover_path)
logger.info(f"{cover_path} ==> {new_cover_path} 已复制")
cover_count += 1
else:
list_file_img.append(new_cover_path)
cover_count += 1
list_cbz_and_img = list(FileNaming().get_filenames_optimized(cbz_dir, ext_filter=[".jpg"]))
clear_imgs = DirectoryNaming.get_unique_ordered(list_cbz_and_img, list_file_img)
# Clean up redundant images
if len(clear_imgs) > 0:
try:
for img in clear_imgs: os.remove(img)
except Exception:
logger.error(f"{clear_imgs} 删除失败")
async def download_manga(self, manga_url: str) -> AsyncGenerator[Dict, None]:
"""下载整部漫画"""

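The hunk above records every cover path it copies or keeps in `list_file_img`, then compares that list against the .jpg files actually present in `cbz_dir` via `get_unique_ordered` and deletes whatever is left over. A condensed, self-contained sketch of that cleanup step; `clean_stale_covers` and its arguments are hypothetical stand-ins, not names from the repository:

import os
from pathlib import Path

def clean_stale_covers(cbz_dir, kept_covers):
    # Hypothetical condensed version of the cleanup in the hunk above:
    # any .jpg under cbz_dir that was not just written or kept gets removed.
    on_disk = [str(p) for p in Path(cbz_dir).glob("*.jpg")]
    kept = set(kept_covers)
    for img in on_disk:
        if img in kept:
            continue
        try:
            os.remove(img)
        except OSError:
            # Same spirit as the diff: log the failure rather than raise.
            print(f"failed to delete {img}")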
View File

@@ -1,6 +1,5 @@
from pathlib import Path
from typing import Dict, Type, Optional
import logging
from src.config import BASE_IMAGES_DIR
from src.sites.base import BaseSite
from src.sites.configs.rouman import RoumanSite
@@ -166,7 +165,7 @@ class MangaManager:
await self.download_manga(str(url), title = title, created_at = created_at)
@classmethod
async def download_manga(cls, url: str, title: str, created_at: str, save_dir: Path = BASE_IMAGES_DIR):
async def download_manga(cls, url: str, title: str = None, created_at: str = None, save_dir: Path = BASE_IMAGES_DIR):
"""下载漫画"""
manager = MangaManager(save_dir)
@@ -203,7 +202,7 @@ class MangaManager:
logger.error(f"Download error: {result['error']}")
# All downloads complete
if int(total_chapters) == int(success_chapters):
if int(total_chapters) == int(success_chapters) and title != None and created_at != None:
MangaUtils().add_manga(title, created_at=created_at)
logger.info(f"全部完成 {title}, {created_at}")
except MangaException as e:
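With `title` and `created_at` now optional, `MangaManager.download_manga` can be driven with just a URL, and the `add_manga` bookkeeping at the end only runs when both values were provided. A minimal call sketch; the import path and URL are placeholders, not taken from the repository:

import asyncio

from src.manager import MangaManager  # assumed module path

async def main():
    # URL-only call: title/created_at default to None, so the completion
    # record via MangaUtils().add_manga is simply skipped.
    await MangaManager.download_manga("https://example.com/manga/123")

asyncio.run(main())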