
Merge pull request #329 from EstrellaXD/3.0-dev
3.0.5
EstrellaXD authored Jun 13, 2023
2 parents 7086568 + 2a51490 commit 51f720a
Showing 6 changed files with 75 additions and 84 deletions.
90 changes: 27 additions & 63 deletions src/module/database/bangumi.py
@@ -115,88 +115,54 @@ def search_all(self) -> list[BangumiData]:

def search_id(self, _id: int) -> BangumiData | None:
condition = {"id": _id}
- value = self._search_data(table_name=self.__table_name, condition=condition)
- # self._cursor.execute(
- #     """
- #     SELECT * FROM bangumi WHERE id = :id
- #     """,
- #     {"id": _id},
- # )
- # values = self._cursor.fetchone()
- if value is None:
+ dict_data = self._search_data(table_name=self.__table_name, condition=condition)
+ if dict_data is None:
return None
- keys = [x[0] for x in self._cursor.description]
- dict_data = dict(zip(keys, value))
return self.__db_to_data(dict_data)

def search_official_title(self, official_title: str) -> BangumiData | None:
- value = self._search_data(
+ dict_data = self._search_data(
table_name=self.__table_name, condition={"official_title": official_title}
)
- # self._cursor.execute(
- #     """
- #     SELECT * FROM bangumi WHERE official_title = :official_title
- #     """,
- #     {"official_title": official_title},
- # )
- # values = self._cursor.fetchone()
- if value is None:
+ if dict_data is None:
return None
- keys = [x[0] for x in self._cursor.description]
- dict_data = dict(zip(keys, value))
return self.__db_to_data(dict_data)

def match_poster(self, bangumi_name: str) -> str:
condition = f"INSTR({bangumi_name}, official_title) > 0"
condition = {"_custom_condition": "INSTR(:bangumi_name, official_title) > 0"}
keys = ["official_title", "poster_link"]
data = self._search_data(
table_name=self.__table_name,
keys=keys,
condition=condition,
)
- # self._cursor.execute(
- #     """
- #     SELECT official_title, poster_link
- #     FROM bangumi
- #     WHERE INSTR(:bangumi_name, official_title) > 0
- #     """,
- #     {"bangumi_name": bangumi_name},
- # )
- # data = self._cursor.fetchone()
if not data:
return ""
official_title, poster_link = data
if not poster_link:
return ""
return poster_link

@locked
def match_list(self, torrent_list: list, rss_link: str) -> list:
# Match title_raw in database
keys = ["title_raw", "rss_link", "poster_link"]
- data = self._search_datas(
+ match_datas = self._search_datas(
table_name=self.__table_name,
keys=keys,
)
- # self._cursor.execute(
- #     """
- #     SELECT title_raw, rss_link, poster_link FROM bangumi
- #     """
- # )
- # data = self._cursor.fetchall()
- if not data:
+ if not match_datas:
return torrent_list
# Match title
i = 0
while i < len(torrent_list):
torrent = torrent_list[i]
- for title_raw, rss_set, poster_link in data:
- if title_raw in torrent.name:
- if rss_link not in rss_set:
- rss_set += "," + rss_link
- self.update_rss(title_raw, rss_set)
- if not poster_link:
- self.update_poster(title_raw, torrent.poster_link)
+ for match_data in match_datas:
+ if match_data.get("title_raw") in torrent.name:
+ if rss_link not in match_data.get("rss_link"):
+ match_data["rss_link"] += f",{rss_link}"
+ self.update_rss(match_data.get("title_raw"), match_data.get("rss_link"))
+ if not match_data.get("poster_link"):
+ self.update_poster(match_data.get("title_raw"), torrent.poster_link)
torrent_list.pop(i)
break
else:
@@ -205,27 +171,20 @@ def match_list(self, torrent_list: list, rss_link: str) -> list:

def not_complete(self) -> list[BangumiData]:
# Find eps_complete = False
condition = "eps_complete = 0"
data = self._search_datas(
condition = {"eps_collect": 0}
dict_data = self._search_datas(
table_name=self.__table_name,
condition=condition,
)

- self._cursor.execute(
- """
- SELECT * FROM bangumi WHERE eps_collect = 0
- """
- )
- return self.__fetch_data()
+ return [self.__db_to_data(x) for x in dict_data]

def not_added(self) -> list[BangumiData]:
- self._cursor.execute(
- """
- SELECT * FROM bangumi
- WHERE added = 0 OR rule_name IS NULL OR save_path IS NULL
- """
+ condition = {"added": 0, "rule_name": None, "save_path": None}
+ dict_data = self._search_datas(
+ table_name=self.__table_name,
+ condition=condition,
)
- return self.__fetch_data()
+ return [self.__db_to_data(x) for x in dict_data]

def gen_id(self) -> int:
self._cursor.execute(
@@ -255,3 +214,8 @@ def __check_list_exist(self, data_list: list[BangumiData]):
if self.__check_exist(data):
return True
return False

+ if __name__ == '__main__':
+ with BangumiDatabase() as db:
+ print(db.not_added())
+ print(db.not_complete())
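
The rewritten match_poster leans on the connector's "_custom_condition" escape hatch: the INSTR(:bangumi_name, official_title) > 0 fragment is appended verbatim to the WHERE clause while :bangumi_name is bound as a named parameter. A minimal sqlite3 sketch of the resulting query, with a made-up table and title purely for illustration:

import sqlite3

# Throwaway stand-in for the real bangumi table, for illustration only.
conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE bangumi (official_title TEXT, poster_link TEXT)")
conn.execute("INSERT INTO bangumi VALUES ('Example Title', 'https://example.com/poster.jpg')")

# What the condition dict is meant to expand into: INSTR() checks that the
# stored official_title occurs inside the name being matched.
row = conn.execute(
    "SELECT official_title, poster_link FROM bangumi "
    "WHERE INSTR(:bangumi_name, official_title) > 0",
    {"bangumi_name": "Example Title S01E05 [1080p]"},
).fetchone()
print(row)  # ('Example Title', 'https://example.com/poster.jpg')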
38 changes: 25 additions & 13 deletions src/module/database/connector.py
@@ -94,33 +94,41 @@ def _update_section(self, table_name: str, location: dict, update_dict: dict):
)
self._conn.commit()


def _delete_all(self, table_name: str):
self._cursor.execute(f"DELETE FROM {table_name}")
self._conn.commit()


- def _search_data(self, table_name: str, keys: list[str] | None, condition: str) -> dict:
- if keys is None:
- self._cursor.execute(f"SELECT * FROM {table_name} WHERE {condition}")
- else:
- self._cursor.execute(
- f"SELECT {', '.join(keys)} FROM {table_name} WHERE {condition}"
- )
- return dict(zip(keys, self._cursor.fetchone()))
+ def _delete(self, table_name: str, condition: dict):
+ condition_sql = " AND ".join([f"{key} = :{key}" for key in condition.keys()])
+ self._cursor.execute(f"DELETE FROM {table_name} WHERE {condition_sql}", condition)
+ self._conn.commit()


- def _search_datas(self, table_name: str, keys: list[str] | None, condition: str = None) -> list[dict]:
+ def _search(self, table_name: str, keys: list[str] | None = None, condition: dict = None):
if keys is None:
select_sql = "*"
else:
select_sql = ", ".join(keys)
if condition is None:
self._cursor.execute(f"SELECT {select_sql} FROM {table_name}")
else:
+ custom_condition = condition.pop("_custom_condition", None)
+ condition_sql = " AND ".join([f"{key} = :{key}" for key in condition.keys()]) + (
+ f" AND {custom_condition}" if custom_condition else ""
+ )
self._cursor.execute(
- f"SELECT {select_sql} FROM {table_name} WHERE {condition}"
+ f"SELECT {select_sql} FROM {table_name} WHERE {condition_sql}", condition
)

+ def _search_data(self, table_name: str, keys: list[str] | None = None, condition: dict = None) -> dict:
+ if keys is None:
+ keys = self.__get_table_columns(table_name)
+ self._search(table_name, keys, condition)
+ return dict(zip(keys, self._cursor.fetchone()))

+ def _search_datas(self, table_name: str, keys: list[str] | None = None, condition: dict = None) -> list[dict]:
+ if keys is None:
+ keys = self.__get_table_columns(table_name)
+ self._search(table_name, keys, condition)
return [dict(zip(keys, row)) for row in self._cursor.fetchall()]

def _table_exists(self, table_name: str) -> bool:
@@ -130,6 +138,10 @@ def _table_exists(self, table_name: str) -> bool:
)
return len(self._cursor.fetchall()) == 1

+ def __get_table_columns(self, table_name: str) -> list[str]:
+ self._cursor.execute(f"PRAGMA table_info({table_name})")
+ return [column_info[1] for column_info in self._cursor.fetchall()]

@staticmethod
def __python_to_sqlite_type(value) -> str:
if isinstance(value, int):
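A self-contained sketch of what the new dict-based helpers boil down to: equality terms built from the condition keys plus an optional raw "_custom_condition" fragment, and column names pulled from PRAGMA table_info. The helper names and the sample table below are illustrative, not part of the commit:

import sqlite3

def build_where(condition: dict) -> tuple[str, dict]:
    # Same idea as _search(): each key becomes "key = :key", and an optional
    # "_custom_condition" entry is appended as a raw SQL fragment.
    condition = dict(condition)
    custom = condition.pop("_custom_condition", None)
    terms = [f"{key} = :{key}" for key in condition]
    if custom:
        terms.append(custom)
    return " AND ".join(terms), condition

def table_columns(cursor, table_name: str) -> list[str]:
    # Mirrors __get_table_columns(): the column name is the second field
    # of every row returned by PRAGMA table_info.
    cursor.execute(f"PRAGMA table_info({table_name})")
    return [row[1] for row in cursor.fetchall()]

conn = sqlite3.connect(":memory:")
cur = conn.cursor()
cur.execute("CREATE TABLE bangumi (id INTEGER, official_title TEXT, eps_collect INTEGER)")
cur.execute("INSERT INTO bangumi VALUES (1, 'Example Title', 0)")

keys = table_columns(cur, "bangumi")
where_sql, params = build_where({"eps_collect": 0})
cur.execute(f"SELECT {', '.join(keys)} FROM bangumi WHERE {where_sql}", params)
print([dict(zip(keys, row)) for row in cur.fetchall()])
# [{'id': 1, 'official_title': 'Example Title', 'eps_collect': 0}]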
5 changes: 5 additions & 0 deletions src/module/parser/analyser/raw_parser.py
@@ -179,3 +179,8 @@ def raw_parser(raw: str) -> Episode | None:
return Episode(
name_en, name_zh, name_jp, season, sr, episode, sub, group, dpi, source
)


+ if __name__ == '__main__':
+ title = "[动漫国字幕组&LoliHouse] THE MARGINAL SERVICE - 08 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"
+ print(raw_parser(title))
7 changes: 4 additions & 3 deletions src/module/parser/analyser/tmdb_parser.py
@@ -39,7 +39,8 @@ def is_animation(tv_id, language) -> bool:


def get_season(seasons: list) -> int:
- for season in seasons:
+ ss = sorted(seasons, key=lambda e: e.get("air_date"), reverse=True)
+ for season in ss:
if re.search(r"第 \d 季", season.get("season")) is not None:
date = season.get("air_date").split("-")
[year, _ , _] = date
@@ -74,5 +75,5 @@ def tmdb_parser(title, language) -> TMDBInfo | None:


if __name__ == '__main__':
title = "鬼灭之刃"
print(tmdb_parser(title, "zh"))
title = "海盗战记"
print(tmdb_parser(title, "zh").last_season)
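
The get_season change orders the TMDB season entries by air_date, newest first, so the most recently aired "第 N 季" entry is the one that gets matched. A rough standalone sketch of that selection with made-up season data (the real entries come from the TMDB response):

import re

# Illustrative entries only; real ones are built from the TMDB API response.
seasons = [
    {"season": "第 1 季", "air_date": "2019-07-08"},
    {"season": "特别篇", "air_date": "2021-01-01"},
    {"season": "第 2 季", "air_date": "2023-01-10"},
]

def latest_numbered_season(seasons: list) -> int:
    # Newest air_date first, mirroring sorted(..., reverse=True) in get_season().
    for season in sorted(seasons, key=lambda e: e.get("air_date"), reverse=True):
        match = re.search(r"第 (\d) 季", season.get("season"))
        if match:
            return int(match.group(1))
    return 1

print(latest_numbered_season(seasons))  # 2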
8 changes: 4 additions & 4 deletions src/module/parser/analyser/torrent_parser.py
@@ -13,13 +13,13 @@
r"(.*) - (\d{1,4}(?!\d|p)|\d{1,4}\.\d{1,2}(?!\d|p))(?:v\d{1,2})?(?: )?(?:END)?(.*)",
r"(.*)[\[\ E](\d{1,4}|\d{1,4}\.\d{1,2})(?:v\d{1,2})?(?: )?(?:END)?[\]\ ](.*)",
r"(.*)\[(?:第)?(\d*\.*\d*)[话集話](?:END)?\](.*)",
r"(.*)第(\d*\.*\d*)[话話集](?:END)?(.*)",
r"(.*)第?(\d*\.*\d*)[话話集](?:END)?(.*)",
r"(.*)(?:S\d{2})?EP?(\d+)(.*)",
]

SUBTITLE_LANG = {
"zh-tw": ["TC", "CHT", "cht", "繁", "zh-tw"],
"zh": ["SC", "CHS", "chs", "简", "zh"],
"zh-tw": ["tc", "cht", "繁", "zh-tw"],
"zh": ["sc", "chs", "简", "zh"],
}


@@ -54,7 +54,7 @@ def get_season_and_title(season_and_title) -> tuple[str, int]:
def get_subtitle_lang(subtitle_name: str) -> str:
for key, value in SUBTITLE_LANG.items():
for v in value:
- if v in subtitle_name:
+ if v in subtitle_name.lower():
return key


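With the SUBTITLE_LANG tokens lowercased and the filename folded to lower case before comparison, tags like CHT, cht, and Cht all resolve to the same language. A quick sketch of the matching under that assumption (the sample filename is invented):

SUBTITLE_LANG = {
    "zh-tw": ["tc", "cht", "繁", "zh-tw"],
    "zh": ["sc", "chs", "简", "zh"],
}

def get_subtitle_lang(subtitle_name: str) -> str:
    # Lower the name once so the lower-case tokens match case-insensitively.
    lowered = subtitle_name.lower()
    for lang, tokens in SUBTITLE_LANG.items():
        for token in tokens:
            if token in lowered:
                return lang
    return ""

print(get_subtitle_lang("[Group] Example Title - 05 [CHT].ass"))  # zh-tw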
11 changes: 10 additions & 1 deletion src/module/parser/title_parser.py
@@ -49,7 +49,16 @@ def raw_parser(raw: str, rss_link: str) -> BangumiData | None:
"jp": episode.title_jp,
}
title_raw = episode.title_en if episode.title_en else episode.title_zh
- official_title = titles[language] if titles[language] else titles["zh"]
+ if titles[language]:
+ official_title = titles[language]
+ elif titles["zh"]:
+ official_title = titles["zh"]
+ elif titles["en"]:
+ official_title = titles["en"]
+ elif titles["jp"]:
+ official_title = titles["jp"]
+ else:
+ official_title = title_raw
_season = episode.season
data = BangumiData(
official_title=official_title,
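The new fallback chain simply walks the candidate titles in a fixed order of preference and falls back to the raw parsed title when every language slot is empty. The same logic, written compactly as an illustrative helper (not the committed code):

def pick_official_title(titles: dict, language: str, title_raw: str) -> str:
    # Preferred language first, then zh, en, jp, finally the raw title.
    for candidate in (titles.get(language), titles.get("zh"),
                      titles.get("en"), titles.get("jp")):
        if candidate:
            return candidate
    return title_raw

titles = {"zh": "", "en": "Example Show", "jp": ""}
print(pick_official_title(titles, "zh", "[Sub] Example Show - 01"))  # Example Show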
