Commit 30e536b (1 parent: e8d2589)
Showing 44 changed files with 546 additions and 466 deletions.
Nine files renamed without changes; one empty file.
Modified file:

@@ -1,7 +1,7 @@
 import asyncio
 import sys

-from .main import main
+from ._main import main


 sys.exit(asyncio.run(main()))
New file (174 added lines):

@@ -0,0 +1,174 @@
import re

from ._types import SauceData, JavData


def match_jav_1(name: str) -> JavData | None:
    m = re.search(r"(\w{2,6})[-_](\d{2,4}\w?)", name)
    if not m:
        return None
    name = f"{m.group(1)}-{m.group(2)}"
    name = name.upper()
    return JavData(
        name=name,
        sauce_list=[
            SauceData(name="javbus", query=name),
            SauceData(name="javlibrary", query=name),
        ],
    )


def match_jav_2(name: str) -> JavData | None:
    m = re.search(r"(\d{3,4})(\w{3,6})[-_](\d{3,4}\w?)", name)
    if not m:
        return None
    name = f"{m.group(1)}{m.group(2)}-{m.group(3)}"
    name = name.upper()
    return JavData(
        name=name,
        sauce_list=[
            SauceData(name="javbus", query=name),
            SauceData(name="javlibrary", query=name),
        ],
    )


def match_fc2(name: str) -> JavData | None:
    m = re.search(r"fc2[-_]ppv[-_](\d+)", name, re.I)
    if not m:
        return None
    name = f"FC2-PPV-{m.group(1)}"
    return JavData(
        name=name,
        sauce_list=[
            SauceData(name="javbee", query=name),
            SauceData(name="javtorrent", query=name),
        ],
    )


def match_heydouga(name: str) -> JavData | None:
    m = re.search(r"hey(douga)?[-_ ]?(\d+)[-_](\d+)", name, re.I)
    if not m:
        return None
    a = m.group(2)
    b = m.group(3)
    name = f"HEYDOUGA-{a}-{b}"
    return JavData(
        name=name,
        sauce_list=[
            SauceData(name="heydouga", query=f"{a}/{b}"),
        ],
    )


def match_caribpr(name: str) -> JavData | None:
    m = re.search(r"(\d{6})[-_](\d{3})-CARIBPR", name, re.I)
    if not m:
        return None
    a = m.group(1)
    b = m.group(2)
    name = f"CARIBPR {a}-{b}"
    return JavData(
        name=name,
        sauce_list=[SauceData(name="caribpr", query=f"{a}-{b}")],
    )


def match_carib(name: str) -> JavData | None:
    m = re.search(r"(\d{6})[-_](\d{3})-CARIB", name, re.I)
    if not m:
        return None
    a = m.group(1)
    b = m.group(2)
    name = f"CARIB {a}-{b}"
    return JavData(
        name=name,
        sauce_list=[SauceData(name="carib", query=f"{a}-{b}")],
    )


def match_1pondo(name: str) -> JavData | None:
    m = re.search(r"(\d{6})[-_](\d{3})-1PON", name, re.I)
    if not m:
        return None
    a = m.group(1)
    b = m.group(2)
    name = f"1PONDO {a}-{b}"
    return JavData(
        name=name,
        sauce_list=[SauceData(name="1pondo", query=f"{a}-{b}")],
    )


def match_unknown(name: str) -> JavData | None:
    m = re.search(r"(\d{6})[-_](\d{3})", name, re.I)
    if not m:
        return None
    a = m.group(1)
    b = m.group(2)
    name = f"{a}-{b}"
    return JavData(
        name=name,
        sauce_list=[
            SauceData(name="carib", query=f"{a}-{b}"),
            SauceData(name="caribpr", query=f"{a}-{b}"),
            SauceData(name="1pondo", query=f"{a}-{b}"),
        ],
    )


def match_mesubuta(name: str) -> JavData | None:
    m = re.search(r"(\d{6})[-_](\d{3})[-_](\d{2})", name, re.I)
    if not m:
        return None
    name = f"{m.group(1)}_{m.group(2)}_{m.group(3)}"
    return JavData(
        name=name,
        sauce_list=[],
    )


def match_10musume(name: str) -> JavData | None:
    m = re.search(r"(\d{6})[-_](\d{2})-10MU", name, re.I)
    if not m:
        return None
    a = m.group(1)
    b = m.group(2)
    name = f"10MU {a}_{b}"
    return JavData(
        name=name,
        sauce_list=[
            SauceData(name="10musume", query=f"{a}_{b}"),
        ],
    )


EXCLUDE_LIST = [
    match_mesubuta,
]


INCLUDE_LIST = [
    match_10musume,
    match_caribpr,
    match_carib,
    match_1pondo,
    match_heydouga,
    match_fc2,
    match_jav_1,
    match_jav_2,
    match_unknown,
]


def get_jav_query(name: str) -> JavData | None:
    for ex in EXCLUDE_LIST:
        rv = ex(name)
        if rv:
            return None
    for in_ in INCLUDE_LIST:
        rv = in_(name)
        if rv:
            return rv
    return None
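
For orientation, a minimal usage sketch of the dispatcher: get_jav_query first runs every matcher in EXCLUDE_LIST and bails out on any hit, then returns the result of the first INCLUDE_LIST matcher that recognizes the name. The example filenames below are made up; the expected behaviour follows directly from the regexes above.

from ._dispatch import get_jav_query

# A Carib-style release name passes the exclusion pass and is claimed by
# match_carib, normalizing to "CARIB 123456-789" with a single "carib"
# sauce entry that queries "123456-789".
assert get_jav_query("123456-789-carib-1080p") is not None

# FC2 names fall through to match_fc2 and normalize to "FC2-PPV-1234567".
assert get_jav_query("FC2-PPV-1234567") is not None

# Mesubuta-style names (six digits, three digits, two digits) match the
# lone EXCLUDE_LIST entry, so the dispatcher rejects them outright.
assert get_jav_query("123456_789_01") is None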

New file (134 added lines):

@@ -0,0 +1,134 @@
from argparse import ArgumentParser, Namespace
from collections.abc import Callable, Awaitable
from pathlib import Path
from typing import Any
import asyncio
import sys

import yaml
from aiohttp import ClientSession
from wcpan.drive.core.types import Drive, Node
from wcpan.drive.cli.lib import create_drive_from_config

from ._sauce import fetch_jav_data
from ._dispatch import get_jav_query


async def main(args: list[str] | None = None) -> int:
    parser = ArgumentParser("jav")

    parser.add_argument("--config", "-c", required=True, type=str)

    command = parser.add_subparsers()

    g_parser = command.add_parser("generate", aliases=["g"])
    g_parser.add_argument("path", type=str)
    g_parser.set_defaults(action=_generate)

    a_parser = command.add_parser("apply", aliases=["a"])
    a_parser.set_defaults(action=_apply)

    f_parser = command.add_parser("filter", aliases=["f"])
    f_parser.set_defaults(action=_filter)

    kwargs = parser.parse_args(args)

    config_path = Path(kwargs.config).expanduser().resolve()
    action: Callable[[Drive, Namespace], Awaitable[int]] | None = getattr(kwargs, "action", None)
    if not action:
        return 1

    async with create_drive_from_config(config_path) as drive:
        return await action(drive, kwargs)


async def _generate(drive: Drive, kwargs: Namespace) -> int:
    root_path = Path(kwargs.path)

    async with ClientSession() as session:
        root_node = await drive.get_node_by_path(root_path)
        children = await drive.get_children(root_node)
        async for node in process_node_list(session, children):
            yaml.safe_dump(
                [node],
                sys.stdout.buffer,
                encoding="utf-8",
                allow_unicode=True,
                default_flow_style=False,
            )
            await asyncio.sleep(1)
    return 0


async def _apply(drive: Drive, kwargs: Namespace) -> int:
    manifest = yaml.safe_load(sys.stdin)
    for row in manifest:
        id_ = row["id"]
        title_dict = row["title"]

        for value in title_dict.values():
            if not value:
                continue

            node = await drive.get_node_by_id(id_)
            print(f"rename {node.name} -> {value}")
            await rename(drive, node, value)
            break
    return 0


async def _filter(drive: Drive, kwargs: Namespace) -> int:
    manifest = yaml.safe_load(sys.stdin)

    not_all_null = (m for m in manifest if any(m["title"].values()))

    def all_same(m: dict[str, Any]):
        values = set(v for v in m["title"].values() if v)
        values.add(m["name"])
        return len(values) == 1

    not_all_same = (m for m in not_all_null if not all_same(m))

    for node in not_all_same:
        yaml.safe_dump(
            [node],
            sys.stdout.buffer,
            encoding="utf-8",
            allow_unicode=True,
            default_flow_style=False,
        )

    return 0


async def process_node_list(session: ClientSession, node_list: list[Node]):
    for node in node_list:
        if node.is_trashed:
            continue
        jav_query = get_jav_query(node.name)
        if not jav_query:
            continue
        title = await fetch_jav_data(session, jav_query)

        yield {
            "id": node.id,
            "name": node.name,
            "jav_id": jav_query,
            "title": title,
        }


async def rename(drive: Drive, node: Node, new_name: str) -> None:
    if node.is_directory:
        if new_name == node.name:
            print("skipped")
            return
        await drive.move(node, new_parent=None, new_name=new_name)
        return

    if not node.parent_id:
        return

    root_node = await drive.get_node_by_id(node.parent_id)
    parent = await drive.create_directory(new_name, root_node)
    await drive.move(node, new_parent=parent, new_name=None)
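
Taken together, the subcommands form a small review pipeline: generate walks one drive folder and prints a YAML record per matched file (throttled to roughly one lookup per second), filter drops records whose fetched titles are all empty or all identical to the current name, and apply reads the manifest back from stdin and performs the renames. A minimal invocation sketch follows; the config and folder paths are hypothetical, and the shell lines assume the package installs a "jav" entry point (the prog name passed to ArgumentParser), which this diff does not show.

import asyncio

from ._main import main

# Hypothetical paths, for illustration only.
CONFIG = "~/.config/wcpan/drive.yaml"

# Emit a manifest of rename candidates for one drive folder to stdout.
rv = asyncio.run(main(["--config", CONFIG, "generate", "/video/unsorted"]))

# Shell equivalent of the full pipeline (entry-point name assumed):
#   jav -c drive.yaml generate /video/unsorted > manifest.yaml
#   jav -c drive.yaml filter < manifest.yaml > reviewed.yaml
#   jav -c drive.yaml apply < reviewed.yaml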