Skip to content

Commit

Permalink
Merge branch 'refs/heads/main_v2' into dev_v2
Browse files Browse the repository at this point in the history
  • Loading branch information
sctop committed Jun 15, 2024
2 parents 67db57c + b1c9ae5 commit 03e7b55
Show file tree
Hide file tree
Showing 7 changed files with 121 additions and 21 deletions.
2 changes: 1 addition & 1 deletion data
Submodule data updated 1599 files
14 changes: 11 additions & 3 deletions data_model/actual_data/_story/story_part_auto.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,8 @@ def __init__(self, data, story):
self.data_special = {
"flag": self.data["special"]["flag"],
"track": None if self.data["special"]["track"] == "OST_0" else TrackInfo.get_instance(
self.data["special"]["track"])
self.data["special"]["track"]),
"char": [] if "char" not in self.data["special"].keys() else self.data["special"]["char"]
}

def load(self):
Expand Down Expand Up @@ -188,10 +189,14 @@ def to_json(self):
"char_to_char": self.data["char_to_char"],
"special": {
"flag": self.data_special["flag"],
"track": None if self.data_special["track"] is None else self.data_special["track"].to_json_basic()
"track": None if self.data_special["track"] is None else self.data_special["track"].to_json_basic(),
"char": self.data_special["char"]
}
}

def to_json_basic(self):
return self.to_json()

@property
def bgm_special(self):
# see this traceback:
Expand All @@ -209,4 +214,7 @@ def to_json_basic_tracks(self):
# traceback:
# File "F:\GitFile\BA_OST_Index_Parser\data_model\actual_data\story.py", line 355, in to_json
# t["bgm_special"] = self.part.to_json_basic_tracks()
return [self.data_special["track"].to_json_basic()]
try:
return [self.data_special["track"].to_json_basic()]
except Exception:
return []
83 changes: 83 additions & 0 deletions data_model/actual_data/_track/track_playback.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
import logging
from data_model.loader import i18n_translator
from data_model.tool.to_json import IToJson
from data_model.loader.manager_constant import constant_manager
from collections import UserList


class TrackPlaybackDataEntry(IToJson):
    """One playback entry for a track: localized metadata plus a resolved player URL."""

    def __init__(self, nx_data: dict, iframe_data: dict):
        # Raw inputs are retained; process() derives self.result from them.
        self.nx_data = nx_data
        self.iframe_data = iframe_data

        self.process()

    @staticmethod
    def get_iframe_url(para1_type: int, para2_id: int, url: str = "https://music.163.com/outchain/player"):
        """Build the outchain player URL; an empty `url` falls back to the default host."""
        base = url if url != "" else "https://music.163.com/outchain/player"
        return f"{base}?type={para1_type}&id={para2_id}"

    def process(self):
        """Assemble self.result from the raw nx/iframe dicts."""
        nx = self.nx_data
        self.result = {
            "id": nx["id"],
            "path": nx["path"],
            # type/desc come back as translator objects (flattened in to_json)
            "type": i18n_translator.query(f'[TRACK_PLAYBACK_TYPE_{nx["type"]}]'),
            "desc": i18n_translator.query(nx["desc"]),
            "timestamp": nx["timestamp"]
        }

        params = self.iframe_data["iframe_parameter"]
        if params["url"] != "":
            self.result["iframe_url"] = self.get_iframe_url(**params)
        else:
            # No iframe parameters available; use the plain detail-page URL instead.
            self.result["iframe_url"] = self.iframe_data["url_detail"]

    def to_json(self):
        """Serialize; `type`/`desc` are flattened via their own to_json()."""
        r = self.result
        return {
            "id": r["id"],
            "path": r["path"],
            "type": r["type"].to_json(),
            "desc": r["desc"].to_json(),
            "timestamp": r["timestamp"],
            "iframe_url": r["iframe_url"]
        }

    def to_json_basic(self):
        return self.to_json()


class TrackPlaybackDataListManager(UserList, IToJson):
    """A list of TrackPlaybackDataEntry objects that serializes to a JSON array."""

    def load(self, data: list):
        # NOTE(review): TrackPlaybackDataEntry.__init__ requires two arguments
        # (nx_data, iframe_data) but only one is passed here, so any call to
        # load() raises TypeError. TrackPlayback.process() bypasses this method
        # and appends fully-constructed entries directly — confirm whether
        # load() is dead code or should receive paired (nx, iframe) data.
        for i in data:
            self.append(TrackPlaybackDataEntry(i))

    def to_json(self):
        # Delegate serialization to each contained entry.
        return [i.to_json_basic() for i in self]

    def to_json_basic(self):
        return self.to_json()


class TrackPlayback(IToJson):
    """All playback entries registered for a single track instance id."""

    def __init__(self, instance_id: str):
        self.instance_id = instance_id
        self.all_data = TrackPlaybackDataListManager()

        self.process()

    def process(self):
        """Populate self.all_data from the constant tables; a missing id logs a warning."""
        try:
            for raw_entry in constant_manager.query("track_playback", self.instance_id):
                ref = constant_manager.query("track_playback_ref", raw_entry["path"])
                self.all_data.append(TrackPlaybackDataEntry(raw_entry, ref))
        except KeyError:
            # Best-effort: either no playback table exists for this track or a
            # ref entry is missing; leave whatever was collected so far.
            logging.warning(f'No TrackPlayback info for "{self.instance_id}"')

    def to_json(self):
        return self.all_data.to_json()

    def to_json_basic(self):
        return self.all_data.to_json_basic()
3 changes: 3 additions & 0 deletions data_model/actual_data/track.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
from data_model.actual_data.used_by import BaseUsedBy, UsedByRegisterMixin, OrderedDictWithCounter, UsedByToJsonMixin
from data_model.actual_data.related_to import BaseRelatedTo, RelatedToJsonMixin, RelatedToRegisterMixin
from data_model.actual_data._track.track_version import *
from data_model.actual_data._track.track_playback import TrackPlayback
from data_model.actual_data.tag import TagListManager
from data_model.actual_data.composer import ComposerInfo

Expand Down Expand Up @@ -369,6 +370,7 @@ def __init__(self, **kwargs):

self.stats = TrackStats(data["stats"])
self.stats.second_init(self)
self.playback_info = TrackPlayback(self.instance_id)

self.bond_chars = []

Expand Down Expand Up @@ -400,6 +402,7 @@ def to_json(self):
"track_type": self.track_type,
"duration": self.duration,
"duration_splited": seconds_to_minutes(self.duration),
"playback_info": self.playback_info.to_json_basic(),

"composer": self.composer.to_json_basic(),
"tags": self.tags.to_json_basic(),
Expand Down
23 changes: 15 additions & 8 deletions data_model/loader/manager_constant.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,20 +19,27 @@ def load(self):
for key in self.filelist:
with open(self.join_path(key), mode="r", encoding="UTF-8") as file:
temp = json.load(file)
id_ = temp["file_id"]
del temp["file_id"], temp["filetype"]
try:
id_ = temp["file_id"]
del temp["file_id"], temp["filetype"]
except KeyError:
# Default fallback strategy: derive the id from the filename.
id_ = os.path.splitext(key)[0]

self.constant[id_] = temp

def query(self, constant_id: str, value: str or int):
result = self.constant[constant_id][str(value)]
if "en" in result.keys():
# It's an LangStringModel object!
t = LangStringModel()
t.load(result)
return t
if isinstance(result, dict):
if "en" in result.keys():
# It's a LangStringModel object!
t = LangStringModel()
t.load(result)
return t
else:
# Maybe just a normal dict, like in `composer.json`
return result
else:
# Maybe just a normal dict, like in `composer.json`
return result

def query_by_constant_file(self, constant_id: str):
Expand Down
12 changes: 6 additions & 6 deletions main.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import os
import json
import pickle
import time
import shutil
from functools import partial
Expand Down Expand Up @@ -29,7 +29,7 @@
BASE_EXPORT = "data_export"

join_base = partial(os.path.join, BASE_EXPORT)
dump_json = partial(json.dump, ensure_ascii=False)
dump_json = partial(pickle.dump)

# deleting old files
start_time = time.time()
Expand All @@ -51,7 +51,7 @@ def write_loader(target_loader):
loader = target_loader.loader
path = loader.get_path(filename=True)
create_export_dir(loader)
with open(join_base(path), mode="w", encoding="UTF-8") as file:
with open(join_base(path), mode="wb") as file:
dump_json(loader.to_json(), file)


Expand All @@ -60,7 +60,7 @@ def write_loader2(target_loader):
loader = target_loader
path = loader.get_path(filename=False)
os.makedirs(join_base(path), exist_ok=True)
with open(join_base(path, "_all.json"), mode="w", encoding="UTF-8") as file:
with open(join_base(path, "_all.json"), mode="wb") as file:
dump_json(loader.to_json(), file)


Expand All @@ -69,7 +69,7 @@ def write_loader3(target_loader):
loader = target_loader
path = loader.get_path(filename=False)
os.makedirs(join_base(os.path.split(path)[0]), exist_ok=True)
with open(join_base(path), mode="w", encoding="UTF-8") as file:
with open(join_base(path), mode="wb") as file:
dump_json(loader.to_json(), file)


Expand Down Expand Up @@ -115,7 +115,7 @@ def write_loader3(target_loader):
splited = path.split("/")
path = "/".join([*splited, splited[-1] + ".json"])

with open(join_base(path), mode="w", encoding="UTF-8") as file:
with open(join_base(path), mode="wb") as file:
dump_json(loader.to_json(), file)

if char.loader.filetype == -53:
Expand Down
5 changes: 2 additions & 3 deletions main_quick_test.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
import time
import os
import json
import pickle
from functools import partial

from data_model.loader.loader_detect import get_loader_by_filepath
Expand All @@ -25,7 +24,7 @@
PostExecutionManager.execute_pool("background_character_direct")
print(f"Linking Stuff Together: {time.time() - start_time:0.2f}")

dump_json = partial(json.dumps, ensure_ascii=False)
dump_json = partial(pickle.dumps)


def write_loader(target_loader):
Expand Down

0 comments on commit 03e7b55

Please sign in to comment.