95 lines
2.3 KiB
Python
95 lines
2.3 KiB
Python
#!/usr/bin/env python3
|
||
|
||
|
||
import scripter
|
||
import config
|
||
import hashlib
|
||
import replacements
|
||
|
||
# Module-level exclusion list. This is only a default value: open_onikakushi()
# overwrites it via replacements.get_excludes() once the script's MD5 hash is
# known. Presumably consumed by other modules at runtime — verify consumers
# before renaming.
excludes = ['zamyo gorrino']
|
||
|
||
def open_onikakushi() -> str:
    """Produce a fixed copy of the Onikakushi script file.

    Reads the original script (path taken from ``config``), hashes it to
    select the per-version replacement/exclusion tables, then writes a
    corrected copy to ``tmp/onikakushi.txt`` — using a full-line replacement
    where one is supplied for that line number, and running every other line
    through ``fix_common_displaced_commands``.

    Returns:
        The path of the written output file (``'tmp/onikakushi.txt'``).

    Raises:
        SystemExit: if the original script file cannot be opened.
    """
    # Hash the original file first: the replacement/exclusion tables are
    # keyed on the MD5 digest of the exact game-script version being patched.
    try:
        with open(config.get('original_path'), 'rb') as f:
            md5hash = hashlib.file_digest(f, "md5").hexdigest()
        # Re-open in text mode with the script's legacy Shift-JIS encoding.
        origfile = open(config.get('original_path'), 'r', encoding='shift_jisx0213')
    except OSError:
        # Narrowed from a bare `except:` so Ctrl-C and programming errors
        # are no longer silently swallowed; only I/O failures are reported.
        print(f"Could not open original script path {config.get('original_path')}")
        raise SystemExit(1)

    outpath = 'tmp/onikakushi.txt'

    repls = replacements.get_replacements(md5hash)
    # Publish the version-specific exclusions at module level for other
    # consumers (the default list is just a placeholder).
    global excludes
    excludes = replacements.get_excludes(md5hash)

    # Context managers guarantee both handles are closed even if a
    # replacement lookup or a line fix raises mid-loop (the original
    # leaked both files in that case).
    with origfile, open(outpath, 'w', encoding='shift_jisx0213') as outfile:
        # Replacement tables are keyed by 1-based line number.
        for lineno, line in enumerate(origfile, start=1):
            if lineno in repls:
                # Whole-line replacement supplied for this script version;
                # written verbatim (it carries its own newline, if any).
                outfile.write(repls[lineno])
            else:
                outfile.write(fix_common_displaced_commands(line) + '\n')

    return outpath
|
||
|
||
|
||
# Fix the '「!s*' and '!sd。」@' plague: speed commands that drifted to the
# wrong side of the opening/closing quotation marks.
def fix_common_displaced_commands(line: str) -> str:
    """Re-anchor displaced ``!s*`` speed commands around quote marks.

    The original scripts frequently place a speed command just *after* an
    opening 「 (as in ``「!s*``) or leave the closing punctuation and final
    command stranded *after* an ``!sd`` reset (as in ``!sd。」@``). This
    shuffles those tokens back into canonical order and returns the
    re-joined line.
    """
    tokens = scripter.parse_line(line)

    # Drop a stray leading space token when it precedes a command.
    # NOTE(review): this matches a plain ASCII space token — confirm it
    # should not also cover the U+3000 ideographic space.
    if len(tokens) > 1 and \
            tokens[1].type == scripter.TokenType.COMMAND \
            and tokens[0].token == ' ':
        tokens.pop(0)

    # Case '「!s*': the speed command belongs *before* the opening quote.
    if len(tokens) > 2 and \
            tokens[0].token == '「' and tokens[1].token.startswith('!s'):
        # Move !s* to the beginning ...
        tokens[0] = tokens[1]
        # ... prepend 「 to the following text token ...
        tokens[2].token = '「' + tokens[2].token
        # ... and drop the now-duplicated slot.
        tokens.pop(1)

    # Case '!sd。」': closing punctuation/quote and the final command
    # drifted past the !sd reset.
    if (
        len(tokens) > 3
        and tokens[-3].token == '!sd'
        and tokens[-2].token in [  # closing quote/punctuation variants
            '。」',
            '」',
            '。',
            '、」',
        ]
        and tokens[-1].type == scripter.TokenType.COMMAND  # final command
    ):
        # Current layout: [text][command]<[text][command]>
        quot = tokens[-2].token
        final = tokens[-1].token
        # Merge the stragglers into the preceding pair:
        # <[text+quote][command+command]>[text][command]
        tokens[-4].token += quot
        tokens[-3].token += final
        # Trim the leftovers: [text+quote][command+command]
        tokens = tokens[:-2]

    # join() replaces the original quadratic `ret += t.token` accumulation.
    return ''.join(t.token for t in tokens)
|