init
This commit is contained in:
commit
7a996fe5b8
|
|
@ -0,0 +1,8 @@
|
|||
localtest
|
||||
.DS_Store
|
||||
*.code-workspace
|
||||
.vscode
|
||||
/target
|
||||
/rust
|
||||
/legacy
|
||||
__pycache__
|
||||
|
|
@ -0,0 +1,21 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2024 Jun Siang Cheah
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
|
@ -0,0 +1,42 @@
|
|||
# Palworld Host Save Fix (no uesave)
|
||||
|
||||
> ### Be careful of data loss and *always* make a backup.
|
||||
|
||||
Dependencies:
|
||||
- Python 3
|
||||
|
||||
Command:
|
||||
`python fix-savs.py <old_guid> <new_guid>`
|
||||
|
||||
`<new_guid>` - GUID of the player on the new server
|
||||
`<old_guid>` - GUID of the player from the old server
|
||||
|
||||
Example:
|
||||
`python fix-savs.py 00000000000000000000000000000001 6A80B1A6000000000000000000000000`
|
||||
|
||||
## migrate a co-op save to a Linux dedicated server
|
||||
|
||||
Prerequisites:
|
||||
- Install the dependencies listed above.
|
||||
- The dedicated server is installed, running, and you're able to join it.
|
||||
- No viewing cage support.
|
||||
|
||||
Steps:
|
||||
1. Copy your desired save's folder from `C:\Users\<username>\AppData\Local\Pal\Saved\SaveGames\<random_numbers>` to your dedicated server.
|
||||
2. In the `PalServer\Pal\Saved\Config\LinuxServer\GameUserSettings.ini` file, change the `DedicatedServerName` to match your save's folder name. For example, if your save's folder name is `2E85FD38BAA792EB1D4C09386F3A3CDA`, the `DedicatedServerName` changes to `DedicatedServerName=2E85FD38BAA792EB1D4C09386F3A3CDA`.
|
||||
3. Delete `PalServer\Pal\Saved\SaveGames\0\<your_save_here>\WorldOption.sav` to allow modification of `PalWorldSettings.ini`. Players will have to choose their respawn point again, but nothing else is affected as far as I can tell.
|
||||
4. Confirm you can connect to your save on the dedicated server and that the world is the one in the save. You can check the world with a character that belongs to a regular player from the co-op.
|
||||
5. Afterwards, the co-op host must create a new character on the dedicated server. A new `.sav` file should appear in `PalServer\Pal\Saved\SaveGames\0\<your_save_here>\Players`.
|
||||
6. The name of that new `.sav` file is the co-op host's new GUID. We will need the co-op host's new GUID for the script to work.
|
||||
7. Shut the server down and then copy `PalServer\Pal\Saved\SaveGames\0\<your_save_here>\Level.sav` and `PalServer\Pal\Saved\SaveGames\0\<your_save_here>\Players\<co-op_sav_file>` to the `savs/` folder in the tool.
|
||||
8. **Make a backup of your save!** This is an experimental script and has known bugs so always keep a backup copy of your save.
|
||||
9. Run the script using the command shown above with the information you've gathered, using `00000000000000000000000000000001` as the co-op host's old GUID.
|
||||
10. Copy the save from the temporary folder back to the dedicated server. Move the save you had in the dedicated server somewhere else or rename it to something different.
|
||||
11. Start the server back up and have the co-op host join the server with their fixed character.
|
||||
|
||||
|
||||
|
||||
|
||||
This uses cheahjs https://github.com/cheahjs/palworld-save-tools for converting sav to json and back.
|
||||
|
||||
Steps from xNul https://github.com/xNul/palworld-host-save-fix
|
||||
|
|
@ -0,0 +1,122 @@
|
|||
import sys
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
|
||||
from lib.gvas import GvasFile
|
||||
from lib.noindent import CustomEncoder
|
||||
from lib.palsav import compress_gvas_to_sav, decompress_sav_to_gvas
|
||||
from lib.paltypes import PALWORLD_CUSTOM_PROPERTIES, PALWORLD_TYPE_HINTS
|
||||
|
||||
|
||||
def main(sav_file: str, guid: str):
    """Drive the whole GUID fix: decode both saves, rewrite the GUIDs, re-encode.

    *sav_file* is the co-op host's old GUID (also the player .sav filename);
    *guid* is the host's new GUID on the dedicated server. Either argument may
    carry a trailing ".sav" extension, which is stripped.
    """
    # Normalize arguments: accept either bare GUIDs or ".sav" filenames.
    if sav_file.endswith(".sav"):
        sav_file = sav_file[:-4]
    if guid.endswith(".sav"):
        guid = guid[:-4]

    # Player save: decode, substitute GUIDs, re-encode under the new GUID.
    convert_sav_to_json(filename=f"savs/{sav_file}.sav", output_path=f"./savs/{sav_file}.json")
    edit_user_json(sav_file, guid)

    convert_json_to_sav(filename=f"savs/{sav_file}.json", output_path=f"./savs/{guid}.sav")
    os.remove(f"savs/{sav_file}.json")

    # World save: same round-trip for Level.sav.
    convert_sav_to_json(filename="savs/Level.sav", output_path="./savs/Level.json")
    os.remove("savs/Level.sav")
    edit_level_json(sav_file, guid)

    convert_json_to_sav(filename="savs/Level.json", output_path="./savs/Level.sav")
    os.remove("savs/Level.json")
|
||||
|
||||
|
||||
def format_id_string(guid: str):
    """Hyphenate a 32-character hex GUID into the canonical 8-4-4-4-12 form."""
    groups = (guid[0:8], guid[8:12], guid[12:16], guid[16:20], guid[20:])
    return "-".join(groups)
|
||||
|
||||
|
||||
def edit_user_json(old_id: str, new_id: str):
    """Rewrite the player's save JSON, replacing the old GUID with the new one.

    Loads `savs/{old_id}.json`, substitutes every occurrence of the hyphenated
    old GUID with the hyphenated new GUID (case-insensitively), and writes the
    result back to the same path.

    :param old_id: 32-char hex GUID of the player on the old server.
    :param new_id: 32-char hex GUID of the player on the new server.
    """
    filename = f"savs/{old_id}.json"
    old_guid = format_id_string(old_id)
    new_guid = format_id_string(new_id)
    with open(filename, "r") as old_file:
        data = json.load(old_file)
    # Substitute on the serialized JSON text and parse it back with
    # json.loads(). The previous implementation ran eval() on a regex-edited
    # Python repr of the data, which executes arbitrary code if the save
    # contains a crafted string — json round-tripping is equivalent and safe.
    text = json.dumps(data)
    # re.escape is defensive: GUIDs only contain hex digits and hyphens, but
    # escaping keeps the substitution literal no matter what is passed in.
    new_text = re.sub(re.escape(old_guid), new_guid, text, flags=re.I)
    new_data = json.loads(new_text)
    os.remove(filename)
    with open(filename, "w") as new_file:
        json.dump(new_data, new_file, indent="\t")
|
||||
|
||||
|
||||
def edit_level_json(old_id: str, new_id: str):
    """Rewrite `savs/Level.json`, swapping the old host GUID for the new one.

    Any pre-existing occurrences of the new GUID (the freshly created throwaway
    character) are first parked on a filler GUID so the old->new substitution
    cannot collide with them.

    :param old_id: 32-char hex GUID of the co-op host on the old server.
    :param new_id: 32-char hex GUID of the co-op host on the new server.
    """
    filler_id = "00000000-0000-0000-0000-000000000006"
    filename = "savs/Level.json"
    old_guid = format_id_string(old_id)
    new_guid = format_id_string(new_id)
    with open(filename, "r") as old_file:
        data = json.load(old_file)
    # Work on serialized JSON text and parse back with json.loads(). The
    # previous implementation eval()'d a regex-edited Python repr, which can
    # execute arbitrary code embedded in save strings.
    text = json.dumps(data)
    # Step 1: move existing new-GUID entries aside onto the filler GUID.
    text = re.sub(re.escape(new_guid), filler_id, text, flags=re.I)
    # Step 2: rewrite the old GUID to the new one.
    new_text = re.sub(re.escape(old_guid), new_guid, text, flags=re.I)
    new_data = json.loads(new_text)
    os.remove(filename)
    with open(filename, "w") as new_file:
        json.dump(new_data, new_file, indent="\t")
|
||||
|
||||
|
||||
def convert_sav_to_json(filename: str, output_path: str):
    """Decompress a Palworld .sav file and dump its GVAS contents as JSON.

    Prompts before overwriting an existing *output_path* and exits if the
    user declines.
    """
    minify = False
    print(f"Converting (unknown) to JSON, saving to {output_path}")
    if os.path.exists(output_path):
        print(f"{output_path} already exists, this will overwrite the file")
        if not confirm_prompt("Are you sure you want to continue?"):
            exit(1)
    print(f"Decompressing sav file")
    with open(filename, "rb") as sav_handle:
        raw_bytes = sav_handle.read()
    raw_gvas, _ = decompress_sav_to_gvas(raw_bytes)
    print(f"Loading GVAS file")
    gvas_file = GvasFile.read(raw_gvas, PALWORLD_TYPE_HINTS, PALWORLD_CUSTOM_PROPERTIES)
    print(f"Writing JSON to {output_path}")
    with open(output_path, "w", encoding="utf8") as json_handle:
        # Tab-indent unless minifying; CustomEncoder handles project types.
        json.dump(
            gvas_file.dump(),
            json_handle,
            indent=None if minify else "\t",
            cls=CustomEncoder,
        )
|
||||
|
||||
|
||||
|
||||
def convert_json_to_sav(filename: str, output_path: str):
    """Re-encode a JSON dump back into a compressed Palworld .sav file.

    Prompts before overwriting an existing *output_path* and exits if the
    user declines.
    """
    print(f"Converting (unknown) to SAV, saving to {output_path}")
    if os.path.exists(output_path):
        print(f"{output_path} already exists, this will overwrite the file")
        if not confirm_prompt("Are you sure you want to continue?"):
            exit(1)
    print(f"Loading JSON from (unknown)")
    with open(filename, "r", encoding="utf8") as json_handle:
        parsed = json.load(json_handle)
    gvas_file = GvasFile.load(parsed)
    print(f"Compressing SAV file")
    # World-level saves use a different save-type byte than player saves.
    class_name = gvas_file.header.save_game_class_name
    if "Pal.PalWorldSaveGame" in class_name or "Pal.PalLocalWorldSaveGame" in class_name:
        save_type = 0x32
    else:
        save_type = 0x31
    sav_file = compress_gvas_to_sav(gvas_file.write(PALWORLD_CUSTOM_PROPERTIES), save_type)
    print(f"Writing SAV file to {output_path}")
    with open(output_path, "wb") as sav_handle:
        sav_handle.write(sav_file)
|
||||
|
||||
|
||||
def confirm_prompt(question: str) -> bool:
    """Ask *question* on stdin until the user answers y or n; True for yes."""
    answer = input(f"{question} (y/n): ").casefold()
    while answer not in ("y", "n"):
        answer = input(f"{question} (y/n): ").casefold()
    return answer == "y"
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
a = sys.argv[1]
|
||||
b = sys.argv[2]
|
||||
main(sav_file=a, guid=b)
|
||||
|
|
@ -0,0 +1,846 @@
|
|||
import io
|
||||
import math
|
||||
import os
|
||||
import struct
|
||||
import uuid
|
||||
from typing import Any, Callable, Optional, Union
|
||||
|
||||
|
||||
def instance_id_reader(reader: "FArchiveReader"):
|
||||
return {
|
||||
"guid": reader.guid(),
|
||||
"instance_id": reader.guid(),
|
||||
}
|
||||
|
||||
|
||||
def uuid_reader(reader: "FArchiveReader"):
|
||||
b = reader.read(16)
|
||||
if len(b) != 16:
|
||||
raise Exception("could not read 16 bytes for uuid")
|
||||
return uuid.UUID(
|
||||
bytes=bytes(
|
||||
[
|
||||
b[0x3],
|
||||
b[0x2],
|
||||
b[0x1],
|
||||
b[0x0],
|
||||
b[0x7],
|
||||
b[0x6],
|
||||
b[0x5],
|
||||
b[0x4],
|
||||
b[0xB],
|
||||
b[0xA],
|
||||
b[0x9],
|
||||
b[0x8],
|
||||
b[0xF],
|
||||
b[0xE],
|
||||
b[0xD],
|
||||
b[0xC],
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class FArchiveReader:
    """Reader for Unreal Engine serialized archive (GVAS) binary data.

    Wraps the raw save bytes in a BytesIO and exposes typed primitive
    readers (ints, floats, strings, GUIDs) plus the recursive property-tree
    readers used to decode Palworld save data. ``type_hints`` supplies
    struct type names the archive does not record; ``custom_properties``
    maps property paths to (reader, writer) callables for special-cased
    decoding.
    """

    data: io.BytesIO  # underlying byte stream being consumed
    size: int  # total stream length in bytes, used for EOF detection
    type_hints: dict[str, str]  # property path -> struct type name
    custom_properties: dict[str, tuple[Callable, Callable]]  # path -> (reader, writer)

    def __init__(
        self,
        data,
        # NOTE(review): mutable default arguments are shared across calls;
        # harmless only as long as they are treated as read-only.
        type_hints: dict[str, str] = {},
        custom_properties: dict[str, tuple[Callable, Callable]] = {},
    ):
        """Wrap *data* (a bytes-like object) and measure its total size."""
        self.data = io.BytesIO(data)
        # Read through once to learn the size, then rewind to the start.
        self.size = len(self.data.read())
        self.data.seek(0)
        self.type_hints = type_hints
        self.custom_properties = custom_properties

    def __enter__(self):
        """Context-manager entry: re-measure the stream and rewind it."""
        self.size = len(self.data.read())
        self.data.seek(0)
        return self

    def __exit__(self, type, value, traceback):
        """Context-manager exit: close the underlying stream."""
        self.data.close()

    def get_type_or(self, path: str, default: str):
        """Return the hinted struct type for *path*, or *default* with a warning."""
        if path in self.type_hints:
            return self.type_hints[path]
        else:
            print(f"Struct type for {path} not found, assuming {default}")
            return default

    def eof(self) -> bool:
        """True once the read cursor has reached the end of the stream."""
        return self.data.tell() >= self.size

    def read(self, size: int) -> bytes:
        """Read up to *size* raw bytes."""
        return self.data.read(size)

    def read_to_end(self) -> bytes:
        """Read and return all remaining bytes."""
        return self.data.read(self.size - self.data.tell())

    def bool(self) -> bool:
        """Read one byte as a boolean (any non-zero value is True)."""
        return self.byte() > 0

    def fstring(self) -> str:
        """Read a UE FString: i32 length prefix, then the characters.

        A negative length means the payload is UTF-16-LE ("UCS2");
        a positive length means ASCII. The NUL terminator is stripped.
        """
        size = self.i32()
        LoadUCS2Char: bool = size < 0

        if LoadUCS2Char:
            # -2**31 cannot be negated in 32-bit space; treat as corruption.
            if size == -2147483648:
                raise Exception("Archive is corrupted.")

            size = -size

        if size == 0:
            return ""

        data: bytes
        encoding: str
        if LoadUCS2Char:
            # Two bytes per character; drop the two-byte NUL terminator.
            data = self.read(size * 2)[:-2]
            encoding = "utf-16-le"
        else:
            # One byte per character; drop the single NUL terminator.
            data = self.read(size)[:-1]
            encoding = "ascii"
        try:
            return data.decode(encoding)
        except Exception as e:
            try:
                # Fall back to a lossy decode so one bad string does not
                # abort the whole parse.
                escaped = data.decode(encoding, errors="surrogatepass")
                print(
                    f"Error decoding {encoding} string of length {size}, data loss may occur! {bytes(data)}"
                )
                return escaped
            except Exception as e:
                raise Exception(
                    f"Error decoding {encoding} string of length {size}: {bytes(data)}"
                ) from e

    def i16(self) -> int:
        """Read a signed 16-bit integer (native byte order)."""
        return struct.unpack("h", self.data.read(2))[0]

    def u16(self) -> int:
        """Read an unsigned 16-bit integer (native byte order)."""
        return struct.unpack("H", self.data.read(2))[0]

    def i32(self) -> int:
        """Read a signed 32-bit integer (native byte order)."""
        return struct.unpack("i", self.data.read(4))[0]

    def u32(self) -> int:
        """Read an unsigned 32-bit integer (native byte order)."""
        return struct.unpack("I", self.data.read(4))[0]

    def i64(self) -> int:
        """Read a signed 64-bit integer (native byte order)."""
        return struct.unpack("q", self.data.read(8))[0]

    def u64(self) -> int:
        """Read an unsigned 64-bit integer (native byte order)."""
        return struct.unpack("Q", self.data.read(8))[0]

    def float(self) -> float:
        """Read a 32-bit float."""
        return struct.unpack("f", self.data.read(4))[0]

    def double(self) -> float:
        """Read a 64-bit float."""
        return struct.unpack("d", self.data.read(8))[0]

    def byte(self) -> int:
        """Read one unsigned byte."""
        return struct.unpack("B", self.data.read(1))[0]

    def byte_list(self, size: int) -> list[int]:
        """Read *size* unsigned bytes as a sequence of ints.

        NOTE(review): struct.unpack actually returns a tuple, not a list,
        despite the annotation.
        """
        return struct.unpack(str(size) + "B", self.data.read(size))

    def skip(self, size: int) -> None:
        """Advance the cursor *size* bytes, discarding the data."""
        self.data.read(size)

    def guid(self) -> uuid.UUID:
        """Read a 16-byte UE-ordered GUID."""
        return uuid_reader(self)

    def optional_guid(self) -> Optional[uuid.UUID]:
        """Read a presence flag, then a GUID only if the flag is set."""
        return uuid_reader(self) if self.bool() else None

    def tarray(
        self, type_reader: Callable[["FArchiveReader"], dict[str, Any]]
    ) -> list[dict[str, Any]]:
        """Read a UE TArray: u32 element count, then that many elements."""
        count = self.u32()
        array = []
        for _ in range(count):
            array.append(type_reader(self))
        return array

    def properties_until_end(self, path: str = "") -> dict[str, Any]:
        """Read (name, type, size, value) property records until the sentinel.

        "None" as a property name is UE's end-of-properties marker.
        """
        properties = {}
        while True:
            name = self.fstring()
            if name == "None":
                break
            type_name = self.fstring()
            size = self.u64()
            properties[name] = self.property(type_name, size, f"{path}.{name}")
        return properties

    def property(
        self, type_name: str, size: int, path: str, allow_custom: bool = True
    ) -> dict[str, Any]:
        """Decode one property of *type_name*; returns a dict with a "type" key.

        Custom decoders registered for *path* take precedence unless
        *allow_custom* is False. Raises on unknown property types.
        NOTE: shadows the builtin ``property`` within this class.
        """
        value = {}
        if allow_custom and path in self.custom_properties:
            # Delegate to the registered custom reader for this path.
            value = self.custom_properties[path][0](self, type_name, size, path)
            value["custom_type"] = path
        elif type_name == "StructProperty":
            value = self.struct(path)
        elif type_name == "IntProperty":
            value = {
                "id": self.optional_guid(),
                "value": self.i32(),
            }
        elif type_name == "Int64Property":
            value = {
                "id": self.optional_guid(),
                "value": self.i64(),
            }
        elif type_name == "FixedPoint64Property":
            # Stored as a 32-bit integer despite the name.
            value = {
                "id": self.optional_guid(),
                "value": self.i32(),
            }
        elif type_name == "FloatProperty":
            value = {
                "id": self.optional_guid(),
                "value": self.float(),
            }
        elif type_name == "StrProperty":
            value = {
                "id": self.optional_guid(),
                "value": self.fstring(),
            }
        elif type_name == "NameProperty":
            value = {
                "id": self.optional_guid(),
                "value": self.fstring(),
            }
        elif type_name == "EnumProperty":
            enum_type = self.fstring()
            _id = self.optional_guid()
            enum_value = self.fstring()
            value = {
                "id": _id,
                "value": {
                    "type": enum_type,
                    "value": enum_value,
                },
            }
        elif type_name == "BoolProperty":
            # Note: value precedes the optional id for bools.
            value = {
                "value": self.bool(),
                "id": self.optional_guid(),
            }
        elif type_name == "ArrayProperty":
            array_type = self.fstring()
            value = {
                "array_type": array_type,
                "id": self.optional_guid(),
                # size - 4: the declared size includes the u32 element count.
                "value": self.array_property(array_type, size - 4, path),
            }
        elif type_name == "MapProperty":
            key_type = self.fstring()
            value_type = self.fstring()
            _id = self.optional_guid()
            # Unknown u32 field, always skipped — purpose not evident here.
            self.u32()
            count = self.u32()
            values = {}
            key_path = path + ".Key"
            if key_type == "StructProperty":
                key_struct_type = self.get_type_or(key_path, "Guid")
            else:
                key_struct_type = None
            value_path = path + ".Value"
            if value_type == "StructProperty":
                value_struct_type = self.get_type_or(value_path, "StructProperty")
            else:
                value_struct_type = None
            # NOTE(review): the dict assignment above is dead; entries are
            # collected in this list.
            values = []
            for _ in range(count):
                key = self.prop_value(key_type, key_struct_type, key_path)
                value = self.prop_value(value_type, value_struct_type, value_path)
                values.append(
                    {
                        "key": key,
                        "value": value,
                    }
                )
            value = {
                "key_type": key_type,
                "value_type": value_type,
                "key_struct_type": key_struct_type,
                "value_struct_type": value_struct_type,
                "id": _id,
                "value": values,
            }
        else:
            raise Exception(f"Unknown type: {type_name} ({path})")
        value["type"] = type_name
        return value

    def prop_value(self, type_name: str, struct_type_name: str, path: str):
        """Read a bare map key/value of the given property type."""
        if type_name == "StructProperty":
            return self.struct_value(struct_type_name, path)
        elif type_name == "EnumProperty":
            return self.fstring()
        elif type_name == "NameProperty":
            return self.fstring()
        elif type_name == "IntProperty":
            return self.i32()
        elif type_name == "BoolProperty":
            return self.bool()
        else:
            raise Exception(f"Unknown property value type: {type_name} ({path})")

    def struct(self, path: str) -> dict[str, Any]:
        """Read a StructProperty header (type, struct id, optional id) + value."""
        struct_type = self.fstring()
        struct_id = self.guid()
        _id = self.optional_guid()
        value = self.struct_value(struct_type, path)
        return {
            "struct_type": struct_type,
            "struct_id": struct_id,
            "id": _id,
            "value": value,
        }

    def struct_value(self, struct_type: str, path: str = ""):
        """Read a struct value of a known type, or a nested property map."""
        if struct_type == "Vector":
            return {
                "x": self.double(),
                "y": self.double(),
                "z": self.double(),
            }
        elif struct_type == "DateTime":
            return self.u64()
        elif struct_type == "Guid":
            return self.guid()
        elif struct_type == "Quat":
            return {
                "x": self.double(),
                "y": self.double(),
                "z": self.double(),
                "w": self.double(),
            }
        elif struct_type == "LinearColor":
            return {
                "r": self.float(),
                "g": self.float(),
                "b": self.float(),
                "a": self.float(),
            }
        else:
            # Unknown struct types are assumed to be nested property maps.
            if os.environ.get("DEBUG", "0") == "1":
                print(f"Assuming struct type: {struct_type} ({path})")
            return self.properties_until_end(path)

    def array_property(self, array_type: str, size: int, path: str):
        """Read an ArrayProperty payload (count already inside this payload)."""
        count = self.u32()
        value = {}
        if array_type == "StructProperty":
            # Struct arrays carry an inner header: name, type, size, struct
            # type name, guid, and one skipped byte before the elements.
            prop_name = self.fstring()
            prop_type = self.fstring()
            self.u64()
            type_name = self.fstring()
            _id = self.guid()
            self.skip(1)
            prop_values = []
            for _ in range(count):
                prop_values.append(self.struct_value(type_name, f"{path}.{prop_name}"))
            value = {
                "prop_name": prop_name,
                "prop_type": prop_type,
                "values": prop_values,
                "type_name": type_name,
                "id": _id,
            }
        else:
            value = {
                "values": self.array_value(array_type, count, size, path),
            }
        return value

    def array_value(self, array_type: str, count: int, size: int, path: str):
        """Read *count* homogeneous array elements of *array_type*."""
        values = []
        for _ in range(count):
            if array_type == "EnumProperty":
                values.append(self.fstring())
            elif array_type == "NameProperty":
                values.append(self.fstring())
            elif array_type == "Guid":
                values.append(self.guid())
            elif array_type == "ByteProperty":
                # size == count implies one raw byte per element.
                if size == count:
                    values.append(self.byte())
                else:
                    raise Exception("Labelled ByteProperty not implemented")
            else:
                raise Exception(f"Unknown array type: {array_type} ({path})")
        return values

    def compressed_short_rotator(self) -> tuple[float, float, float]:
        """Read a rotator compressed to 16 bits per present component.

        Each component has a presence flag; absent components are 0. The
        16-bit value maps the full circle onto 0..65535 degrees-equivalents.
        NOTE(review): returns a list despite the tuple annotation.
        """
        short_pitch = self.u16() if self.bool() else 0
        short_yaw = self.u16() if self.bool() else 0
        short_roll = self.u16() if self.bool() else 0
        pitch = short_pitch * (360.0 / 65536.0)
        yaw = short_yaw * (360.0 / 65536.0)
        roll = short_roll * (360.0 / 65536.0)
        return [pitch, yaw, roll]

    def serializeint(self, component_bit_count: int) -> int:
        """Read an integer stored in ceil(bits/8) little-endian bytes.

        Bits above *component_bit_count* in the final byte are masked off.
        """
        b = bytearray(self.read((component_bit_count + 7) // 8))
        if (component_bit_count % 8) != 0:
            b[-1] &= (1 << (component_bit_count % 8)) - 1
        value = int.from_bytes(b, "little")
        return value

    def packed_vector(self, scale_factor: int) -> tuple[float, float, float]:
        """Read a UE packed vector (quantized ints or raw floats/doubles).

        The leading u32 holds the per-component bit count (low 6 bits) and
        an extra-info flag (remaining bits). Bit count > 0 means quantized
        integer components; otherwise raw float (extra_info == 0) or double
        (extra_info != 0) components follow.
        """
        component_bit_count_and_extra_info = self.u32()
        component_bit_count = component_bit_count_and_extra_info & 63
        extra_info = component_bit_count_and_extra_info >> 6
        if component_bit_count > 0:
            x = self.serializeint(component_bit_count)
            y = self.serializeint(component_bit_count)
            z = self.serializeint(component_bit_count)
            # Sign-extend each component from component_bit_count bits.
            sign_bit = 1 << (component_bit_count - 1)
            x = (x & (sign_bit - 1)) - (x & sign_bit)
            y = (y & (sign_bit - 1)) - (y & sign_bit)
            z = (z & (sign_bit - 1)) - (z & sign_bit)

            # extra_info set means components were scaled before quantizing.
            if extra_info:
                x /= scale_factor
                y /= scale_factor
                z /= scale_factor
            return (x, y, z)
        else:
            received_scaler_type_size = 8 if extra_info else 4
            if received_scaler_type_size == 8:
                x = self.double()
                y = self.double()
                z = self.double()
                return (x, y, z)
            else:
                x = self.float()
                y = self.float()
                z = self.float()
                return (x, y, z)

    def ftransform(self) -> dict[str, dict[str, float]]:
        """Read an FTransform: quaternion rotation, translation, 3D scale."""
        return {
            "rotation": {
                "x": self.double(),
                "y": self.double(),
                "z": self.double(),
                "w": self.double(),
            },
            "translation": {
                "x": self.double(),
                "y": self.double(),
                "z": self.double(),
            },
            "scale3d": {
                "x": self.double(),
                "y": self.double(),
                "z": self.double(),
            },
        }
|
||||
|
||||
|
||||
def uuid_writer(writer, s: Union[str, uuid.UUID]):
    """Write a GUID to *writer* in UE byte order.

    Accepts either a GUID string or a uuid.UUID. Each 4-byte word of the
    UUID's big-endian byte form is reversed before writing, mirroring the
    reordering done by uuid_reader.
    """
    raw = (uuid.UUID(s) if isinstance(s, str) else s).bytes
    # Reverse each 4-byte word: [3..0], [7..4], [11..8], [15..12].
    writer.write(bytes(raw[3::-1] + raw[7:3:-1] + raw[11:7:-1] + raw[15:11:-1]))
|
||||
|
||||
|
||||
def instance_id_writer(writer, d):
    """Write a {"guid", "instance_id"} pair of GUIDs in UE byte order."""
    for key in ("guid", "instance_id"):
        uuid_writer(writer, d[key])
|
||||
|
||||
|
||||
class FArchiveWriter:
|
||||
data: io.BytesIO
|
||||
size: int
|
||||
custom_properties: dict[str, tuple[Callable, Callable]]
|
||||
|
||||
def __init__(self, custom_properties: dict[str, tuple[Callable, Callable]] = {}):
|
||||
self.data = io.BytesIO()
|
||||
self.custom_properties = custom_properties
|
||||
|
||||
def __enter__(self):
|
||||
self.data.seek(0)
|
||||
return self
|
||||
|
||||
def __exit__(self, type, value, traceback):
|
||||
self.data.close()
|
||||
|
||||
def copy(self) -> "FArchiveWriter":
|
||||
return FArchiveWriter(self.custom_properties)
|
||||
|
||||
def bytes(self) -> bytes:
|
||||
pos = self.data.tell()
|
||||
self.data.seek(0)
|
||||
b = self.data.read()
|
||||
self.data.seek(pos)
|
||||
return b
|
||||
|
||||
def write(self, data: bytes):
|
||||
self.data.write(data)
|
||||
|
||||
def bool(self, bool: bool):
|
||||
self.data.write(struct.pack("?", bool))
|
||||
|
||||
def fstring(self, string: str) -> int:
|
||||
start = self.data.tell()
|
||||
if string == "":
|
||||
self.i32(0)
|
||||
elif string.isascii():
|
||||
str_bytes = string.encode("ascii")
|
||||
self.i32(len(str_bytes) + 1)
|
||||
self.data.write(str_bytes)
|
||||
self.data.write(b"\x00")
|
||||
else:
|
||||
str_bytes = string.encode("utf-16-le", errors="surrogatepass")
|
||||
assert len(str_bytes) % 2 == 0
|
||||
self.i32(-((len(str_bytes) // 2) + 1))
|
||||
self.data.write(str_bytes)
|
||||
self.data.write(b"\x00\x00")
|
||||
return self.data.tell() - start
|
||||
|
||||
def i16(self, i: int):
|
||||
self.data.write(struct.pack("h", i))
|
||||
|
||||
def u16(self, i: int):
|
||||
self.data.write(struct.pack("H", i))
|
||||
|
||||
def i32(self, i: int):
|
||||
self.data.write(struct.pack("i", i))
|
||||
|
||||
def u32(self, i: int):
|
||||
self.data.write(struct.pack("I", i))
|
||||
|
||||
def i64(self, i: int):
|
||||
self.data.write(struct.pack("q", i))
|
||||
|
||||
def u64(self, i: int):
|
||||
self.data.write(struct.pack("Q", i))
|
||||
|
||||
def float(self, i: float):
|
||||
self.data.write(struct.pack("f", i))
|
||||
|
||||
def double(self, i: float):
|
||||
self.data.write(struct.pack("d", i))
|
||||
|
||||
def byte(self, b: int):
|
||||
self.data.write(bytes([b]))
|
||||
|
||||
def u(self, b: int):
|
||||
self.data.write(struct.pack("B", b))
|
||||
|
||||
def guid(self, u: Union[str, uuid.UUID]):
|
||||
uuid_writer(self, u)
|
||||
|
||||
def optional_uuid(self, u: Optional[Union[str, uuid.UUID]]):
|
||||
if u is None:
|
||||
self.bool(False)
|
||||
else:
|
||||
self.bool(True)
|
||||
uuid_writer(self, u)
|
||||
|
||||
def tarray(
|
||||
self, type_writer: Callable[["FArchiveWriter", dict[str, Any]], None], array
|
||||
):
|
||||
self.u32(len(array))
|
||||
for i in range(len(array)):
|
||||
type_writer(self, array[i])
|
||||
|
||||
def properties(self, properties: dict[str, Any]):
|
||||
for key in properties:
|
||||
self.fstring(key)
|
||||
self.property(properties[key])
|
||||
self.fstring("None")
|
||||
|
||||
def property(self, property: dict[str, Any]):
|
||||
# write type_name
|
||||
self.fstring(property["type"])
|
||||
nested_writer = self.copy()
|
||||
size: int
|
||||
property_type = property["type"]
|
||||
size = nested_writer.property_inner(property_type, property)
|
||||
buf = nested_writer.bytes()
|
||||
# write size
|
||||
self.u64(size)
|
||||
self.write(buf)
|
||||
|
||||
def property_inner(self, property_type: str, property: dict[str, Any]) -> int:
|
||||
if "custom_type" in property:
|
||||
if property["custom_type"] in self.custom_properties:
|
||||
size = self.custom_properties[property["custom_type"]][1](
|
||||
self, property_type, property
|
||||
)
|
||||
else:
|
||||
raise Exception(
|
||||
f"Unknown custom property type: {property['custom_type']}"
|
||||
)
|
||||
elif property_type == "StructProperty":
|
||||
size = self.struct(property)
|
||||
elif property_type == "IntProperty":
|
||||
self.optional_uuid(property.get("id", None))
|
||||
self.i32(property["value"])
|
||||
size = 4
|
||||
elif property_type == "Int64Property":
|
||||
self.optional_uuid(property.get("id", None))
|
||||
self.i64(property["value"])
|
||||
size = 8
|
||||
elif property_type == "FixedPoint64Property":
|
||||
self.optional_uuid(property.get("id", None))
|
||||
self.i32(property["value"])
|
||||
size = 4
|
||||
elif property_type == "FloatProperty":
|
||||
self.optional_uuid(property.get("id", None))
|
||||
self.float(property["value"])
|
||||
size = 4
|
||||
elif property_type == "StrProperty":
|
||||
self.optional_uuid(property.get("id", None))
|
||||
size = self.fstring(property["value"])
|
||||
elif property_type == "NameProperty":
|
||||
self.optional_uuid(property.get("id", None))
|
||||
size = self.fstring(property["value"])
|
||||
elif property_type == "EnumProperty":
|
||||
self.fstring(property["value"]["type"])
|
||||
self.optional_uuid(property.get("id", None))
|
||||
size = self.fstring(property["value"]["value"])
|
||||
elif property_type == "BoolProperty":
|
||||
self.bool(property["value"])
|
||||
self.optional_uuid(property.get("id", None))
|
||||
size = 0
|
||||
elif property_type == "ArrayProperty":
|
||||
self.fstring(property["array_type"])
|
||||
self.optional_uuid(property.get("id", None))
|
||||
array_writer = self.copy()
|
||||
array_writer.array_property(property["array_type"], property["value"])
|
||||
array_buf = array_writer.bytes()
|
||||
size = len(array_buf)
|
||||
self.write(array_buf)
|
||||
elif property_type == "MapProperty":
|
||||
self.fstring(property["key_type"])
|
||||
self.fstring(property["value_type"])
|
||||
self.optional_uuid(property.get("id", None))
|
||||
map_writer = self.copy()
|
||||
map_writer.u32(0)
|
||||
map_writer.u32(len(property["value"]))
|
||||
for entry in property["value"]:
|
||||
map_writer.prop_value(
|
||||
property["key_type"], property["key_struct_type"], entry["key"]
|
||||
)
|
||||
map_writer.prop_value(
|
||||
property["value_type"],
|
||||
property["value_struct_type"],
|
||||
entry["value"],
|
||||
)
|
||||
map_buf = map_writer.bytes()
|
||||
size = len(map_buf)
|
||||
self.write(map_buf)
|
||||
else:
|
||||
raise Exception(f"Unknown property type: {property_type}")
|
||||
return size
|
||||
|
||||
def struct(self, property: dict[str, Any]) -> int:
|
||||
self.fstring(property["struct_type"])
|
||||
self.guid(property["struct_id"])
|
||||
self.optional_uuid(property.get("id", None))
|
||||
start = self.data.tell()
|
||||
self.struct_value(property["struct_type"], property["value"])
|
||||
return self.data.tell() - start
|
||||
|
||||
def struct_value(self, struct_type: str, value):
|
||||
if struct_type == "Vector":
|
||||
self.double(value["x"])
|
||||
self.double(value["y"])
|
||||
self.double(value["z"])
|
||||
elif struct_type == "DateTime":
|
||||
self.u64(value)
|
||||
elif struct_type == "Guid":
|
||||
self.guid(value)
|
||||
elif struct_type == "Quat":
|
||||
self.double(value["x"])
|
||||
self.double(value["y"])
|
||||
self.double(value["z"])
|
||||
self.double(value["w"])
|
||||
elif struct_type == "LinearColor":
|
||||
self.float(value["r"])
|
||||
self.float(value["g"])
|
||||
self.float(value["b"])
|
||||
self.float(value["a"])
|
||||
else:
|
||||
if os.environ.get("DEBUG", "0") == "1":
|
||||
print(f"Assuming struct type: {struct_type}")
|
||||
return self.properties(value)
|
||||
|
||||
def prop_value(self, type_name: str, struct_type_name: str, value):
|
||||
if type_name == "StructProperty":
|
||||
self.struct_value(struct_type_name, value)
|
||||
elif type_name == "EnumProperty":
|
||||
self.fstring(value)
|
||||
elif type_name == "NameProperty":
|
||||
self.fstring(value)
|
||||
elif type_name == "IntProperty":
|
||||
self.i32(value)
|
||||
elif type_name == "BoolProperty":
|
||||
self.bool(value)
|
||||
else:
|
||||
raise Exception(f"Unknown property value type: {type_name}")
|
||||
|
||||
def array_property(self, array_type: str, value: dict[str, Any]):
|
||||
count = len(value["values"])
|
||||
self.u32(count)
|
||||
if array_type == "StructProperty":
|
||||
self.fstring(value["prop_name"])
|
||||
self.fstring(value["prop_type"])
|
||||
nested_writer = self.copy()
|
||||
for i in range(count):
|
||||
nested_writer.struct_value(value["type_name"], value["values"][i])
|
||||
data_buf = nested_writer.bytes()
|
||||
self.u64(len(data_buf))
|
||||
self.fstring(value["type_name"])
|
||||
self.guid(value["id"])
|
||||
self.u(0)
|
||||
self.write(data_buf)
|
||||
else:
|
||||
self.array_value(array_type, count, value["values"])
|
||||
|
||||
def array_value(self, array_type: str, count: int, values: list[Any]):
    """Write `count` plain (non-struct) array elements, dispatching on the
    element property type.

    Raises:
        Exception: on an unknown element type — only when there is at
            least one element to write, matching per-element dispatch.
    """
    writers = {
        "IntProperty": self.i32,
        "Int64Property": self.i64,
        "FloatProperty": self.float,
        "StrProperty": self.fstring,
        "NameProperty": self.fstring,
        "EnumProperty": self.fstring,
        "BoolProperty": self.bool,
        "ByteProperty": self.byte,
    }
    writer_fn = writers.get(array_type)
    for i in range(count):
        if writer_fn is None:
            raise Exception(f"Unknown array type: {array_type}")
        writer_fn(values[i])
|
||||
|
||||
def compressed_short_rotator(self, pitch: float, yaw: float, roll: float):
    """Write a rotator with each axis quantized to an unsigned 16-bit
    angle (degrees * 65536/360).

    Per axis: a bool flag, then the u16 value only when it is non-zero —
    zero angles are encoded as just a False flag.
    """
    for angle in (pitch, yaw, roll):
        quantized = round(angle * (65536.0 / 360.0)) & 0xFFFF
        if quantized != 0:
            self.bool(True)
            self.u16(quantized)
        else:
            self.bool(False)
|
||||
|
||||
@staticmethod
def unreal_round_float_to_int(value: float) -> int:
    """Convert a float component to int for packed-vector serialization.

    NOTE(review): despite the name this truncates toward zero (int())
    rather than rounding to nearest — confirm this matches the engine's
    behaviour before changing it.
    """
    return int(value)
|
||||
|
||||
@staticmethod
def unreal_get_bits_needed(value: int) -> int:
    """Return the number of bits needed to serialize a signed component.

    `value ^ (value >> 63)` folds the sign: for a 64-bit signed input it
    maps negatives to their bitwise complement (a non-negative number), so
    the significant-bit count plus one sign bit is
    65 - clz64(massaged_value).
    NOTE(review): assumes `value` fits in a signed 64-bit range — confirm
    with callers.
    """
    massaged_value = value ^ (value >> 63)
    return 65 - FArchiveWriter.count_leading_zeroes(massaged_value)
|
||||
|
||||
@staticmethod
def count_leading_zeroes(value: int) -> int:
    """Count leading zero bits of a 64-bit value (clz64).

    Precedence makes this (67 - len(bin(-value))) & ((~value) >> 64):
    for 0 < value < 2**64, bin(-value) is '-0b' + bits, so the left term
    is 64 - bit_length, and the mask is -1 (a no-op); for value == 0 the
    left term is 64, the expected clz of zero.
    NOTE(review): relies on Python operator precedence ('-' before '&',
    '>>' before '&') and on value fitting in 64 bits — do not "simplify".
    """
    return 67 - len(bin(-value)) & ~value >> 64
|
||||
|
||||
def serializeint(self, component_bit_count: int, value: int):
    """Write `value` as a little-endian signed integer occupying the
    minimum whole number of bytes that can hold `component_bit_count`
    bits (ceil division by 8)."""
    self.write(
        int.to_bytes(value, (component_bit_count + 7) // 8, "little", signed=True)
    )
|
||||
|
||||
def packed_vector(self, scale_factor: int, x: float, y: float, z: float):
    """Write a vector in UE's packed (quantized) network format.

    If the (optionally scaled) components fit in 62 bits they are written
    as variable-width signed integers after a u32 info word whose bit 6
    flags whether scaling was applied and whose low bits carry the
    per-component bit count; otherwise bit count 0 is written and the raw
    doubles follow.
    """
    # Components larger than 2**52 lose integer precision as doubles, so
    # they are stored unscaled.
    max_exponent_for_scaling = 52
    max_value_to_scale = 1 << max_exponent_for_scaling
    # Scaled values must fit a signed 64-bit int with headroom.
    max_exponent_after_scaling = 62
    max_scaled_value = 1 << max_exponent_after_scaling
    scaled_x = x * scale_factor
    scaled_y = y * scale_factor
    scaled_z = z * scale_factor
    if max(abs(scaled_x), abs(scaled_y), abs(scaled_z)) < max_scaled_value:
        # NOTE(review): uses min() over the unscaled magnitudes — a single
        # small component enables scaling even if another is >= 2**52;
        # confirm against the engine's packed-vector writer.
        use_scaled_value = min(abs(x), abs(y), abs(z)) < max_value_to_scale
        if use_scaled_value:
            x = self.unreal_round_float_to_int(scaled_x)
            y = self.unreal_round_float_to_int(scaled_y)
            z = self.unreal_round_float_to_int(scaled_z)
        else:
            x = self.unreal_round_float_to_int(x)
            y = self.unreal_round_float_to_int(y)
            z = self.unreal_round_float_to_int(z)

        # All three components share the widest bit count.
        component_bit_count = max(
            self.unreal_get_bits_needed(x),
            self.unreal_get_bits_needed(y),
            self.unreal_get_bits_needed(z),
        )
        # Bit 6 = "scaled" flag, low bits = component bit count.
        component_bit_count_and_scale_info = (
            1 << 6 if use_scaled_value else 0
        ) | component_bit_count
        self.u32(component_bit_count_and_scale_info)
        self.serializeint(component_bit_count, x)
        self.serializeint(component_bit_count, y)
        self.serializeint(component_bit_count, z)
    else:
        # Out of range: bit count 0 signals that full-precision doubles
        # follow instead of packed integers.
        component_bit_count = 0
        component_bit_count_and_scale_info = (1 << 6) | component_bit_count
        self.u32(component_bit_count_and_scale_info)
        self.double(x)
        self.double(y)
        self.double(z)
|
||||
|
||||
def ftransform(self, value: dict[str, dict[str, float]]):
    """Write an FTransform as ten doubles: rotation quaternion (x,y,z,w),
    translation (x,y,z), then scale3d (x,y,z)."""
    for part, axes in (
        ("rotation", "xyzw"),
        ("translation", "xyz"),
        ("scale3d", "xyz"),
    ):
        for axis in axes:
            self.double(value[part][axis])
|
||||
|
|
@ -0,0 +1,155 @@
|
|||
import base64
|
||||
from typing import Any, Callable
|
||||
|
||||
from lib.archive import FArchiveReader, FArchiveWriter
|
||||
|
||||
|
||||
def custom_version_reader(reader: FArchiveReader):
    """Read one custom-version entry as a (guid, version) pair."""
    version_guid = reader.guid()
    version_number = reader.i32()
    return (version_guid, version_number)
|
||||
|
||||
|
||||
def custom_version_writer(writer: FArchiveWriter, value: tuple[str, int]):
    """Write one (guid, version) custom-version entry."""
    version_guid = value[0]
    version_number = value[1]
    writer.guid(version_guid)
    writer.i32(version_number)
|
||||
|
||||
|
||||
class GvasHeader:
    """GVAS save-file header: magic tag, save/package/engine versions, the
    custom-version table and the save game class name."""

    magic: int
    save_game_version: int
    package_file_version_ue4: int
    package_file_version_ue5: int
    engine_version_major: int
    engine_version_minor: int
    engine_version_patch: int
    engine_version_changelist: int
    engine_version_branch: str
    custom_version_format: int
    custom_versions: list[tuple[str, int]]
    save_game_class_name: str

    # Field names in dump()/load() order (same as serialization order).
    _FIELDS = (
        "magic",
        "save_game_version",
        "package_file_version_ue4",
        "package_file_version_ue5",
        "engine_version_major",
        "engine_version_minor",
        "engine_version_patch",
        "engine_version_changelist",
        "engine_version_branch",
        "custom_version_format",
        "custom_versions",
        "save_game_class_name",
    )

    @staticmethod
    def read(reader: FArchiveReader) -> "GvasHeader":
        """Parse a header from the archive, validating the fixed fields.

        Raises:
            Exception: on a wrong magic, save game version != 3 or custom
                version format != 3.
        """
        header = GvasHeader()
        # FileTypeTag: ASCII "GVAS" as a little-endian i32.
        header.magic = reader.i32()
        if header.magic != 0x53415647:
            raise Exception("invalid magic")
        # SaveGameFileVersion
        header.save_game_version = reader.i32()
        if header.save_game_version != 3:
            raise Exception(
                f"expected save game version 3, got {header.save_game_version}"
            )
        # PackageFileUEVersion (UE4 and UE5 package versions)
        header.package_file_version_ue4 = reader.i32()
        header.package_file_version_ue5 = reader.i32()
        # SavedEngineVersion
        header.engine_version_major = reader.u16()
        header.engine_version_minor = reader.u16()
        header.engine_version_patch = reader.u16()
        header.engine_version_changelist = reader.u32()
        header.engine_version_branch = reader.fstring()
        # CustomVersionFormat
        header.custom_version_format = reader.i32()
        if header.custom_version_format != 3:
            raise Exception(
                f"expected custom version format 3, got {header.custom_version_format}"
            )
        # CustomVersions: array of (guid, version) pairs.
        header.custom_versions = reader.tarray(custom_version_reader)
        header.save_game_class_name = reader.fstring()
        return header

    @staticmethod
    def load(dict: dict[str, Any]) -> "GvasHeader":
        """Rebuild a header from a dict previously produced by dump()."""
        header = GvasHeader()
        for name in GvasHeader._FIELDS:
            setattr(header, name, dict[name])
        return header

    def dump(self) -> dict[str, Any]:
        """Return a JSON-friendly dict of every header field."""
        return {name: getattr(self, name) for name in self._FIELDS}

    def write(self, writer: FArchiveWriter):
        """Serialize the header in exactly the order read() consumes it."""
        writer.i32(self.magic)
        writer.i32(self.save_game_version)
        writer.i32(self.package_file_version_ue4)
        writer.i32(self.package_file_version_ue5)
        writer.u16(self.engine_version_major)
        writer.u16(self.engine_version_minor)
        writer.u16(self.engine_version_patch)
        writer.u32(self.engine_version_changelist)
        writer.fstring(self.engine_version_branch)
        writer.i32(self.custom_version_format)
        writer.tarray(custom_version_writer, self.custom_versions)
        writer.fstring(self.save_game_class_name)
|
||||
|
||||
|
||||
class GvasFile:
    """A parsed GVAS save: header, decoded property tree and raw trailer."""

    header: GvasHeader
    properties: dict[str, Any]
    trailer: bytes

    @staticmethod
    def read(
        data: bytes,
        type_hints: "dict[str, str] | None" = None,
        custom_properties: "dict[str, tuple[Callable, Callable]] | None" = None,
    ) -> "GvasFile":
        """Parse raw GVAS bytes into a GvasFile.

        BUG FIX: the previous `= {}` defaults were shared mutable dicts
        across calls; None sentinels substituted with fresh empty dicts are
        backward compatible for all callers.
        """
        gvas_file = GvasFile()
        reader = FArchiveReader(data, type_hints or {}, custom_properties or {})
        gvas_file.header = GvasHeader.read(reader)
        gvas_file.properties = reader.properties_until_end()
        gvas_file.trailer = reader.read_to_end()
        # A fully-parsed file leaves exactly four zero padding bytes.
        if gvas_file.trailer != b"\x00\x00\x00\x00":
            print(
                f"{len(gvas_file.trailer)} bytes of trailer data, file may not have fully parsed"
            )
        return gvas_file

    @staticmethod
    def load(dict: dict[str, Any]) -> "GvasFile":
        """Rebuild a GvasFile from a dump()-style dict (trailer is base64)."""
        gvas_file = GvasFile()
        gvas_file.header = GvasHeader.load(dict["header"])
        gvas_file.properties = dict["properties"]
        gvas_file.trailer = base64.b64decode(dict["trailer"])
        return gvas_file

    def dump(self) -> dict[str, Any]:
        """Return a JSON-friendly dict; trailer bytes are base64-encoded."""
        return {
            "header": self.header.dump(),
            "properties": self.properties,
            "trailer": base64.b64encode(self.trailer).decode("utf-8"),
        }

    def write(
        self, custom_properties: "dict[str, tuple[Callable, Callable]] | None" = None
    ) -> bytes:
        """Serialize the file back to raw GVAS bytes.

        BUG FIX: None-sentinel default replaces a shared mutable `{}`.
        """
        writer = FArchiveWriter(custom_properties or {})
        self.header.write(writer)
        writer.properties(self.properties)
        writer.write(self.trailer)
        return writer.bytes()
|
||||
|
|
@ -0,0 +1,65 @@
|
|||
import ctypes
|
||||
import json
|
||||
import re
|
||||
import uuid
|
||||
|
||||
|
||||
class NoIndent(object):
    """Marker wrapper telling the custom JSON encoder to emit the wrapped
    sequence on a single line instead of one element per line."""

    def __init__(self, value):
        # Only sequences make sense to collapse onto one line.
        if isinstance(value, (list, tuple)):
            self.value = value
        else:
            raise TypeError("Only lists and tuples can be wrapped")
|
||||
|
||||
|
||||
class CustomEncoder(json.JSONEncoder):
    """JSON encoder that renders NoIndent-wrapped sequences on one line.

    default() emits a placeholder string carrying the wrapper's object id;
    iterencode() then splices the compact json.dumps() of the wrapped value
    over each placeholder in the streamed output.
    """

    FORMAT_SPEC = "@@{}@@"
    regex = re.compile(FORMAT_SPEC.format(r"(\d+)"))

    def __init__(self, **kwargs):
        # Options that must not be forwarded when re-dumping wrapped values
        # ("cls" would recurse, "indent" would defeat the one-line goal).
        ignore = {"cls", "indent"}

        # Keep a copy of the remaining options for the nested dumps below.
        self._kwargs = {k: v for k, v in kwargs.items() if k not in ignore}
        super(CustomEncoder, self).__init__(**kwargs)

    def default(self, obj):
        if isinstance(obj, NoIndent):
            return self.FORMAT_SPEC.format(id(obj))
        if isinstance(obj, uuid.UUID):
            return str(obj)
        return super(CustomEncoder, self).default(obj)

    def iterencode(self, obj, **kwargs):
        format_spec = self.FORMAT_SPEC  # Local alias for the hot loop.

        for encoded in super(CustomEncoder, self).iterencode(obj, **kwargs):
            match = self.regex.search(encoded)
            if match:
                # Recover the live NoIndent object from its id() and dump
                # its payload without indentation.
                obj_id = int(match.group(1))
                wrapper = ctypes.cast(obj_id, ctypes.py_object).value
                compact = json.dumps(wrapper.value, **self._kwargs)
                # Swap the quoted placeholder for the compact JSON.
                encoded = encoded.replace(
                    '"{}"'.format(format_spec.format(obj_id)), compact
                )

            yield encoded
|
||||
|
||||
|
||||
class NoIndentByteDecoder(json.JSONDecoder):
    """JSON decoder that re-wraps raw byte-array "values" lists in NoIndent
    so a later re-encode keeps them on one line.

    BUG FIX: the previous hook crashed on scalar values
    (`"values" in dct["value"]` raises TypeError when "value" is an int)
    and on empty lists (`values[0]` raises IndexError); both cases now
    pass through untouched.
    """

    def __init__(self, *args, **kwargs):
        json.JSONDecoder.__init__(self, object_hook=self.object_hook, *args, **kwargs)

    def object_hook(self, dct):
        """Wrap non-empty int lists found at dct["value"]["values"]."""
        value = dct.get("value")
        # Only dict-shaped values can carry a "values" list; scalars
        # (IntProperty payloads etc.) are left alone.
        if isinstance(value, dict):
            values = value.get("values")
            if isinstance(values, list) and values and isinstance(values[0], int):
                value["values"] = NoIndent(values)
        return dct
|
||||
|
|
@ -0,0 +1,56 @@
|
|||
import zlib
|
||||
|
||||
MAGIC_BYTES = b"PlZ"  # magic tag identifying a compressed Palworld .sav


def decompress_sav_to_gvas(data: bytes) -> tuple[bytes, int]:
    """Decompress a Palworld .sav blob into raw GVAS bytes.

    Layout: u32 uncompressed length, u32 compressed length, b"PlZ", one
    save-type byte, then the zlib payload (deflated twice for type 0x32).

    Returns:
        (gvas_bytes, save_type).

    Raises:
        Exception: on a bad magic, unknown/unhandled save type, or a
            length field that does not match the payload.
    """
    uncompressed_len = int.from_bytes(data[0:4], byteorder="little")
    compressed_len = int.from_bytes(data[4:8], byteorder="little")
    magic_bytes = data[8:11]
    save_type = data[11]
    # Check for magic bytes
    if magic_bytes != MAGIC_BYTES:
        raise Exception(
            f"not a compressed Palworld save, found {magic_bytes} instead of {MAGIC_BYTES}"
        )
    # Valid save types
    if save_type not in [0x30, 0x31, 0x32]:
        raise Exception(f"unknown save type: {save_type}")
    # We only have 0x31 (single zlib) and 0x32 (double zlib) saves
    if save_type not in [0x31, 0x32]:
        raise Exception(f"unhandled compression type: {save_type}")
    if save_type == 0x31:
        # Single-zlib saves store the on-disk payload length.
        if compressed_len != len(data) - 12:
            raise Exception(f"incorrect compressed length: {compressed_len}")
    # First inflate pass applies to both supported types.
    uncompressed_data = zlib.decompress(data[12:])
    if save_type == 0x32:
        # Double-zlib saves store the inner (once-deflated) stream length.
        if compressed_len != len(uncompressed_data):
            raise Exception(f"incorrect compressed length: {compressed_len}")
        uncompressed_data = zlib.decompress(uncompressed_data)
    # Final sanity check against the declared plaintext size.
    if uncompressed_len != len(uncompressed_data):
        raise Exception(f"incorrect uncompressed length: {uncompressed_len}")

    return uncompressed_data, save_type
|
||||
|
||||
|
||||
def compress_gvas_to_sav(data: bytes, save_type: int) -> bytes:
    """Compress raw GVAS bytes into a Palworld .sav blob.

    The stored compressed length always refers to the single-deflated
    stream, matching what decompress_sav_to_gvas validates; type 0x32
    saves deflate that stream a second time.
    """
    uncompressed_len = len(data)
    compressed_data = zlib.compress(data)
    compressed_len = len(compressed_data)
    if save_type == 0x32:
        compressed_data = zlib.compress(compressed_data)

    # Header: both lengths, the magic tag and the save-type byte.
    header = (
        uncompressed_len.to_bytes(4, byteorder="little")
        + compressed_len.to_bytes(4, byteorder="little")
        + MAGIC_BYTES
        + bytes([save_type])
    )

    return header + compressed_data
|
||||
|
|
@ -0,0 +1,127 @@
|
|||
from typing import Any, Callable
|
||||
|
||||
from lib.archive import FArchiveReader, FArchiveWriter
|
||||
from lib.rawdata import (
|
||||
base_camp,
|
||||
base_camp_module,
|
||||
build_process,
|
||||
character,
|
||||
character_container,
|
||||
connector,
|
||||
debug,
|
||||
dynamic_item,
|
||||
foliage_model,
|
||||
foliage_model_instance,
|
||||
group,
|
||||
item_container,
|
||||
item_container_slots,
|
||||
map_model,
|
||||
work_collection,
|
||||
worker_director,
|
||||
)
|
||||
|
||||
# Property-tree paths whose element type cannot be inferred from the data
# itself (e.g. when a map/array is empty): maps dotted path -> the UE
# property type name the reader should assume at that path.
PALWORLD_TYPE_HINTS: dict[str, str] = {
    ".worldSaveData.CharacterContainerSaveData.Key": "StructProperty",
    ".worldSaveData.CharacterSaveParameterMap.Key": "StructProperty",
    ".worldSaveData.CharacterSaveParameterMap.Value": "StructProperty",
    ".worldSaveData.FoliageGridSaveDataMap.Key": "StructProperty",
    ".worldSaveData.FoliageGridSaveDataMap.Value.ModelMap.Value": "StructProperty",
    ".worldSaveData.FoliageGridSaveDataMap.Value.ModelMap.Value.InstanceDataMap.Key": "StructProperty",
    ".worldSaveData.FoliageGridSaveDataMap.Value.ModelMap.Value.InstanceDataMap.Value": "StructProperty",
    ".worldSaveData.FoliageGridSaveDataMap.Value": "StructProperty",
    ".worldSaveData.ItemContainerSaveData.Key": "StructProperty",
    ".worldSaveData.MapObjectSaveData.MapObjectSaveData.ConcreteModel.ModuleMap.Value": "StructProperty",
    ".worldSaveData.MapObjectSaveData.MapObjectSaveData.Model.EffectMap.Value": "StructProperty",
    ".worldSaveData.MapObjectSpawnerInStageSaveData.Key": "StructProperty",
    ".worldSaveData.MapObjectSpawnerInStageSaveData.Value": "StructProperty",
    ".worldSaveData.MapObjectSpawnerInStageSaveData.Value.SpawnerDataMapByLevelObjectInstanceId.Key": "Guid",
    ".worldSaveData.MapObjectSpawnerInStageSaveData.Value.SpawnerDataMapByLevelObjectInstanceId.Value": "StructProperty",
    ".worldSaveData.MapObjectSpawnerInStageSaveData.Value.SpawnerDataMapByLevelObjectInstanceId.Value.ItemMap.Value": "StructProperty",
    ".worldSaveData.WorkSaveData.WorkSaveData.WorkAssignMap.Value": "StructProperty",
    ".worldSaveData.BaseCampSaveData.Key": "Guid",
    ".worldSaveData.BaseCampSaveData.Value": "StructProperty",
    ".worldSaveData.BaseCampSaveData.Value.ModuleMap.Value": "StructProperty",
    ".worldSaveData.ItemContainerSaveData.Value": "StructProperty",
    ".worldSaveData.CharacterContainerSaveData.Value": "StructProperty",
    ".worldSaveData.GroupSaveDataMap.Key": "Guid",
    ".worldSaveData.GroupSaveDataMap.Value": "StructProperty",
    ".worldSaveData.EnemyCampSaveData.EnemyCampStatusMap.Value": "StructProperty",
    ".worldSaveData.DungeonSaveData.DungeonSaveData.MapObjectSaveData.MapObjectSaveData.Model.EffectMap.Value": "StructProperty",
    ".worldSaveData.DungeonSaveData.DungeonSaveData.MapObjectSaveData.MapObjectSaveData.ConcreteModel.ModuleMap.Value": "StructProperty",
}
|
||||
|
||||
# Per-path custom (de)serializers for RawData blobs embedded in the save:
# maps dotted property path -> (decode, encode) pair from lib.rawdata.
# Entries left commented out are either not serialized into, duplicate
# data held elsewhere, or depend on a type discriminator that is not yet
# handled.
PALWORLD_CUSTOM_PROPERTIES: dict[
    str,
    tuple[
        Callable[[FArchiveReader, str, int, str], dict[str, Any]],
        Callable[[FArchiveWriter, str, dict[str, Any]], int],
    ],
] = {
    ".worldSaveData.GroupSaveDataMap": (group.decode, group.encode),
    ".worldSaveData.CharacterSaveParameterMap.Value.RawData": (
        character.decode,
        character.encode,
    ),
    ".worldSaveData.MapObjectSaveData.MapObjectSaveData.Model.BuildProcess.RawData": (
        build_process.decode,
        build_process.encode,
    ),
    ".worldSaveData.MapObjectSaveData.MapObjectSaveData.Model.Connector.RawData": (
        connector.decode,
        connector.encode,
    ),
    ".worldSaveData.MapObjectSaveData.MapObjectSaveData.Model.RawData": (
        map_model.decode,
        map_model.encode,
    ),
    ".worldSaveData.ItemContainerSaveData.Value.RawData": (
        item_container.decode,
        item_container.encode,
    ),
    ".worldSaveData.ItemContainerSaveData.Value.Slots.Slots.RawData": (
        item_container_slots.decode,
        item_container_slots.encode,
    ),
    # This isn't actually serialised into at all?
    # ".worldSaveData.CharacterContainerSaveData.Value.RawData": (debug.decode, debug.encode),
    # This duplicates the data already serialised into the Slots UObject?
    ".worldSaveData.CharacterContainerSaveData.Value.Slots.Slots.RawData": (
        character_container.decode,
        character_container.encode,
    ),
    # DynamicItemSaveData is problematic because serialisation is dependent on type, which is not immediately obvious
    ".worldSaveData.DynamicItemSaveData.DynamicItemSaveData.RawData": (
        dynamic_item.decode,
        dynamic_item.encode,
    ),
    ".worldSaveData.FoliageGridSaveDataMap.Value.ModelMap.Value.RawData": (
        foliage_model.decode,
        foliage_model.encode,
    ),
    ".worldSaveData.FoliageGridSaveDataMap.Value.ModelMap.Value.InstanceDataMap.Value.RawData": (
        foliage_model_instance.decode,
        foliage_model_instance.encode,
    ),
    ".worldSaveData.BaseCampSaveData.Value.RawData": (
        base_camp.decode,
        base_camp.encode,
    ),
    ".worldSaveData.BaseCampSaveData.Value.WorkerDirector.RawData": (
        worker_director.decode,
        worker_director.encode,
    ),
    ".worldSaveData.BaseCampSaveData.Value.WorkCollection.RawData": (
        work_collection.decode,
        work_collection.encode,
    ),
    # ".worldSaveData.BaseCampSaveData.Value.ModuleMap": (base_camp_module.decode, base_camp_module.encode),
    # ".worldSaveData.WorkSaveData.WorkSaveData.RawData": (debug.decode, debug.encode),
    # ".worldSaveData.WorkSaveData.WorkSaveData.WorkAssignMap.Value.RawData": (debug.decode, debug.encode),
    # ConcreteModel is problematic because serialisation is dependent on type, which is not immediately obvious
    # ".worldSaveData.MapObjectSaveData.MapObjectSaveData.ConcreteModel": (
    #     decode_map_concrete_model,
    #     encode_map_concrete_model,
    # ),
    # ".worldSaveData.MapObjectSaveData.MapObjectSaveData.ConcreteModel.RawData": (),
    # ".worldSaveData.MapObjectSaveData.MapObjectSaveData.ConcreteModel.ModuleMap.Value.RawData": (),
}
|
||||
|
|
@ -0,0 +1,55 @@
|
|||
from typing import Any, Sequence
|
||||
|
||||
from lib.archive import *
|
||||
|
||||
|
||||
def decode(
    reader: FArchiveReader, type_name: str, size: int, path: str
) -> dict[str, Any]:
    """Decode a base camp RawData ArrayProperty: parse the property
    normally, then replace its raw byte list with the structured payload."""
    if type_name != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {type_name}")
    value = reader.property(type_name, size, path, allow_custom=False)
    value["value"] = decode_bytes(value["value"]["values"])
    return value
|
||||
|
||||
|
||||
def decode_bytes(b_bytes: Sequence[int]) -> dict[str, Any]:
    """Parse a base camp RawData blob into its fields (encode_bytes is the
    exact mirror of this layout)."""
    reader = FArchiveReader(bytes(b_bytes))
    # Dict literals evaluate values in order, matching the on-disk layout.
    data = {
        "id": reader.guid(),
        "name": reader.fstring(),
        "state": reader.byte(),
        "transform": reader.ftransform(),
        "area_range": reader.float(),
        "group_id_belong_to": reader.guid(),
        "fast_travel_local_transform": reader.ftransform(),
        "owner_map_object_instance_id": reader.guid(),
    }
    if not reader.eof():
        raise Exception("Warning: EOF not reached")
    return data
|
||||
|
||||
|
||||
def encode(
    writer: FArchiveWriter, property_type: str, properties: dict[str, Any]
) -> int:
    """Re-encode a structured base camp payload into a raw ArrayProperty."""
    if property_type != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {property_type}")
    # Strip the marker added during decoding before delegating to the
    # generic property writer.
    del properties["custom_type"]
    properties["value"] = {"values": list(encode_bytes(properties["value"]))}
    return writer.property_inner(property_type, properties)
|
||||
|
||||
|
||||
def encode_bytes(p: dict[str, Any]) -> bytes:
    """Serialize a structured base camp payload back to raw bytes,
    mirroring decode_bytes field order exactly."""
    writer = FArchiveWriter()
    writer.guid(p["id"])
    writer.fstring(p["name"])
    writer.byte(p["state"])
    writer.ftransform(p["transform"])
    writer.float(p["area_range"])
    writer.guid(p["group_id_belong_to"])
    writer.ftransform(p["fast_travel_local_transform"])
    writer.guid(p["owner_map_object_instance_id"])
    return writer.bytes()
|
||||
|
|
@ -0,0 +1,122 @@
|
|||
from typing import Any, Sequence
|
||||
|
||||
from lib.archive import *
|
||||
|
||||
# Base camp module types whose RawData payload is empty — nothing to parse.
NO_OP_TYPES = [
    "EPalBaseCampModuleType::Energy",
    "EPalBaseCampModuleType::Medical",
    "EPalBaseCampModuleType::ResourceCollector",
    "EPalBaseCampModuleType::ItemStorages",
    "EPalBaseCampModuleType::FacilityReservation",
    "EPalBaseCampModuleType::ObjectMaintenance",
]
|
||||
|
||||
|
||||
def decode(
    reader: FArchiveReader, type_name: str, size: int, path: str
) -> dict[str, Any]:
    """Decode the base camp ModuleMap MapProperty.

    Currently only dumps each module's type and a hex dump of its raw
    payload for debugging; the structured per-module decode (decode_bytes)
    is left disabled below, so the property is returned unmodified.
    """
    if type_name != "MapProperty":
        raise Exception(f"Expected MapProperty, got {type_name}")
    value = reader.property(type_name, size, path, allow_custom=False)
    # module map
    module_map = value["value"]
    for module in module_map:
        module_type = module["key"]
        module_bytes = module["value"]["RawData"]["value"]["values"]
        # Debug output: module type followed by its payload as hex.
        print(module_type)
        print("".join(f"{b:02x}" for b in module_bytes))
        # module["value"]["RawData"]["value"] = decode_bytes(module_bytes, module_type)
    return value
|
||||
|
||||
|
||||
def pal_item_and_slot_read(reader: FArchiveReader) -> dict[str, Any]:
    """Read one transported-item entry: an item id (two guids) plus the
    slot guid it occupies.

    NOTE(review): the commented-out static_id/dynamic_id nesting suggests
    the on-disk layout may contain more fields than are read here —
    confirm against real saves before relying on this structure.
    """
    return {
        "item_id": {
            # "static_id": reader.fstring(),
            # "dynamic_id": {
            "created_world_id": reader.guid(),
            "local_id_in_created_world": reader.guid(),
            # }
        },
        "slot_id": reader.guid(),
    }
|
||||
|
||||
|
||||
def transport_item_character_info_reader(reader: FArchiveReader) -> dict[str, Any]:
    """Read one transport-item character entry: an array of item/slot
    pairs followed by the character's world location (three doubles)."""
    return {
        # BUG FIX: the original stored the bound method `reader.tarray`
        # instead of calling it, so no item entries were consumed and the
        # location doubles were read from the wrong offset.
        "item_infos": reader.tarray(pal_item_and_slot_read),
        "character_location": {
            "x": reader.double(),
            "y": reader.double(),
            "z": reader.double(),
        },
    }
|
||||
|
||||
|
||||
# Maps the raw effect-type byte to its EPalBaseCampPassiveEffectType name.
PASSIVE_EFFECT_ENUM = {
    0: "EPalBaseCampPassiveEffectType::None",
    1: "EPalBaseCampPassiveEffectType::WorkSuitability",
    2: "EPalBaseCampPassiveEffectType::WorkHard",
}
|
||||
|
||||
|
||||
def module_passive_effect_reader(reader: FArchiveReader) -> dict[str, Any]:
    """Read a single base camp passive-effect entry.

    Layout: one type byte (see PASSIVE_EFFECT_ENUM); type 2 (WorkHard) is
    followed by one extra byte selecting the work-hard subtype.

    Raises:
        Exception: if the type byte is not a known PASSIVE_EFFECT_ENUM key.
    """
    data = {}
    data["type"] = reader.byte()
    if data["type"] not in PASSIVE_EFFECT_ENUM:
        raise Exception(f"Unknown passive effect type {data['type']}")
    elif data["type"] == 2:
        # BUG FIX: the subtype byte belongs to WorkHard (enum value 2),
        # not WorkSuitability (1) — the field name matches enum entry 2.
        data["work_hard_type"] = reader.byte()
    return data
|
||||
|
||||
|
||||
def decode_bytes(b_bytes: Sequence[int], module_type: str) -> dict[str, Any]:
    """Parse a base camp module's raw payload according to its type.

    Module types in NO_OP_TYPES carry no payload. Payloads that fail to
    parse fall back to the raw byte list so no save data is ever lost.
    """
    reader = FArchiveReader(bytes(b_bytes))
    data = {}
    if module_type in NO_OP_TYPES:
        pass
    elif module_type == "EPalBaseCampModuleType::TransportItemDirector":
        try:
            data["transport_item_character_infos"] = reader.tarray(
                transport_item_character_info_reader
            )
        except Exception as e:
            # Best-effort: rewind and keep the raw bytes instead of
            # failing the whole save.
            reader.data.seek(0)
            print(
                f"Warning: Failed to decode transport item director, please report this: {e} ({reader.bytes()})"
            )
            data = {"values": b_bytes}
    elif module_type == "EPalBaseCampModuleType::PassiveEffect":
        try:
            data["passive_effects"] = reader.tarray(module_passive_effect_reader)
        except Exception as e:
            # Same best-effort fallback as above.
            reader.data.seek(0)
            print(
                f"Warning: Failed to decode passive effect, please report this: {e} ({reader.bytes()})"
            )
            data = {"values": b_bytes}
    else:
        print(f"Warning: Unknown base camp module type {module_type}, skipping")
        data["values"] = [b for b in reader.bytes()]
    # NOTE(review): the fallback branches call reader.bytes(), which appears
    # to drain the stream so this EOF check passes — confirm
    # FArchiveReader.bytes() semantics before refactoring.
    if not reader.eof():
        raise Exception("Warning: EOF not reached")
    return data
|
||||
|
||||
|
||||
def encode(
    writer: FArchiveWriter, property_type: str, properties: dict[str, Any]
) -> int:
    """Re-encode the base camp module map.

    Structured re-encoding is not implemented yet (decode leaves payloads
    raw), so this only strips the custom-type marker and defers to the
    generic property writer.
    """
    if property_type != "MapProperty":
        raise Exception(f"Expected MapProperty, got {property_type}")
    del properties["custom_type"]
    # encoded_bytes = encode_bytes(properties["value"])
    # properties["value"] = {"values": [b for b in encoded_bytes]}
    return writer.property_inner(property_type, properties)
|
||||
|
||||
|
||||
def encode_bytes(p: dict[str, Any]) -> bytes:
    """Placeholder encoder (currently unused — see encode() above).

    NOTE(review): writes a state byte + guid, which does not correspond to
    any layout produced by decode_bytes in this module; it looks copied
    from another rawdata module. Verify the layout before enabling.
    """
    writer = FArchiveWriter()
    writer.byte(p["state"])
    writer.guid(p["id"])
    encoded_bytes = writer.bytes()
    return encoded_bytes
|
||||
|
|
@ -0,0 +1,43 @@
|
|||
from typing import Any, Sequence
|
||||
|
||||
from lib.archive import *
|
||||
|
||||
|
||||
def decode(
    reader: FArchiveReader, type_name: str, size: int, path: str
) -> dict[str, Any]:
    """Decode the build-process RawData ArrayProperty: parse the property
    normally, then replace its raw byte list with the structured payload."""
    if type_name != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {type_name}")
    value = reader.property(type_name, size, path, allow_custom=False)
    value["value"] = decode_bytes(value["value"]["values"])
    return value
|
||||
|
||||
|
||||
def decode_bytes(b_bytes: Sequence[int]) -> dict[str, Any]:
    """Parse a build-process blob: one state byte followed by a guid."""
    reader = FArchiveReader(bytes(b_bytes))
    # Dict literal evaluates in order, matching the on-disk layout.
    data = {
        "state": reader.byte(),
        "id": reader.guid(),
    }
    if not reader.eof():
        raise Exception("Warning: EOF not reached")
    return data
|
||||
|
||||
|
||||
def encode(
    writer: FArchiveWriter, property_type: str, properties: dict[str, Any]
) -> int:
    """Re-encode a structured build-process payload into a raw ArrayProperty."""
    if property_type != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {property_type}")
    # Strip the decode-time marker before delegating to the generic writer.
    del properties["custom_type"]
    properties["value"] = {"values": list(encode_bytes(properties["value"]))}
    return writer.property_inner(property_type, properties)
|
||||
|
||||
|
||||
def encode_bytes(p: dict[str, Any]) -> bytes:
    """Serialize a build-process payload, mirroring decode_bytes order."""
    writer = FArchiveWriter()
    writer.byte(p["state"])
    writer.guid(p["id"])
    return writer.bytes()
|
||||
|
|
@ -0,0 +1,45 @@
|
|||
from typing import Any, Sequence
|
||||
|
||||
from lib.archive import *
|
||||
|
||||
|
||||
def decode(
    reader: FArchiveReader, type_name: str, size: int, path: str
) -> dict[str, Any]:
    """Decode a character RawData ArrayProperty: parse the property
    normally, then replace its raw byte list with the structured payload."""
    if type_name != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {type_name}")
    value = reader.property(type_name, size, path, allow_custom=False)
    value["value"] = decode_bytes(value["value"]["values"])
    return value
|
||||
|
||||
|
||||
def decode_bytes(char_bytes: Sequence[int]) -> dict[str, Any]:
    """Parse a character blob: a nested property bag, four unknown bytes,
    then the owning group guid."""
    reader = FArchiveReader(bytes(char_bytes))
    # Dict literal evaluates in order, matching the on-disk layout.
    char_data = {
        "object": reader.properties_until_end(),
        "unknown_bytes": reader.byte_list(4),
        "group_id": reader.guid(),
    }
    if not reader.eof():
        raise Exception("Warning: EOF not reached")
    return char_data
|
||||
|
||||
|
||||
def encode(
    writer: FArchiveWriter, property_type: str, properties: dict[str, Any]
) -> int:
    """Re-encode a structured character payload into a raw ArrayProperty."""
    if property_type != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {property_type}")
    # Strip the decode-time marker before delegating to the generic writer.
    del properties["custom_type"]
    properties["value"] = {"values": list(encode_bytes(properties["value"]))}
    return writer.property_inner(property_type, properties)
|
||||
|
||||
|
||||
def encode_bytes(p: dict[str, Any]) -> bytes:
    """Serialize a character payload, mirroring decode_bytes order."""
    writer = FArchiveWriter()
    writer.properties(p["object"])
    writer.write(bytes(p["unknown_bytes"]))
    writer.guid(p["group_id"])
    return writer.bytes()
|
||||
|
|
@ -0,0 +1,49 @@
|
|||
from typing import Any, Sequence
|
||||
|
||||
from lib.archive import *
|
||||
|
||||
|
||||
def decode(
    reader: FArchiveReader, type_name: str, size: int, path: str
) -> dict[str, Any]:
    """Decode a character-container slot RawData ArrayProperty: parse the
    property normally, then replace its raw byte list with the payload."""
    if type_name != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {type_name}")
    value = reader.property(type_name, size, path, allow_custom=False)
    value["value"] = decode_bytes(value["value"]["values"])
    return value
|
||||
|
||||
|
||||
def decode_bytes(c_bytes: Sequence[int]) -> dict[str, Any]:
    """Parse a character-container slot blob: owning player guid, pal
    instance guid, then a permission/tribe byte.

    Returns None for an empty slot (zero bytes on disk).
    """
    if not c_bytes:
        return None
    reader = FArchiveReader(bytes(c_bytes))
    # Dict literal evaluates in order, matching the on-disk layout.
    data = {
        "player_uid": reader.guid(),
        "instance_id": reader.guid(),
        "permission_tribe_id": reader.byte(),
    }
    if not reader.eof():
        raise Exception("Warning: EOF not reached")
    return data
|
||||
|
||||
|
||||
def encode(
    writer: FArchiveWriter, property_type: str, properties: dict[str, Any]
) -> int:
    """Re-encode the decoded ArrayProperty into its raw byte form and write it."""
    if property_type != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {property_type}")
    del properties["custom_type"]
    raw = encode_bytes(properties["value"])
    properties["value"] = {"values": list(raw)}
    return writer.property_inner(property_type, properties)
|
||||
|
||||
|
||||
def encode_bytes(p: dict[str, Any]) -> bytes:
    """Serialize player uid, instance id and tribe byte; empty payload for None."""
    if p is None:
        return bytes()
    out = FArchiveWriter()
    out.guid(p["player_uid"])
    out.guid(p["instance_id"])
    out.byte(p["permission_tribe_id"])
    return out.bytes()
|
||||
|
|
@ -0,0 +1,81 @@
|
|||
from typing import Any, Sequence
|
||||
|
||||
from lib.archive import *
|
||||
|
||||
|
||||
def decode(
    reader: FArchiveReader, type_name: str, size: int, path: str
) -> dict[str, Any]:
    """Decode the ArrayProperty and swap its raw byte list for structured data."""
    if type_name != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {type_name}")
    decoded = reader.property(type_name, size, path, allow_custom=False)
    raw = decoded["value"]["values"]
    decoded["value"] = decode_bytes(raw)
    return decoded
|
||||
|
||||
|
||||
def connect_info_item_reader(reader: FArchiveReader) -> dict[str, Any]:
    """Read one connector entry: target model-instance GUID, then an index byte."""
    target = reader.guid()
    slot = reader.byte()
    return {
        "connect_to_model_instance_id": target,
        "index": slot,
    }
|
||||
|
||||
|
||||
def connect_info_item_writer(writer: FArchiveWriter, properties: dict[str, Any]):
    """Write one connector entry in the same order the reader consumes it."""
    target = properties["connect_to_model_instance_id"]
    slot = properties["index"]
    writer.guid(target)
    writer.byte(slot)
|
||||
|
||||
|
||||
def decode_bytes(c_bytes: Sequence[int]) -> dict[str, Any]:
    """Parse a building-connector payload.

    Layout: supported_level (i32), the primary connector (index byte +
    connect-info array), then zero or more additional connectors until EOF.
    Returns None for an empty payload.
    """
    if len(c_bytes) == 0:
        return None
    reader = FArchiveReader(bytes(c_bytes))
    data = {}
    data["supported_level"] = reader.i32()
    # Dict-literal values evaluate left-to-right, so index is read before the array.
    data["connect"] = {
        "index": reader.byte(),
        "any_place": reader.tarray(connect_info_item_reader),
    }
    # We are guessing here, we don't have information about the type without mapping object names -> types
    # Stairs have 2 connectors (up and down),
    # Roofs have 4 connectors (front, back, right, left)
    if not reader.eof():
        data["other_connectors"] = []
        while not reader.eof():
            data["other_connectors"].append(
                {
                    "index": reader.byte(),
                    "connect": reader.tarray(connect_info_item_reader),
                }
            )
        # Anything other than 2 or 4 extra connectors is an unrecognized shape;
        # warn but keep whatever was parsed.
        if len(data["other_connectors"]) not in [2, 4]:
            print(
                f"Warning: unknown connector type with {len(data['other_connectors'])} connectors"
            )
    return data
|
||||
|
||||
|
||||
def encode(
    writer: FArchiveWriter, property_type: str, properties: dict[str, Any]
) -> int:
    """Re-encode the decoded connector ArrayProperty and write it out."""
    if property_type != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {property_type}")
    del properties["custom_type"]
    raw = encode_bytes(properties["value"])
    properties["value"] = {"values": list(raw)}
    return writer.property_inner(property_type, properties)
|
||||
|
||||
|
||||
def encode_bytes(p: dict[str, Any]) -> bytes:
    """Serialize decoded connector data back to raw bytes.

    Write order must mirror decode_bytes: supported_level, primary connector,
    then any extra connectors. Returns an empty payload for None.
    """
    if p is None:
        return bytes()
    writer = FArchiveWriter()
    writer.i32(p["supported_level"])
    writer.byte(p["connect"]["index"])
    writer.tarray(connect_info_item_writer, p["connect"]["any_place"])
    # "other_connectors" is only present when the decoder found trailing data.
    if "other_connectors" in p:
        for other in p["other_connectors"]:
            writer.byte(other["index"])
            writer.tarray(connect_info_item_writer, other["connect"])
    encoded_bytes = writer.bytes()
    return encoded_bytes
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
from typing import Any
|
||||
|
||||
from lib.archive import *
|
||||
|
||||
|
||||
def decode(
    reader: FArchiveReader, type_name: str, size: int, path: str
) -> dict[str, Any]:
    """Pass-through decoder that hex-dumps the payload for reverse engineering.

    Unlike the other rawdata decoders, "value" is returned untouched; the
    byte layout of this property is not yet understood.
    """
    if type_name != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {type_name}")
    value = reader.property(type_name, size, path, allow_custom=False)
    debug_bytes = value["value"]["values"]
    if len(debug_bytes) > 0:
        # Print the payload as hex so unknown formats can be inspected manually.
        debug_str = "".join(f"{b:02x}" for b in debug_bytes)
        # if debug_str != "00000000000000000000000000000000":
        print(debug_str)
        # print(bytes(debug_bytes))
    return value
|
||||
|
||||
|
||||
def encode(
    writer: FArchiveWriter, property_type: str, properties: dict[str, Any]
) -> int:
    """Write the debug property back unchanged (raw bytes were never decoded)."""
    if property_type != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {property_type}")
    del properties["custom_type"]
    return writer.property_inner(property_type, properties)
|
||||
|
|
@ -0,0 +1,106 @@
|
|||
from typing import Any, Sequence
|
||||
|
||||
from lib.archive import *
|
||||
|
||||
|
||||
def decode(
    reader: FArchiveReader, type_name: str, size: int, path: str
) -> dict[str, Any]:
    """Decode the dynamic-item ArrayProperty and parse its raw bytes."""
    if type_name != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {type_name}")
    decoded = reader.property(type_name, size, path, allow_custom=False)
    raw = decoded["value"]["values"]
    decoded["value"] = decode_bytes(raw)
    return decoded
|
||||
|
||||
|
||||
def decode_bytes(c_bytes: Sequence[int]) -> dict[str, Any]:
    """Decode a dynamic-item payload into a structured dict.

    The payload starts with an item id (two GUIDs + static id string); the
    remainder is type-dependent and is sniffed in order: egg, armor (exactly
    4 bytes left: a durability float), weapon, else a raw "unknown" trailer.
    Returns None for an empty payload.
    """
    if len(c_bytes) == 0:
        return None
    buf = bytes(c_bytes)
    reader = FArchiveReader(buf)
    data = {}
    data["id"] = {
        "created_world_id": reader.guid(),
        "local_id_in_created_world": reader.guid(),
        "static_id": reader.fstring(),
    }
    data["type"] = "unknown"
    egg_data = try_read_egg(reader)
    # Fixed: compare to None by identity, not `!= None` equality.
    if egg_data is not None:
        data |= egg_data
    elif (reader.size - reader.data.tell()) == 4:
        # Exactly one float remaining: armor durability.
        data["type"] = "armor"
        data["durability"] = reader.float()
        if not reader.eof():
            raise Exception("Warning: EOF not reached")
    else:
        # Speculatively parse as a weapon; on failure rewind and keep the
        # unparsed remainder as a raw trailer.
        cur_pos = reader.data.tell()
        temp_data = {"type": "weapon"}
        try:
            temp_data["durability"] = reader.float()
            temp_data["remaining_bullets"] = reader.i32()
            temp_data["passive_skill_list"] = reader.tarray(lambda r: r.fstring())
            if not reader.eof():
                raise Exception("Warning: EOF not reached")
            data |= temp_data
        except Exception as e:
            print(
                f"Warning: Failed to parse weapon data, continuing as raw data {buf}: {e}"
            )
            reader.data.seek(cur_pos)
            data["trailer"] = [int(b) for b in reader.read_to_end()]
    return data
|
||||
|
||||
|
||||
def try_read_egg(reader: FArchiveReader) -> Optional[dict[str, Any]]:
    """Attempt to parse the rest of the payload as an egg.

    On any parse failure the reader is rewound and None is returned, except
    for the over-read case ("EOF not reached"), which is re-raised because it
    indicates a real format mismatch rather than "not an egg".
    """
    cur_pos = reader.data.tell()
    try:
        data = {"type": "egg"}
        data["character_id"] = reader.fstring()
        data["object"] = reader.properties_until_end()
        data["unknown_bytes"] = reader.byte_list(4)
        data["unknown_id"] = reader.guid()
        if not reader.eof():
            raise Exception("Warning: EOF not reached")
        return data
    except Exception as e:
        # Fixed: guard against argument-less exceptions — the original indexed
        # e.args[0] unconditionally and could itself raise IndexError.
        if e.args and e.args[0] == "Warning: EOF not reached":
            raise e
        reader.data.seek(cur_pos)
        return None
|
||||
|
||||
|
||||
def encode(
    writer: FArchiveWriter, property_type: str, properties: dict[str, Any]
) -> int:
    """Re-encode the decoded dynamic-item ArrayProperty and write it out."""
    if property_type != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {property_type}")
    del properties["custom_type"]
    raw = encode_bytes(properties["value"])
    properties["value"] = {"values": list(raw)}
    return writer.property_inner(property_type, properties)
|
||||
|
||||
|
||||
def encode_bytes(p: dict[str, Any]) -> bytes:
    """Serialize decoded dynamic-item data back to raw bytes.

    Writes the common item id first, then the type-specific section selected
    by p["type"] (unknown/egg/armor/weapon), mirroring decode_bytes exactly.
    Returns an empty payload for None.
    """
    if p is None:
        return bytes()
    writer = FArchiveWriter()
    writer.guid(p["id"]["created_world_id"])
    writer.guid(p["id"]["local_id_in_created_world"])
    writer.fstring(p["id"]["static_id"])
    if p["type"] == "unknown":
        # Unparsed remainder is replayed verbatim.
        writer.write(bytes(p["trailer"]))
    elif p["type"] == "egg":
        writer.fstring(p["character_id"])
        writer.properties(p["object"])
        writer.write(bytes(p["unknown_bytes"]))
        writer.guid(p["unknown_id"])
    elif p["type"] == "armor":
        writer.float(p["durability"])
    elif p["type"] == "weapon":
        writer.float(p["durability"])
        writer.i32(p["remaining_bullets"])
        writer.tarray(lambda w, d: w.fstring(d), p["passive_skill_list"])
    encoded_bytes = writer.bytes()
    return encoded_bytes
|
||||
|
|
@ -0,0 +1,53 @@
|
|||
from typing import Any, Sequence
|
||||
|
||||
from lib.archive import *
|
||||
|
||||
|
||||
def decode(
    reader: FArchiveReader, type_name: str, size: int, path: str
) -> dict[str, Any]:
    """Decode the foliage ArrayProperty and parse its raw bytes."""
    if type_name != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {type_name}")
    decoded = reader.property(type_name, size, path, allow_custom=False)
    raw = decoded["value"]["values"]
    decoded["value"] = decode_bytes(raw)
    return decoded
|
||||
|
||||
|
||||
def decode_bytes(b_bytes: Sequence[int]) -> dict[str, Any]:
    """Parse foliage payload: model id, preset byte, then a 3x i64 cell coord."""
    rd = FArchiveReader(bytes(b_bytes))
    # Dict-literal values evaluate left-to-right, preserving read order.
    parsed = {
        "model_id": rd.fstring(),
        "foliage_preset_type": rd.byte(),
        "cell_coord": {
            "x": rd.i64(),
            "y": rd.i64(),
            "z": rd.i64(),
        },
    }
    if not rd.eof():
        raise Exception("Warning: EOF not reached")
    return parsed
|
||||
|
||||
|
||||
def encode(
    writer: FArchiveWriter, property_type: str, properties: dict[str, Any]
) -> int:
    """Re-encode the decoded foliage ArrayProperty and write it out."""
    if property_type != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {property_type}")
    del properties["custom_type"]
    raw = encode_bytes(properties["value"])
    properties["value"] = {"values": list(raw)}
    return writer.property_inner(property_type, properties)
|
||||
|
||||
|
||||
def encode_bytes(p: dict[str, Any]) -> bytes:
    """Serialize foliage data: model id, preset byte, then x/y/z cell coord."""
    out = FArchiveWriter()
    out.fstring(p["model_id"])
    out.byte(p["foliage_preset_type"])
    cell = p["cell_coord"]
    out.i64(cell["x"])
    out.i64(cell["y"])
    out.i64(cell["z"])
    return out.bytes()
|
||||
|
|
@ -0,0 +1,72 @@
|
|||
from typing import Any, Sequence
|
||||
|
||||
from lib.archive import *
|
||||
|
||||
|
||||
def decode(
    reader: FArchiveReader, type_name: str, size: int, path: str
) -> dict[str, Any]:
    """Decode the foliage-instance ArrayProperty and parse its raw bytes."""
    if type_name != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {type_name}")
    decoded = reader.property(type_name, size, path, allow_custom=False)
    raw = decoded["value"]["values"]
    decoded["value"] = decode_bytes(raw)
    return decoded
|
||||
|
||||
|
||||
def decode_bytes(b_bytes: Sequence[int]) -> dict[str, Any]:
    """Parse a foliage-instance payload.

    Layout: model instance GUID, compressed short rotator, packed vector
    (scale factor 1), a scale_x float, then hp (i32).
    """
    reader = FArchiveReader(bytes(b_bytes))
    data = {}
    data["model_instance_id"] = reader.guid()
    # Rotator and location must be read before the dict literal below so that
    # the trailing scale_x float is consumed in the correct stream position.
    pitch, yaw, roll = reader.compressed_short_rotator()
    x, y, z = reader.packed_vector(1)
    data["world_transform"] = {
        "rotator": {
            "pitch": pitch,
            "yaw": yaw,
            "roll": roll,
        },
        "location": {
            "x": x,
            "y": y,
            "z": z,
        },
        "scale_x": reader.float(),
    }
    data["hp"] = reader.i32()
    if not reader.eof():
        raise Exception("Warning: EOF not reached")
    return data
|
||||
|
||||
|
||||
def encode(
    writer: FArchiveWriter, property_type: str, properties: dict[str, Any]
) -> int:
    """Re-encode the decoded foliage-instance ArrayProperty and write it out."""
    if property_type != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {property_type}")
    del properties["custom_type"]
    raw = encode_bytes(properties["value"])
    properties["value"] = {"values": list(raw)}
    return writer.property_inner(property_type, properties)
|
||||
|
||||
|
||||
def encode_bytes(p: dict[str, Any]) -> bytes:
    """Serialize foliage-instance data back to raw bytes.

    Write order mirrors decode_bytes: GUID, compressed rotator, packed vector
    (scale factor 1), scale_x float, hp (i32).
    """
    writer = FArchiveWriter()

    writer.guid(p["model_instance_id"])
    writer.compressed_short_rotator(
        p["world_transform"]["rotator"]["pitch"],
        p["world_transform"]["rotator"]["yaw"],
        p["world_transform"]["rotator"]["roll"],
    )
    writer.packed_vector(
        1,
        p["world_transform"]["location"]["x"],
        p["world_transform"]["location"]["y"],
        p["world_transform"]["location"]["z"],
    )
    writer.float(p["world_transform"]["scale_x"])
    writer.i32(p["hp"])

    encoded_bytes = writer.bytes()
    return encoded_bytes
|
||||
|
|
@ -0,0 +1,119 @@
|
|||
from typing import Sequence
|
||||
|
||||
from lib.archive import *
|
||||
|
||||
|
||||
def decode(
    reader: FArchiveReader, type_name: str, size: int, path: str
) -> dict[str, Any]:
    """Decode the group MapProperty, parsing each entry's RawData in place.

    The byte layout of RawData depends on the entry's GroupType, so the type
    string is passed through to decode_bytes.
    """
    if type_name != "MapProperty":
        raise Exception(f"Expected MapProperty, got {type_name}")
    value = reader.property(type_name, size, path, allow_custom=False)
    # Decode the raw bytes and replace the raw data
    group_map = value["value"]
    for group in group_map:
        group_type = group["value"]["GroupType"]["value"]["value"]
        group_bytes = group["value"]["RawData"]["value"]["values"]
        group["value"]["RawData"]["value"] = decode_bytes(group_bytes, group_type)
    return value
|
||||
|
||||
|
||||
def decode_bytes(group_bytes: Sequence[int], group_type: str) -> dict[str, Any]:
    """Parse one group's RawData payload according to its group type.

    A common header (id, name, member handles) is followed by progressively
    more specific sections for Organization/Guild/IndependentGuild types.
    """
    reader = FArchiveReader(bytes(group_bytes))
    group_data = {
        "group_type": group_type,
        "group_id": reader.guid(),
        "group_name": reader.fstring(),
        "individual_character_handle_ids": reader.tarray(instance_id_reader),
    }
    # Org-level fields are shared by all three "organized" group types.
    if group_type in [
        "EPalGroupType::Guild",
        "EPalGroupType::IndependentGuild",
        "EPalGroupType::Organization",
    ]:
        org = {
            "org_type": reader.byte(),
            "base_ids": reader.tarray(uuid_reader),
        }
        group_data |= org
    # Guild-level fields (base camp + name) apply to both guild variants.
    if group_type in ["EPalGroupType::Guild", "EPalGroupType::IndependentGuild"]:
        guild = {
            "base_camp_level": reader.i32(),
            "map_object_instance_ids_base_camp_points": reader.tarray(uuid_reader),
            "guild_name": reader.fstring(),
        }
        group_data |= guild
    # Independent guilds carry a single owning player's info.
    if group_type == "EPalGroupType::IndependentGuild":
        indie = {
            "player_uid": reader.guid(),
            "guild_name_2": reader.fstring(),
            "player_info": {
                "last_online_real_time": reader.i64(),
                "player_name": reader.fstring(),
            },
        }
        group_data |= indie
    # Full guilds carry an admin plus a counted list of member players.
    if group_type == "EPalGroupType::Guild":
        guild = {"admin_player_uid": reader.guid(), "players": []}
        player_count = reader.i32()
        for _ in range(player_count):
            player = {
                "player_uid": reader.guid(),
                "player_info": {
                    "last_online_real_time": reader.i64(),
                    "player_name": reader.fstring(),
                },
            }
            guild["players"].append(player)
        group_data |= guild
    # NOTE(review): EOF check deliberately disabled — some group payloads
    # appear to carry trailing bytes this parser does not yet understand.
    #if not reader.eof():
        #raise Exception("Warning: EOF not reached")
    return group_data
|
||||
|
||||
|
||||
def encode(
    writer: FArchiveWriter, property_type: str, properties: dict[str, Any]
) -> int:
    """Re-encode each group's structured RawData back to raw bytes and write.

    Entries whose RawData still contains a "values" list were never decoded
    (or are already raw) and are passed through untouched.
    """
    if property_type != "MapProperty":
        raise Exception(f"Expected MapProperty, got {property_type}")
    del properties["custom_type"]
    group_map = properties["value"]
    for group in group_map:
        if "values" in group["value"]["RawData"]["value"]:
            continue
        p = group["value"]["RawData"]["value"]
        encoded_bytes = encode_bytes(p)
        group["value"]["RawData"]["value"] = {"values": [b for b in encoded_bytes]}
    return writer.property_inner(property_type, properties)
|
||||
|
||||
|
||||
def encode_bytes(p: dict[str, Any]) -> bytes:
    """Serialize decoded group data back to raw bytes.

    Which sections are written depends on p["group_type"]; the field order
    must mirror decode_bytes exactly.
    """
    writer = FArchiveWriter()
    writer.guid(p["group_id"])
    writer.fstring(p["group_name"])
    writer.tarray(instance_id_writer, p["individual_character_handle_ids"])
    if p["group_type"] in [
        "EPalGroupType::Guild",
        "EPalGroupType::IndependentGuild",
        "EPalGroupType::Organization",
    ]:
        writer.byte(p["org_type"])
        writer.tarray(uuid_writer, p["base_ids"])
    if p["group_type"] in ["EPalGroupType::Guild", "EPalGroupType::IndependentGuild"]:
        writer.i32(p["base_camp_level"])
        writer.tarray(uuid_writer, p["map_object_instance_ids_base_camp_points"])
        writer.fstring(p["guild_name"])
    if p["group_type"] == "EPalGroupType::IndependentGuild":
        writer.guid(p["player_uid"])
        writer.fstring(p["guild_name_2"])
        writer.i64(p["player_info"]["last_online_real_time"])
        writer.fstring(p["player_info"]["player_name"])
    if p["group_type"] == "EPalGroupType::Guild":
        writer.guid(p["admin_player_uid"])
        writer.i32(len(p["players"]))
        # Idiom fix: iterate the players directly instead of indexing via
        # range(len(...)); emission order is unchanged.
        for player in p["players"]:
            writer.guid(player["player_uid"])
            writer.i64(player["player_info"]["last_online_real_time"])
            writer.fstring(player["player_info"]["player_name"])
    encoded_bytes = writer.bytes()
    return encoded_bytes
|
||||
|
|
@ -0,0 +1,51 @@
|
|||
from typing import Any, Sequence
|
||||
|
||||
from lib.archive import *
|
||||
|
||||
|
||||
def decode(
    reader: FArchiveReader, type_name: str, size: int, path: str
) -> dict[str, Any]:
    """Decode the permission ArrayProperty and parse its raw bytes."""
    if type_name != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {type_name}")
    decoded = reader.property(type_name, size, path, allow_custom=False)
    raw = decoded["value"]["values"]
    decoded["value"] = decode_bytes(raw)
    return decoded
|
||||
|
||||
|
||||
def decode_bytes(c_bytes: Sequence[int]) -> dict[str, Any]:
    """Parse three permission arrays (two byte arrays + item static ids).

    Returns None for an empty payload.
    """
    if not c_bytes:
        return None
    rd = FArchiveReader(bytes(c_bytes))
    parsed = {
        "permission": {
            "type_a": rd.tarray(lambda r: r.byte()),
            "type_b": rd.tarray(lambda r: r.byte()),
            "item_static_ids": rd.tarray(lambda r: r.fstring()),
        }
    }
    if not rd.eof():
        raise Exception("Warning: EOF not reached")
    return parsed
|
||||
|
||||
|
||||
def encode(
    writer: FArchiveWriter, property_type: str, properties: dict[str, Any]
) -> int:
    """Re-encode the decoded permission ArrayProperty and write it out."""
    if property_type != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {property_type}")
    del properties["custom_type"]
    raw = encode_bytes(properties["value"])
    properties["value"] = {"values": list(raw)}
    return writer.property_inner(property_type, properties)
|
||||
|
||||
|
||||
def encode_bytes(p: dict[str, Any]) -> bytes:
    """Serialize the permission arrays; empty payload for None."""
    if p is None:
        return bytes()
    out = FArchiveWriter()
    perm = p["permission"]
    out.tarray(lambda w, d: w.byte(d), perm["type_a"])
    out.tarray(lambda w, d: w.byte(d), perm["type_b"])
    out.tarray(lambda w, d: w.fstring(d), perm["item_static_ids"])
    return out.bytes()
|
||||
|
|
@ -0,0 +1,53 @@
|
|||
from typing import Any, Sequence
|
||||
|
||||
from lib.archive import *
|
||||
|
||||
|
||||
def decode(
    reader: FArchiveReader, type_name: str, size: int, path: str
) -> dict[str, Any]:
    """Decode the ArrayProperty and parse its raw permission payload."""
    if type_name != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {type_name}")
    decoded = reader.property(type_name, size, path, allow_custom=False)
    raw = decoded["value"]["values"]
    decoded["value"] = decode_bytes(raw)
    return decoded
|
||||
|
||||
|
||||
def decode_bytes(c_bytes: Sequence[int]) -> dict[str, Any]:
    """Parse permission arrays followed by a corruption-progress float.

    Returns None for an empty payload.
    """
    if not c_bytes:
        return None
    rd = FArchiveReader(bytes(c_bytes))
    parsed = {
        "permission": {
            "type_a": rd.tarray(lambda r: r.byte()),
            "type_b": rd.tarray(lambda r: r.byte()),
            "item_static_ids": rd.tarray(lambda r: r.fstring()),
        }
    }
    parsed["corruption_progress_value"] = rd.float()
    if not rd.eof():
        raise Exception("Warning: EOF not reached")
    return parsed
|
||||
|
||||
|
||||
def encode(
    writer: FArchiveWriter, property_type: str, properties: dict[str, Any]
) -> int:
    """Re-encode the decoded ArrayProperty payload and write it out."""
    if property_type != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {property_type}")
    del properties["custom_type"]
    raw = encode_bytes(properties["value"])
    properties["value"] = {"values": list(raw)}
    return writer.property_inner(property_type, properties)
|
||||
|
||||
|
||||
def encode_bytes(p: dict[str, Any]) -> bytes:
    """Serialize permission arrays plus the corruption float; empty for None."""
    if p is None:
        return bytes()
    out = FArchiveWriter()
    perm = p["permission"]
    out.tarray(lambda w, d: w.byte(d), perm["type_a"])
    out.tarray(lambda w, d: w.byte(d), perm["type_b"])
    out.tarray(lambda w, d: w.fstring(d), perm["item_static_ids"])
    out.float(p["corruption_progress_value"])
    return out.bytes()
|
||||
|
|
@ -0,0 +1,63 @@
|
|||
from typing import Any, Sequence
|
||||
|
||||
from lib.archive import *
|
||||
|
||||
# def decode_map_concrete_model(
|
||||
# reader: FArchiveReader, type_name: str, size: int, path: str
|
||||
# ) -> dict[str, Any]:
|
||||
# if type_name != "StructProperty":
|
||||
# raise Exception(f"Expected StructProperty, got {type_name}")
|
||||
# value = reader.property(type_name, size, path, allow_custom=False)
|
||||
# # Decode the raw bytes for the map object and replace the raw data
|
||||
# raw_bytes = value["value"]["RawData"]["value"]["values"]
|
||||
# print("".join(f"{b:02x}" for b in raw_bytes))
|
||||
# # value["value"]["RawData"]["value"] = decode_map_concrete_model_bytes(raw_bytes)
|
||||
# # Decode the raw bytes for the module map and replace the raw data
|
||||
# # group_map = value["value"]
|
||||
# # for group in group_map:
|
||||
# # group_type = group["value"]["GroupType"]["value"]["value"]
|
||||
# # group_bytes = group["value"]["RawData"]["value"]["values"]
|
||||
# # group["value"]["RawData"]["value"] = decode_map_concrete_model_bytes(
|
||||
# # group_bytes, group_type
|
||||
# # )
|
||||
# # EPalMapObjectConcreteModelModuleType::None = 0,
|
||||
# # EPalMapObjectConcreteModelModuleType::ItemContainer = 1,
|
||||
# # EPalMapObjectConcreteModelModuleType::CharacterContainer = 2,
|
||||
# # EPalMapObjectConcreteModelModuleType::Workee = 3,
|
||||
# # EPalMapObjectConcreteModelModuleType::Energy = 4,
|
||||
# # EPalMapObjectConcreteModelModuleType::StatusObserver = 5,
|
||||
# # EPalMapObjectConcreteModelModuleType::ItemStack = 6,
|
||||
# # EPalMapObjectConcreteModelModuleType::Switch = 7,
|
||||
# # EPalMapObjectConcreteModelModuleType::PlayerRecord = 8,
|
||||
# # EPalMapObjectConcreteModelModuleType::BaseCampPassiveEffect = 9,
|
||||
# # EPalMapObjectConcreteModelModuleType::PasswordLock = 10,
|
||||
# return value
|
||||
|
||||
|
||||
# def decode_map_concrete_model_bytes(m_bytes: Sequence[int]) -> dict[str, Any]:
|
||||
# if len(m_bytes) == 0:
|
||||
# return None
|
||||
# reader = FArchiveReader(bytes(m_bytes))
|
||||
# map_concrete_model = {}
|
||||
|
||||
# if not reader.eof():
|
||||
# raise Exception("Warning: EOF not reached")
|
||||
# return map_concrete_model
|
||||
|
||||
|
||||
# def encode_map_concrete_model(
|
||||
# writer: FArchiveWriter, property_type: str, properties: dict[str, Any]
|
||||
# ) -> int:
|
||||
# if property_type != "MapProperty":
|
||||
# raise Exception(f"Expected MapProperty, got {property_type}")
|
||||
# del properties["custom_type"]
|
||||
# # encoded_bytes = encode_map_concrete_model_bytes(properties["value"]["RawData"]["value"])
|
||||
# # properties["value"]["RawData"]["value"] = {"values": [b for b in encoded_bytes]}
|
||||
# return writer.property_inner(property_type, properties)
|
||||
|
||||
|
||||
# def encode_map_concrete_model_bytes(p: dict[str, Any]) -> bytes:
|
||||
# writer = FArchiveWriter()
|
||||
|
||||
# encoded_bytes = writer.bytes()
|
||||
# return encoded_bytes
|
||||
|
|
@ -0,0 +1,81 @@
|
|||
from typing import Any, Sequence
|
||||
|
||||
from lib.archive import *
|
||||
|
||||
|
||||
def decode(
    reader: FArchiveReader, type_name: str, size: int, path: str
) -> dict[str, Any]:
    """Decode the map-object ArrayProperty and parse its raw bytes."""
    if type_name != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {type_name}")
    decoded = reader.property(type_name, size, path, allow_custom=False)
    raw = decoded["value"]["values"]
    decoded["value"] = decode_bytes(raw)
    return decoded
|
||||
|
||||
|
||||
def decode_bytes(m_bytes: Sequence[int]) -> dict[str, Any]:
    """Parse a map-object payload: a fixed sequence of GUIDs, hp, transform,
    ownership ids, restriction byte, stage id, and creation timestamp."""
    reader = FArchiveReader(bytes(m_bytes))
    data = {}
    data["instance_id"] = reader.guid()
    data["concrete_model_instance_id"] = reader.guid()
    data["base_camp_id_belong_to"] = reader.guid()
    data["group_id_belong_to"] = reader.guid()
    data["hp"] = {
        "current": reader.i32(),
        "max": reader.i32(),
    }
    # NOTE(review): "initital" is misspelled, but it is the established key
    # name (encode_bytes uses it too); renaming would break existing JSON.
    data["initital_transform_cache"] = reader.ftransform()
    data["repair_work_id"] = reader.guid()
    data["owner_spawner_level_object_instance_id"] = reader.guid()
    data["owner_instance_id"] = reader.guid()
    data["build_player_uid"] = reader.guid()
    data["interact_restrict_type"] = reader.byte()
    data["stage_instance_id_belong_to"] = {
        "id": reader.guid(),
        # u32 flag collapsed to a bool for the JSON representation.
        "valid": reader.u32() > 0,
    }
    data["created_at"] = reader.i64()
    if not reader.eof():
        raise Exception("Warning: EOF not reached")
    return data
|
||||
|
||||
|
||||
def encode(
    writer: FArchiveWriter, property_type: str, properties: dict[str, Any]
) -> int:
    """Re-encode the decoded map-object ArrayProperty and write it out."""
    if property_type != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {property_type}")
    del properties["custom_type"]
    raw = encode_bytes(properties["value"])
    properties["value"] = {"values": list(raw)}
    return writer.property_inner(property_type, properties)
|
||||
|
||||
|
||||
def encode_bytes(p: dict[str, Any]) -> bytes:
    """Serialize decoded map-object data back to raw bytes.

    Field order mirrors decode_bytes exactly, including the misspelled but
    load-bearing "initital_transform_cache" key.
    """
    writer = FArchiveWriter()

    writer.guid(p["instance_id"])
    writer.guid(p["concrete_model_instance_id"])
    writer.guid(p["base_camp_id_belong_to"])
    writer.guid(p["group_id_belong_to"])

    writer.i32(p["hp"]["current"])
    writer.i32(p["hp"]["max"])

    writer.ftransform(p["initital_transform_cache"])

    writer.guid(p["repair_work_id"])
    writer.guid(p["owner_spawner_level_object_instance_id"])
    writer.guid(p["owner_instance_id"])
    writer.guid(p["build_player_uid"])

    writer.byte(p["interact_restrict_type"])

    writer.guid(p["stage_instance_id_belong_to"]["id"])
    # The bool is expanded back to the on-disk u32 flag.
    writer.u32(1 if p["stage_instance_id_belong_to"]["valid"] else 0)

    writer.i64(p["created_at"])

    encoded_bytes = writer.bytes()
    return encoded_bytes
|
||||
|
|
@ -0,0 +1,43 @@
|
|||
from typing import Any, Sequence
|
||||
|
||||
from lib.archive import *
|
||||
|
||||
|
||||
def decode(
    reader: FArchiveReader, type_name: str, size: int, path: str
) -> dict[str, Any]:
    """Decode the work-collection ArrayProperty and parse its raw bytes."""
    if type_name != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {type_name}")
    decoded = reader.property(type_name, size, path, allow_custom=False)
    raw = decoded["value"]["values"]
    decoded["value"] = decode_bytes(raw)
    return decoded
|
||||
|
||||
|
||||
def decode_bytes(b_bytes: Sequence[int]) -> dict[str, Any]:
    """Parse a work-collection payload: owner GUID plus an array of work GUIDs."""
    rd = FArchiveReader(bytes(b_bytes))
    parsed = {
        "id": rd.guid(),
        "work_ids": rd.tarray(uuid_reader),
    }
    if not rd.eof():
        raise Exception("Warning: EOF not reached")
    return parsed
|
||||
|
||||
|
||||
def encode(
    writer: FArchiveWriter, property_type: str, properties: dict[str, Any]
) -> int:
    """Re-encode the decoded work-collection ArrayProperty and write it out."""
    if property_type != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {property_type}")
    del properties["custom_type"]
    raw = encode_bytes(properties["value"])
    properties["value"] = {"values": list(raw)}
    return writer.property_inner(property_type, properties)
|
||||
|
||||
|
||||
def encode_bytes(p: dict[str, Any]) -> bytes:
    """Serialize the owner GUID followed by the work GUID array."""
    out = FArchiveWriter()
    out.guid(p["id"])
    out.tarray(uuid_writer, p["work_ids"])
    return out.bytes()
|
||||
|
|
@ -0,0 +1,49 @@
|
|||
from typing import Any, Sequence
|
||||
|
||||
from lib.archive import *
|
||||
|
||||
|
||||
def decode(
    reader: FArchiveReader, type_name: str, size: int, path: str
) -> dict[str, Any]:
    """Decode the base-camp ArrayProperty and parse its raw bytes."""
    if type_name != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {type_name}")
    decoded = reader.property(type_name, size, path, allow_custom=False)
    raw = decoded["value"]["values"]
    decoded["value"] = decode_bytes(raw)
    return decoded
|
||||
|
||||
|
||||
def decode_bytes(b_bytes: Sequence[int]) -> dict[str, Any]:
    """Parse a base-camp payload: id, spawn transform, two state bytes, container id."""
    rd = FArchiveReader(bytes(b_bytes))
    # Dict-literal values evaluate left-to-right, preserving read order.
    parsed = {
        "id": rd.guid(),
        "spawn_transform": rd.ftransform(),
        "current_order_type": rd.byte(),
        "current_battle_type": rd.byte(),
        "container_id": rd.guid(),
    }
    if not rd.eof():
        raise Exception("Warning: EOF not reached")
    return parsed
|
||||
|
||||
|
||||
def encode(
    writer: FArchiveWriter, property_type: str, properties: dict[str, Any]
) -> int:
    """Re-encode the decoded base-camp ArrayProperty and write it out."""
    if property_type != "ArrayProperty":
        raise Exception(f"Expected ArrayProperty, got {property_type}")
    del properties["custom_type"]
    raw = encode_bytes(properties["value"])
    properties["value"] = {"values": list(raw)}
    return writer.property_inner(property_type, properties)
|
||||
|
||||
|
||||
def encode_bytes(p: dict[str, Any]) -> bytes:
    """Serialize base-camp data in the order the decoder reads it."""
    out = FArchiveWriter()
    out.guid(p["id"])
    out.ftransform(p["spawn_transform"])
    out.byte(p["current_order_type"])
    out.byte(p["current_battle_type"])
    out.guid(p["container_id"])
    return out.bytes()
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
# These are dependencies only for tests
|
||||
# Default usage of the library must not rely on any external dependencies!
|
||||
parameterized==0.9.0
|
||||
|
|
@ -0,0 +1,36 @@
|
|||
import unittest
|
||||
|
||||
from parameterized import parameterized
|
||||
|
||||
from lib.archive import FArchiveReader, FArchiveWriter
|
||||
|
||||
|
||||
class TestArchive(unittest.TestCase):
    """Round-trip tests for FArchive packed-vector serialization."""

    @parameterized.expand(
        [
            (1.0, 1.0, 1.0),
            (0.0, 0.0, 0.0),
            (-1.0, -1.0, -1.0),
            (0.0, 0.0, 1.0),
            (0.0, 1.0, 0.0),
            (1.0, 0.0, 0.0),
            (0.0, 0.0, -1.0),
            (0.0, -1.0, 0.0),
            (-107929.0, -1815, 682),
            (107929, 1815, 682),
            (107929, -1815, -682),
            (-107929, 1815, -682),
            (12345678.0, -12345678.0, 12345678.0),
            (-12345678.0, 12345678.0, -12345678.0),
            (12345678.0, 12345678.0, -12345678.0),
            (-12345678.0, -12345678.0, 12345678.0),
        ]
    )
    def test_packed_vector_roundtrip(self, x, y, z):
        """Write a vector with packed_vector(1, ...) and assert it reads back exactly."""
        writer = FArchiveWriter()
        writer.packed_vector(1, x, y, z)
        reader = FArchiveReader(writer.bytes())
        x_e, y_e, z_e = reader.packed_vector(1)
        self.assertEqual(x, x_e)
        self.assertEqual(y, y_e)
        self.assertEqual(z, z_e)
|
||||
|
|
@ -0,0 +1,108 @@
|
|||
import contextlib
|
||||
import os
|
||||
import subprocess
|
||||
import unittest
|
||||
|
||||
from parameterized import parameterized
|
||||
|
||||
|
||||
class TestCliScripts(unittest.TestCase):
    """End-to-end tests driving the convert.py CLI on real save files.

    Each case converts a .sav to JSON and back twice, then checks that the
    two independently re-encoded .sav files are byte-identical.
    """

    @parameterized.expand(
        [
            ("Level.sav"),
            ("Level-tricky-unicode-player-name.sav"),
            ("LevelMeta.sav"),
            ("LocalData.sav"),
            ("WorldOption.sav"),
            ("00000000000000000000000000000001.sav"),
            ("unicode-saves/Level.sav"),
            ("unicode-saves/LevelMeta.sav"),
            ("unicode-saves/LocalData.sav"),
            ("unicode-saves/WorldOption.sav"),
            ("unicode-saves/00000000000000000000000000000001.sav"),
            ("larger-saves/Level.sav"),
            ("larger-saves/LocalData.sav"),
            ("larger-saves/00000000000000000000000000000001.sav"),
        ]
    )
    def test_sav_roundtrip(self, file_name):
        base_name = os.path.basename(file_name)
        dir_name = os.path.dirname(file_name)
        prefix = f"tests/testdata/{dir_name}/"

        def convert(path):
            # Invoke the CLI converter and require a clean exit.
            result = subprocess.run(["python3", "convert.py", path])
            self.assertEqual(result.returncode, 0)

        try:
            # Convert sav to JSON
            convert(f"{prefix}{base_name}")
            self.assertTrue(os.path.exists(f"{prefix}{base_name}.json"))

            # Convert JSON back to sav
            os.rename(
                f"{prefix}{base_name}.json",
                f"{prefix}1-{base_name}.json",
            )
            convert(f"{prefix}1-{base_name}.json")
            self.assertTrue(os.path.exists(f"{prefix}1-{base_name}"))

            # Reconvert sav back to JSON
            os.rename(
                f"{prefix}1-{base_name}",
                f"{prefix}2-{base_name}",
            )
            convert(f"{prefix}2-{base_name}")
            self.assertTrue(os.path.exists(f"{prefix}2-{base_name}.json"))

            # Reconvert JSON back to sav
            os.rename(
                f"{prefix}2-{base_name}.json",
                f"{prefix}3-{base_name}.json",
            )
            convert(f"{prefix}3-{base_name}.json")
            self.assertTrue(os.path.exists(f"{prefix}3-{base_name}"))

            # Compare the final sav to the intermediate sav: the encoder
            # must be deterministic across round-trips.
            with open(f"{prefix}2-{base_name}", "rb") as f:
                intermediate_data = f.read()
            with open(f"{prefix}3-{base_name}", "rb") as f:
                final_data = f.read()
            self.assertEqual(intermediate_data, final_data)
        finally:
            # Best-effort cleanup of every intermediate artifact.
            for leftover in (
                f"{prefix}{base_name}.json",
                f"{prefix}1-{base_name}",
                f"{prefix}1-{base_name}.json",
                f"{prefix}2-{base_name}",
                f"{prefix}2-{base_name}.json",
                f"{prefix}3-{base_name}",
                f"{prefix}3-{base_name}.json",
            ):
                with contextlib.suppress(FileNotFoundError):
                    os.remove(leftover)
|
||||
|
|
@ -0,0 +1,163 @@
|
|||
import base64
|
||||
import json
|
||||
import unittest
|
||||
from uuid import UUID
|
||||
|
||||
from parameterized import parameterized
|
||||
|
||||
from lib.archive import FArchiveReader, FArchiveWriter
|
||||
from lib.gvas import GvasFile, GvasHeader
|
||||
from lib.noindent import CustomEncoder
|
||||
from lib.palsav import decompress_sav_to_gvas
|
||||
from lib.paltypes import PALWORLD_CUSTOM_PROPERTIES, PALWORLD_TYPE_HINTS
|
||||
|
||||
|
||||
class TestGvas(unittest.TestCase):
    # Tests for GVAS header parsing and full-file round-trips through the
    # JSON dump/load path.

    def test_header(self):
        """Parse a known-good GVAS header and re-encode it losslessly."""
        # Raw header bytes (base64) — the expected dict below must match
        # this blob field-for-field, so do not edit either side alone.
        test_data = base64.b64decode(
            "R1ZBUwMAAAAKAgAA8AMAAAUAAQABAAAAAAASAAAAKytVRTUrUmVsZWFzZS01LjEAAwAAAEUAAACn+9JA5UxIS3VaOLCeSU6IBwAAAPp69fyDQnZQWOapuTItoP9MAAAAe0clCQFAPXZz1pGdEbR1CwEAAAAbIYhCxhZIRbJndhoAKnpQAQAAAMzOuRoTaQAAdUgAAPtRPSBkAAAAIZLvTDrUDkeMPWB+JleZFgEAAAB+fHHi00T1UkBTDJVeAxWzBwAAAO0KMRFhTVUuo5pnrywIocURAAAA+wyCp1lDpyAULFSMUM8jlhUAAAB4u9/25KBQu024GEAjr8tgAgAAAPN6uySDT0ZWwi0vH/+WrUkFAAAAKSOldrVFIwlB2K6Y2GovzwUAAAAHabxfrkDIVYTxZ44/8f9eAQAAAE5854KlQyMzxRNrtPMNMZcAAAAAbPb8D5lIkBH4nGCxXkdGSgEAAAAi1VScvk8mqEYHIZTQgrRhLAAAAOQy2LANT4kft37PrKJK/TYKAAAAKEPG4VNNLKKGjmyjjL0XZAAAAAA8wV43+0jkBvCEALV+cSomBAAAAO1osOTpQpT0C9oxokG7Ri4oAAAAP3T8z4BEsEPfFJGTcyAdFyUAAAC1SSuw6UQgu7cyBKNgA+RSAwAAAFwQ5KS1SaFZxEDFp+7fflQAAAAAyTHIOdxH5loXnESafI4cPgAAAAAzG/B4mE/q6+qEtLmiWrnMFAAAAA84MWbgQ00tJ88JgFqpVmkAAAAAn4v4EvxKdYgM2XymKb06OC0AAABM51p7EExw0phXWKlaKiELDQAAABhpKdfdS9YdqGTinYQ4wTwDAAAAeFKhwv5K57//kBdsVfcdUwEAAADUo6xuwUzsQO2LhrfFj0IJAwAAAN115SknRqPgdtIQnercLCMRAAAAXaZDr0dJ03+OPnOYBbvB2Q8AAADsbCZrj0vHHtnkC6MH/EIJAQAAAGE99w3qRz+i6Yknt5pJQQwBAAAAhhgdYIRPZKze0xaq1sfqDVAAAAC3Bkxb+EpjJHC/W4Dd0PXNCgAAAGhjCOdYTCNrcBs5hJFeJhYEAAAA1rz/nVgBT0mCEiHiiKiSPAoAAACs0K7yb0H+mn+qZIb81ib6AQAAAAsfTxelRca06C4/sX2R+9AKAAAAg0r5NWxAWOL1CRijfCQQlikAAABuwY+24kIbi1whU7T+RIgFAQAAAAaF4bLCz3NCu/ROpQe6i3UBAAAANon1ZLpCG/2Jcpa6TvrQ1QEAAACB1X1pq0FP5uxRSqootre+WAAAAEJem9hGTb0kqKwShHkXZN8pAAAAUl3aWUhJMhJ4WXi4i+m4cAgAAAAyWgcmCEcPczKM6YgFnVnxAAAAACfYDm+VSAmmjZmRnKQOGJACAAAA44vVMIJC6pVZseOmarDr2AEAAADnnn9xOkmw6TKRs4gHgTgbEAAAABlNDENwSVRxaZtph+WwkN8PAAAAvTL+qhRMlVMlXmq23dEyEAEAAACO4a8jWE7hTFLCYY23vlO5CwAAAOq3YqQ6Tpn0H+zBmbLhJIIEAAAAvf21LhBNrAGP8zaB2qWTMwUAAABPNZ1QL0nm9rKFSaccYzwHAAAAABwb47bsEZ/ShZ9+heJwmW8BAAAAQOtWStwR9RB+NNOS52rJsgIAAAAASorXl0ZY6LUZqLq0Rn1IEgAAAIb4eVUfTDqTewi6gy+5YWMCAAAAUr4vYQtAU9qRTw2RfIWxnwEAAAA2eiOkyUHqyvgYoo/zG2hYBQAAAHU/ToBJS4hwBozWpNy2fjwFAAAA9EjQHmhMLi+kU9CJLRCP8QEAAADyCmj7o0vvWbUZqLo9RMhzAgAAAA63UJkXThq0DfrMu9Z/gVcBAAAAllGWq/wI2EWNIte3nlateAEAAAAdAAAAL1NjcmlwdC9QYWwuUGFsV29ybGRTYXZlR2FtZQA="
        )
        reader = FArchiveReader(test_data)
        header = GvasHeader.read(reader)
        # Expected parse result, including the full custom-version table
        # (GUID, version) pairs embedded in the header blob.
        expected_header = {
            "magic": 1396790855,
            "save_game_version": 3,
            "package_file_version_ue4": 522,
            "package_file_version_ue5": 1008,
            "engine_version_major": 5,
            "engine_version_minor": 1,
            "engine_version_patch": 1,
            "engine_version_changelist": 0,
            "engine_version_branch": "++UE5+Release-5.1",
            "custom_version_format": 3,
            "custom_versions": [
                (UUID("40d2fba7-4b48-4ce5-b038-5a75884e499e"), 7),
                (UUID("fcf57afa-5076-4283-b9a9-e658ffa02d32"), 76),
                (UUID("0925477b-763d-4001-9d91-d6730b75b411"), 1),
                (UUID("4288211b-4548-16c6-1a76-67b2507a2a00"), 1),
                (UUID("1ab9cecc-0000-6913-0000-4875203d51fb"), 100),
                (UUID("4cef9221-470e-d43a-7e60-3d8c16995726"), 1),
                (UUID("e2717c7e-52f5-44d3-950c-5340b315035e"), 7),
                (UUID("11310aed-2e55-4d61-af67-9aa3c5a1082c"), 17),
                (UUID("a7820cfb-20a7-4359-8c54-2c149623cf50"), 21),
                (UUID("f6dfbb78-bb50-a0e4-4018-b84d60cbaf23"), 2),
                (UUID("24bb7af3-5646-4f83-1f2f-2dc249ad96ff"), 5),
                (UUID("76a52329-0923-45b5-98ae-d841cf2f6ad8"), 5),
                (UUID("5fbc6907-55c8-40ae-8e67-f1845efff13f"), 1),
                (UUID("82e77c4e-3323-43a5-b46b-13c597310df3"), 0),
                (UUID("0ffcf66c-1190-4899-b160-9cf84a46475e"), 1),
                (UUID("9c54d522-a826-4fbe-9421-074661b482d0"), 44),
                (UUID("b0d832e4-1f89-4f0d-accf-7eb736fd4aa2"), 10),
                (UUID("e1c64328-a22c-4d53-a36c-8e866417bd8c"), 0),
                (UUID("375ec13c-06e4-48fb-b500-84f0262a717e"), 4),
                (UUID("e4b068ed-f494-42e9-a231-da0b2e46bb41"), 40),
                (UUID("cffc743f-43b0-4480-9391-14df171d2073"), 37),
                (UUID("b02b49b5-bb20-44e9-a304-32b752e40360"), 3),
                (UUID("a4e4105c-59a1-49b5-a7c5-40c4547edfee"), 0),
                (UUID("39c831c9-5ae6-47dc-9a44-9c173e1c8e7c"), 0),
                (UUID("78f01b33-ebea-4f98-b9b4-84eaccb95aa2"), 20),
                (UUID("6631380f-2d4d-43e0-8009-cf276956a95a"), 0),
                (UUID("12f88b9f-8875-4afc-a67c-d90c383abd29"), 45),
                (UUID("7b5ae74c-d270-4c10-a958-57980b212a5a"), 13),
                (UUID("d7296918-1dd6-4bdd-9de2-64a83cc13884"), 3),
                (UUID("c2a15278-bfe7-4afe-6c17-90ff531df755"), 1),
                (UUID("6eaca3d4-40ec-4cc1-b786-8bed09428fc5"), 3),
                (UUID("29e575dd-e0a3-4627-9d10-d276232cdcea"), 17),
                (UUID("af43a65d-7fd3-4947-9873-3e8ed9c1bb05"), 15),
                (UUID("6b266cec-1ec7-4b8f-a30b-e4d90942fc07"), 1),
                (UUID("0df73d61-a23f-47ea-b727-89e90c41499a"), 1),
                (UUID("601d1886-ac64-4f84-aa16-d3de0deac7d6"), 80),
                (UUID("5b4c06b7-2463-4af8-805b-bf70cdf5d0dd"), 10),
                (UUID("e7086368-6b23-4c58-8439-1b7016265e91"), 4),
                (UUID("9dffbcd6-494f-0158-e221-12823c92a888"), 10),
                (UUID("f2aed0ac-9afe-416f-8664-aa7ffa26d6fc"), 1),
                (UUID("174f1f0b-b4c6-45a5-b13f-2ee8d0fb917d"), 10),
                (UUID("35f94a83-e258-406c-a318-09f59610247c"), 41),
                (UUID("b68fc16e-8b1b-42e2-b453-215c058844fe"), 1),
                (UUID("b2e18506-4273-cfc2-a54e-f4bb758bba07"), 1),
                (UUID("64f58936-fd1b-42ba-ba96-7289d5d0fa4e"), 1),
                (UUID("697dd581-e64f-41ab-aa4a-51ecbeb7b628"), 88),
                (UUID("d89b5e42-24bd-4d46-8412-aca8df641779"), 41),
                (UUID("59da5d52-1232-4948-b878-597870b8e98b"), 8),
                (UUID("26075a32-730f-4708-88e9-8c32f1599d05"), 0),
                (UUID("6f0ed827-a609-4895-9c91-998d90180ea4"), 2),
                (UUID("30d58be3-95ea-4282-a6e3-b159d8ebb06a"), 1),
                (UUID("717f9ee7-e9b0-493a-88b3-91321b388107"), 16),
                (UUID("430c4d19-7154-4970-8769-9b69df90b0e5"), 15),
                (UUID("aafe32bd-5395-4c14-b66a-5e251032d1dd"), 1),
                (UUID("23afe18e-4ce1-4e58-8d61-c252b953beb7"), 11),
                (UUID("a462b7ea-f499-4e3a-99c1-ec1f8224e1b2"), 4),
                (UUID("2eb5fdbd-01ac-4d10-8136-f38f3393a5da"), 5),
                (UUID("509d354f-f6e6-492f-a749-85b2073c631c"), 0),
                (UUID("b6e31b1c-d29f-11ec-857e-9f856f9970e2"), 1),
                (UUID("4a56eb40-10f5-11dc-92d3-347eb2c96ae7"), 2),
                (UUID("d78a4a00-e858-4697-baa8-19b5487d46b4"), 18),
                (UUID("5579f886-933a-4c1f-83ba-087b6361b92f"), 2),
                (UUID("612fbe52-da53-400b-910d-4f919fb1857c"), 1),
                (UUID("a4237a36-caea-41c9-8fa2-18f858681bf3"), 5),
                (UUID("804e3f75-7088-4b49-a4d6-8c063c7eb6dc"), 5),
                (UUID("1ed048f4-2f2e-4c68-89d0-53a4f18f102d"), 1),
                (UUID("fb680af2-59ef-4ba3-baa8-19b573c8443d"), 2),
                (UUID("9950b70e-b41a-4e17-bbcc-fa0d57817fd6"), 1),
                (UUID("ab965196-45d8-08fc-b7d7-228d78ad569e"), 1),
            ],
            "save_game_class_name": "/Script/Pal.PalWorldSaveGame",
        }
        self.assertEqual(
            header.dump(), expected_header, "header does not match expected"
        )
        # Re-encoding the parsed header must reproduce the exact input bytes.
        writer = FArchiveWriter()
        header.write(writer)
        self.assertEqual(
            writer.bytes(), test_data, "header does not match expected after encoding"
        )

    @parameterized.expand(
        [
            ("Level.sav", "/Script/Pal.PalWorldSaveGame"),
            ("Level-tricky-unicode-player-name.sav", "/Script/Pal.PalWorldSaveGame"),
            ("LevelMeta.sav", "/Script/Pal.PalWorldBaseInfoSaveGame"),
            ("LocalData.sav", "/Script/Pal.PalLocalWorldSaveGame"),
            ("WorldOption.sav", "/Script/Pal.PalWorldOptionSaveGame"),
            (
                "00000000000000000000000000000001.sav",
                "/Script/Pal.PalWorldPlayerSaveGame",
            ),
            ("unicode-saves/Level.sav", "/Script/Pal.PalWorldSaveGame"),
            ("unicode-saves/LevelMeta.sav", "/Script/Pal.PalWorldBaseInfoSaveGame"),
            ("unicode-saves/LocalData.sav", "/Script/Pal.PalLocalWorldSaveGame"),
            ("unicode-saves/WorldOption.sav", "/Script/Pal.PalWorldOptionSaveGame"),
            (
                "unicode-saves/00000000000000000000000000000001.sav",
                "/Script/Pal.PalWorldPlayerSaveGame",
            ),
            ("larger-saves/Level.sav", "/Script/Pal.PalWorldSaveGame"),
            ("larger-saves/LocalData.sav", "/Script/Pal.PalLocalWorldSaveGame"),
            (
                "larger-saves/00000000000000000000000000000001.sav",
                "/Script/Pal.PalWorldPlayerSaveGame",
            ),
        ]
    )
    def test_sav_roundtrip(self, file_name, expected_save_game_class_name):
        """Decompress a .sav, dump it through JSON, reload, and re-encode.

        The re-encoded GVAS bytes must equal the originally decompressed
        bytes, proving the dump/load path is lossless for each save type.
        """
        with open("tests/testdata/" + file_name, "rb") as f:
            data = f.read()
        # Strip the sav container compression to get raw GVAS bytes.
        gvas_data, _ = decompress_sav_to_gvas(data)
        gvas_file = GvasFile.read(
            gvas_data, PALWORLD_TYPE_HINTS, PALWORLD_CUSTOM_PROPERTIES
        )
        self.assertEqual(
            gvas_file.header.dump()["save_game_class_name"],
            expected_save_game_class_name,
            "sav save_game_class_name does not match expected",
        )
        # Round-trip the parsed file through an actual JSON string, not
        # just a dict, so encoder/decoder quirks are exercised too.
        dump = gvas_file.dump()
        js = json.dumps(dump, cls=CustomEncoder)
        new_js = json.loads(js)
        new_gvas_file = GvasFile.load(new_js)
        new_gvas_data = new_gvas_file.write(PALWORLD_CUSTOM_PROPERTIES)
        self.assertEqual(
            gvas_data,
            new_gvas_data,
            "sav does not match expected after roundtrip",
        )
|
||||
File diff suppressed because one or more lines are too long
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Loading…
Reference in New Issue