# gck-map-extract-objects/obj2gbs.py
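"""Convert a Wavefront OBJ model to the GBS model format used by Giants: Citizen Kabuto.

Usage:
    python obj2gbs.py path/to/model.obj

The .gbs file is written next to the input file. Faces must be triangles (only
the first three vertex references of each face are read); vertex positions and
UVs are exported, and normals are left for the game to recalculate via
GBSFlagCalcNormals.
"""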

import argparse
import json
import os
import sys
from typing import List, Union

from lib.packet import Packet
from lib.fileutils import *
from lib.game import Vec3, VecRGB, cross

GBS_VERSION = 0xaa0100be

GBSFlagNormals = 0x0001
GBSFlagUVs = 0x0002
GBSFlagRGBs = 0x0004
GBSFlagCalcNormals = 0x0008
GBSFlagMaxLit = (1 << 31)
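

# data.json is expected to map each game .bin file to the object and texture
# names it contains; check() uses it to verify that the model and its textures
# will actually be loaded together by the game.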
def check(gbs_file, materials):
    # NOTE: validation is currently disabled by this early return. Remove it to
    # check the model and texture names against data.json.
    return
    with open("data.json") as fp:
        d = json.load(fp)
    possible_binfiles = []
    for binfile in d:
        if gbs_file in d[binfile]["objects"]:
            possible_binfiles.append(binfile)
    if not possible_binfiles:
        print("ERROR: %s was not found in any bin file. Your model won't be loaded by the game at all." % gbs_file)
        print("Please use a supported model name. Check the models archive for a valid name.")
        sys.exit(1)
    possible_texfiles = []
    for material in materials:
        tex = material.texture.replace(".tga", "")
        found_in = []
        for possible_binfile in possible_binfiles:
            if tex in d[possible_binfile]["textures"]:
                found_in.append(possible_binfile)
                possible_texfiles.append(possible_binfile)
        if not found_in:
            print("ERROR: texture %s is not a supported texture name or is referenced by another bin file which is not loaded with your object." % tex)
            print("Please check the textures archive for a valid name.")
            sys.exit(1)
    loaded = list(set(possible_texfiles) & set(possible_binfiles))
    print("INFO: Your model will be loaded when one of these is loaded on the map: %s" % ", ".join(loaded))


def resize(l: List, t, num):
    # Clear the list and refill it with `num` default-constructed instances of `t`.
    l.clear()
    for i in range(num):
        l.append(t())
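

# In-memory mirrors of the records stored in a GBS file: each MaxObj describes a
# vertex/normal range (seemingly one per exported 3ds Max object), and each
# SubObject carries the triangle index data and material settings for one
# drawable piece of the model.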
class FileMaxObj:
    def __init__(self):
        self.vstart: int = 0
        self.vcount: int = 0
        self.nstart: int = 0
        self.ncount: int = 0
        self.noffset: int = 0


class MaxObj(FileMaxObj):
    def __init__(self):
        super().__init__()
        self.fstart: int = 0
        self.fcount: int = 0
        self.sostart: int = 0
        self.socount: int = 0


class SubObject:
    def __init__(self):
        self.objname: str = ""
        self.maxobjindex: int = 0
        self.ntris: int = 0  # count of tridata (including preceding 'count' short)
        self.totaltris: int = 0
        self.tridata: List[int] = []  # unsigned short
        self.verticeref_start: int = 0
        self.verticeref_count: int = 0
        self.texname: str = ""
        self.bumptexture: str = ""
        self.falloff: float = 0
        self.blend: float = 0
        self.flags: int = 0
        self.emissive: int = 0
        self.ambient: int = 0
        self.diffuse: int = 0
        self.specular: int = 0
        self.power: int = 0


class UV:
    def __init__(self, u: float = 0, v: float = 0):
        self.u: float = u
        self.v: float = v


class OBJMaterial:
    def __init__(self):
        self.name = ""
        self.texture = ""


class OBJObject:
    def __init__(self):
        self.faces: List[OBJFace] = []
        self.name = "root"
        self.material: Union[None, OBJMaterial] = None
        self.vref_start = 0
        self.vref_count = 0


class OBJFace:
    def __init__(self):
        self.index_vertices: List[int] = []
        self.index_uvs: List[int] = []
        self.index_normals: List[int] = []
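

# Parse a .mtl material library. Only material names and their texture maps
# (map_Ka / map_Kd, with directory and extension stripped) are kept.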
def obj_read_materials(matlib_file) -> List[OBJMaterial]:
    materials = []
    curr_mat = None
    with open(matlib_file, "r") as fp:
        while line := fp.readline():
            line = line.strip()
            arr = line.split(" ")
            if arr[0] == "newmtl":
                if len(arr) <= 1:
                    curr_mat = None
                    continue
                mat = OBJMaterial()
                materials.append(mat)
                mat.name = arr[1].rstrip()
                curr_mat = mat
            if arr[0] == "map_Ka" or arr[0] == "map_Kd":
                if curr_mat:
                    matname_without_ext = "".join(arr[1:]).split("/")[-1]
                    matname_without_ext = "".join(matname_without_ext.split(".")[0:-1])
                    curr_mat.texture = matname_without_ext
                    # print("Set %s to %s" % (curr_mat.texture, curr_mat.name))
    return materials
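

# GbsData represents a GBS model. from_obj() builds the binary GBS payload
# directly from a Wavefront OBJ file and writes it next to the input.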
class GbsData:
    def __init__(self):
        self.name = ""
        self.optionsflags: int = 0
        self.nndefs: int = 0
        self.num_normals: int = 0
        self.normals: List[int] = []  # word
        self.indexed_normals: List[int] = []  # unsigned short
        self.num_vertices: int = 0
        self.vertices: List[Vec3] = []
        self.nsobjs: int = 0
        self.nmobjs: int = 0
        self.indexed_vertices: List[int] = []  # unsigned short
        self.vertrgb: List[VecRGB] = []
        self.nverts: int = 0
        self.vertuv: List[UV] = []
        self.MaxObjs: List[MaxObj] = []
        self.SubObjs: List[SubObject] = []
    @staticmethod
    def evaluate_tridata(tridata, tri_idx, count):
        # tridata is a sequence of triangle lists: a triangle count followed by
        # three vertex indices per triangle; a zero count ends the data.
        if count == 0:
            count = tridata[0]
            if count == 0:
                return False
            tri_idx = 0
        v1 = tridata[tri_idx + 1]
        v2 = tridata[tri_idx + 2]
        v3 = tridata[tri_idx + 3]
        tri_idx += 3
        count -= 1
        if count < 0:
            count = 0xffff  # max unsigned short
        if count == 0:
            tridata = tridata[tridata[0] * 3 + 1:]
        return tridata, tri_idx, count, v1, v2, v3
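
    # Compute smooth per-vertex normals by accumulating and normalizing the face
    # cross products; this mirrors what the game presumably does when
    # GBSFlagCalcNormals is set.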
    def generate_normals(self):
        normals: List[Vec3] = []
        resize(normals, Vec3, self.num_vertices)
        for subobj in self.SubObjs:
            tridata = subobj.tridata
            values = self.evaluate_tridata(tridata, -1, 0)
            while values:
                tridata, tri_idx, count, v1, v2, v3 = values
                # Face normal from the cross product of two edge vectors.
                edge1 = self.vertices[self.indexed_vertices[v2]] - self.vertices[self.indexed_vertices[v1]]
                edge2 = self.vertices[self.indexed_vertices[v3]] - self.vertices[self.indexed_vertices[v1]]
                p = cross(edge1, edge2)
                normals[self.indexed_vertices[v1]] += p
                normals[self.indexed_vertices[v2]] += p
                normals[self.indexed_vertices[v3]] += p
                values = self.evaluate_tridata(tridata, tri_idx, count)
        for i in range(len(normals)):
            normals[i] = normals[i].normalize()
        return normals
    @staticmethod
    def from_obj(obj_file):
        materials = []
        vertices: List[Vec3] = []
        uvs: List[UV] = []
        normals: List[Vec3] = []
        objects: List[OBJObject] = []
        root_obj = OBJObject()
        max_objs: List[MaxObj] = []
        objects.append(root_obj)
        last_material = None
        current_object = root_obj
        uv_ind = {}
        with open(obj_file, "r") as obj_fp:
            while line := obj_fp.readline():
                arr = line.split(" ")
                if arr[0] == "v":
                    v = Vec3(float(arr[1]), float(arr[2]), float(arr[3]))
                    vertices.append(v)
                if arr[0] == "vt":
                    v = UV(float(arr[1]), float(arr[2]))
                    uvs.append(v)
                if arr[0] == "vn":
                    v = Vec3(float(arr[1]), float(arr[2]), float(arr[3]))
                    normals.append(v)
                if arr[0] == "f":
                    f = OBJFace()
                    f1_s = arr[1].split("/")
                    f2_s = arr[2].split("/")
                    f3_s = arr[3].split("/")
                    if len(f1_s) == len(f2_s) == len(f3_s) >= 1:
                        # face only has vertex index data
                        v1_index = int(f1_s[0])
                        v2_index = int(f2_s[0])
                        v3_index = int(f3_s[0])
                        f.index_vertices.append(v1_index)
                        f.index_vertices.append(v2_index)
                        f.index_vertices.append(v3_index)
                    if len(f1_s) == len(f2_s) == len(f3_s) >= 2:
                        # face has vertex index and uv
                        v1_uv_index = int(f1_s[1])
                        v2_uv_index = int(f2_s[1])
                        v3_uv_index = int(f3_s[1])
                        uv_ind[v1_uv_index] = v1_index
                        uv_ind[v2_uv_index] = v2_index
                        uv_ind[v3_uv_index] = v3_index
                        f.index_uvs.append(v1_uv_index)
                        f.index_uvs.append(v2_uv_index)
                        f.index_uvs.append(v3_uv_index)
                    if len(f1_s) == len(f2_s) == len(f3_s) >= 3:
                        # face has vertex index and uv and normal
                        v1_normal_index = int(f1_s[2])
                        v2_normal_index = int(f2_s[2])
                        v3_normal_index = int(f3_s[2])
                        f.index_normals.append(v1_normal_index)
                        f.index_normals.append(v2_normal_index)
                        f.index_normals.append(v3_normal_index)
                    current_object.faces.append(f)
                if arr[0] == "o":
                    obj_line = arr[-1].rstrip()
                    try:
                        end = obj_line.index("_#_")
                    except ValueError:
                        end = None
                    obj_name = obj_line[0:end]
                    o = OBJObject()
                    o.name = obj_name
                    o.material = last_material
                    if end:
                        meta = obj_line[end + 3:].split("_")
                        o.vref_start = int(meta[0])
                        o.vref_count = int(meta[1])
                    objects.append(o)
                    if len(current_object.faces) == 0:
                        objects.remove(current_object)
                    current_object = o
                if arr[0] == "usemtl" and len(arr) > 1:
                    mtl_name = arr[1].rstrip()
                    if mtl_name:
                        mtl = [mat for mat in materials if mat.name == mtl_name][0]
                        current_object.material = mtl
                        last_material = mtl
                if arr[0] == "mtllib":
                    matlib_file = arr[1].rstrip()
                    obj_mat = "%s/%s" % (os.path.dirname(obj_file), matlib_file)
                    print(obj_mat)
                    materials = obj_read_materials(obj_mat)
                if arr[0] == "#" and arr[1] == "maxobj":
                    max_obj = MaxObj()
                    max_obj.vstart = int(arr[2])
                    max_obj.vcount = int(arr[3])
                    max_obj.nstart = int(arr[4])
                    max_obj.ncount = int(arr[5])
                    max_obj.noffset = int(arr[6])
                    max_objs.append(max_obj)
        num_faces = sum([len(o.faces) for o in objects])
        print("%s vertices, %s uvs, %s normals, %s objects, %s materials, %s faces" % (
            len(vertices), len(uvs), len(normals), len(objects), len(materials), num_faces))
        if len(vertices) >= 32000:
            print("Your model has %s vertices. Giants only supports a maximum of 32000 vertices per model." % len(vertices))
            sys.exit(1)
        if len(uv_ind) >= 0xffff:
            print("Your model has %s UVs. Giants only supports a maximum of 65535 UVs per model." % len(uv_ind))
            sys.exit(1)
        check(os.path.basename(obj_file).replace(".obj", ""), materials)
        len_vertices = len(vertices)
        len_normals = len(normals)
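
        # GBS layout written below: header (version, option flags), vertex
        # positions, optional normal data, the vertex/UV index table, per-vertex
        # UVs, the max-object ranges, and one record per sub-object.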
        data = Packet()
        data.put_ulong(GBS_VERSION)
        options = 0
        if len(uvs) > 0:
            options |= GBSFlagUVs
        options |= GBSFlagCalcNormals
        data.put_long(options)
        data.put_long(len_vertices)
        for v in vertices:
            data.put_float(v.x)
            data.put_float(v.y)
            data.put_float(v.z)
        if options & GBSFlagNormals:
            for v in normals:
                data.put_float(v.x)
                data.put_float(v.y)
                data.put_float(v.z)
            data.put_ulong(len_normals)
            data.put_ulong(len_normals)
            for i in range(len_normals):
                data.put_short(len_vertices + i)
        assert len_vertices <= len(uvs)
        indices = []
        for uv_in in sorted(uv_ind.keys()):
            indices.append(uv_ind[uv_in] - 1)
        nverts = max(len_vertices, len(uvs))
        assert nverts == len(indices)
        data.put_ulong(nverts)
        for i in indices:
            # indexed vert/UV
            data.put_short(i)
        if options & GBSFlagUVs:
            for v in uvs:
                data.put_float(v.u)
                data.put_float(v.v * -1)
        # max objects
        print("There are %s max objects" % len(max_objs))
        if not max_objs:
            data.put_long(1)  # 1 big object
            data.put_long(0)  # vstart
            data.put_long(len_vertices)  # vcount
            data.put_long(0)  # nstart
            data.put_long(0)  # ncount
            data.put_long(0)  # noffset ???
        else:
            data.put_long(len(max_objs))
            for max_obj in max_objs:
                data.put_long(max_obj.vstart)
                data.put_long(max_obj.vcount)
                data.put_long(max_obj.nstart)
                data.put_long(max_obj.ncount)
                data.put_long(max_obj.noffset)
        # start write subobjects
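        # Each sub-object record holds a 32-byte name, its max-object index,
        # triangle counts, the triangle index data (a count followed by three UV
        # indices per face), the vertex range it references, and its material
        # settings.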
        data.put_long(len(objects))
        print("THERE ARE %s subobjects" % len(objects))
        for obj in objects:
            data.put_string_size(obj.name, 32)
            data.put_long(0)  # max obj index
            data.put_long(len(obj.faces))  # totaltris
            data.put_long(3 * len(obj.faces) + 1)  # ntris
            data.put_short(len(obj.faces))
            for face in obj.faces:  # len(obj.faces) == totaltris
                data.put_short(face.index_uvs[0] - 1)
                data.put_short(face.index_uvs[1] - 1)
                data.put_short(face.index_uvs[2] - 1)
            data.put_long(obj.vref_start)  # verticeref_start
            data.put_long(nverts if obj.vref_count == 0 else obj.vref_count)  # verticeref_count
            if options & GBSFlagUVs:
                data.put_string_size(obj.material.texture, 32)  # texture
                data.put_string_size(obj.material.texture, 32)  # bump texture
                data.put_float(0)  # falloff
            if options & GBSFlagRGBs:
                data.put_float(0)  # blend
            data.put_long(0x40000000)  # flags
            data.put_long(0)  # emissive
            data.put_long(0)  # ambient
            data.put_long(0)  # diffuse
            data.put_long(0)  # specular
            data.put_float(0)  # power
        # write gbs file
        output = "%s/%s.gbs" % (os.path.dirname(os.path.abspath(obj_file)), os.path.basename(obj_file))
        with open(output, "wb") as gbs:
            gbs.write(data.getvalue())
    def __str__(self):
        return "[name: %s, nverts: %s, num_vertices: %s, vertuv: %s, normals: %s, maxobjs: %s, subobjs: %s]" % (
            self.name, self.nverts, self.num_vertices, len(self.vertuv), self.num_normals, self.nmobjs, self.nsobjs)


def convert_obj(path):
    GbsData.from_obj(path)
    output = "%s/%s.gbs" % (os.path.dirname(os.path.abspath(path)), os.path.basename(path))
    print("Done! Output: %s" % output)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("path", help="path to the .obj model to convert")
    args = parser.parse_args()
    if os.path.exists(args.path):
        convert_obj(args.path)
    else:
        print("ERROR: file %s does not exist" % args.path)
        sys.exit(1)