Initial commit of new FBX 7.4 binary exporter
What to expect: * Static export of empties, meshes, cameras and lamps, as well as materials and (image!) textures should work OK. There are a few advanced topics still TODO regarding meshes and mat/tex, though. * Custom properties from objects/meshes/lamps/cameras/armatures/bones/materials/textures are exported too (only simple ones, ints/floats/strings). * Armature export: this needs testing by people having *native* FBX applications; linking between bones and meshes seems to work, but I have doubts about bone orientations. * Animation: still a complete TODO. Note that the old FBX ASCII 6.1 exporter is still available (top dropdown in the exporter's UI). Many thanks to Campbell, who did the ground work of deciphering the FBX binary format and wrote basic code to read/write it.
This commit is contained in:
parent
a7da6cfa5b
commit
3879603620
|
@ -16,12 +16,12 @@
|
|||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8-80 compliant>
|
||||
# <pep8 compliant>
|
||||
|
||||
bl_info = {
|
||||
"name": "Autodesk FBX format",
|
||||
"author": "Campbell Barton",
|
||||
"blender": (2, 59, 0),
|
||||
"author": "Campbell Barton, Bastien Montagne",
|
||||
"blender": (2, 70, 0),
|
||||
"location": "File > Import-Export",
|
||||
"description": "Export FBX meshes, UV's, vertex colors, materials, "
|
||||
"textures, cameras, lamps and actions",
|
||||
|
@ -37,6 +37,8 @@ if "bpy" in locals():
|
|||
import imp
|
||||
if "import_fbx" in locals():
|
||||
imp.reload(import_fbx)
|
||||
if "export_fbx_bin" in locals():
|
||||
imp.reload(export_fbx_bin)
|
||||
if "export_fbx" in locals():
|
||||
imp.reload(export_fbx)
|
||||
|
||||
|
@ -148,6 +150,15 @@ class ExportFBX(bpy.types.Operator, ExportHelper):
|
|||
# List of operator properties, the attributes will be assigned
|
||||
# to the class instance from the operator settings before calling.
|
||||
|
||||
version = EnumProperty(
|
||||
items=(('BIN7400', "FBX 7.4 binary", "Newer 7.4 binary version, still in development (no animation yet)"),
|
||||
('ASCII6100', "FBX 6.1 ASCII", "Legacy 6.1 ascii version"),
|
||||
),
|
||||
name="Exporter Version",
|
||||
description="Choose which version of the exporter to use",
|
||||
default='BIN7400',
|
||||
)
|
||||
|
||||
use_selection = BoolProperty(
|
||||
name="Selected Objects",
|
||||
description="Export selected objects on visible layers",
|
||||
|
@ -211,7 +222,18 @@ class ExportFBX(bpy.types.Operator, ExportHelper):
|
|||
)
|
||||
|
||||
use_mesh_edges = BoolProperty(
|
||||
name="Include Edges",
|
||||
name="Include Loose Edges",
|
||||
default=False,
|
||||
)
|
||||
use_tspace = BoolProperty(
|
||||
name="Include Tangent Space",
|
||||
description=("Add binormal and tangent vectors, together with normal they form the tangent space "
|
||||
"(will only work correctly with tris/quads only meshes!)"),
|
||||
default=False,
|
||||
)
|
||||
use_custom_properties = BoolProperty(
|
||||
name="Custom Properties",
|
||||
description="Export custom properties",
|
||||
default=False,
|
||||
)
|
||||
use_armature_deform_only = BoolProperty(
|
||||
|
@ -251,6 +273,11 @@ class ExportFBX(bpy.types.Operator, ExportHelper):
|
|||
default=6.0, # default: 10^-4 frames.
|
||||
)
|
||||
path_mode = path_reference_mode
|
||||
embed_textures = BoolProperty(
|
||||
name="Embed Textures",
|
||||
description="Embed textures in FBX binary file (only for \"Copy\" path mode!)",
|
||||
default=False,
|
||||
)
|
||||
batch_mode = EnumProperty(
|
||||
name="Batch Mode",
|
||||
items=(('OFF', "Off", "Active scene to file"),
|
||||
|
@ -284,17 +311,19 @@ class ExportFBX(bpy.types.Operator, ExportHelper):
|
|||
to_up=self.axis_up,
|
||||
).to_4x4())
|
||||
|
||||
keywords = self.as_keywords(ignore=("axis_forward",
|
||||
"axis_up",
|
||||
"global_scale",
|
||||
keywords = self.as_keywords(ignore=("global_scale",
|
||||
"check_existing",
|
||||
"filter_glob",
|
||||
))
|
||||
|
||||
keywords["global_matrix"] = global_matrix
|
||||
|
||||
from . import export_fbx
|
||||
return export_fbx.save(self, context, **keywords)
|
||||
if self.version == 'BIN7400':
|
||||
from . import export_fbx_bin
|
||||
return export_fbx_bin.save(self, context, **keywords)
|
||||
else:
|
||||
from . import export_fbx
|
||||
return export_fbx.save(self, context, **keywords)
|
||||
|
||||
|
||||
def menu_func_import(self, context):
|
||||
|
|
|
@ -0,0 +1,74 @@
|
|||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# Script copyright (C) 2006-2012, assimp team
|
||||
# Script copyright (C) 2013 Blender Foundation
|
||||
|
||||
# FBX property type codes, as single byte values (the first byte of the
# corresponding type character).
BOOL = b'C'[0]
INT16 = b'Y'[0]
INT32 = b'I'[0]
INT64 = b'L'[0]
FLOAT32 = b'F'[0]
FLOAT64 = b'D'[0]
BYTES = b'R'[0]
STRING = b'S'[0]
INT32_ARRAY = b'i'[0]
INT64_ARRAY = b'l'[0]
FLOAT32_ARRAY = b'f'[0]
FLOAT64_ARRAY = b'd'[0]
BOOL_ARRAY = b'b'[0]
BYTE_ARRAY = b'c'[0]

# array types - actual length may vary (depending on underlying C implementation)!
# So probe array.array itemsize at import time instead of hard-coding typecodes.
import array

# For now, bytes and bool are assumed always 1byte.
ARRAY_BOOL = 'b'
ARRAY_BYTE = 'B'


def _typecode_for_size(candidates, itemsize):
    """Return the first array typecode in `candidates` whose items are
    exactly `itemsize` bytes wide, or None if the platform has none."""
    for typecode in candidates:
        if array.array(typecode).itemsize == itemsize:
            return typecode
    return None


ARRAY_INT32 = _typecode_for_size('ilq', 4)
ARRAY_INT64 = _typecode_for_size('ilq', 8)
ARRAY_FLOAT32 = _typecode_for_size('fd', 4)
ARRAY_FLOAT64 = _typecode_for_size('fd', 8)

if not ARRAY_INT32:
    raise Exception("Impossible to get a 4-bytes integer type for array!")
if not ARRAY_INT64:
    raise Exception("Impossible to get an 8-bytes integer type for array!")
if not ARRAY_FLOAT32:
    raise Exception("Impossible to get a 4-bytes float type for array!")
if not ARRAY_FLOAT64:
    raise Exception("Impossible to get an 8-bytes float type for array!")
|
|
@ -0,0 +1,320 @@
|
|||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# Script copyright (C) 2013 Campbell Barton
|
||||
|
||||
try:
|
||||
from . import data_types
|
||||
except:
|
||||
import data_types
|
||||
|
||||
from struct import pack
|
||||
import array
|
||||
import zlib
|
||||
|
||||
_BLOCK_SENTINEL_LENGTH = 13
|
||||
_BLOCK_SENTINEL_DATA = (b'\0' * _BLOCK_SENTINEL_LENGTH)
|
||||
_IS_BIG_ENDIAN = (__import__("sys").byteorder != 'little')
|
||||
_HEAD_MAGIC = b'Kaydara FBX Binary\x20\x20\x00\x1a\x00'
|
||||
|
||||
# fbx has very strict CRC rules, all based on file timestamp
|
||||
# until we figure these out, write files at a fixed time. (workaround!)
|
||||
|
||||
# Assumes: CreationTime
|
||||
_TIME_ID = b'1970-01-01 10:00:00:000'
|
||||
_FILE_ID = b'\x28\xb3\x2a\xeb\xb6\x24\xcc\xc2\xbf\xc8\xb0\x2a\xa9\x2b\xfc\xf1'
|
||||
_FOOT_ID = b'\xfa\xbc\xab\x09\xd0\xc8\xd4\x66\xb1\x76\xfb\x83\x1c\xf7\x26\x7e'
|
||||
|
||||
|
||||
class FBXElem:
    """One FBX binary element (node record).

    An element has a bytes ``id``, a list of typed properties and a list of
    child elements.  Property payloads are stored pre-packed as little-endian
    bytes in ``props``, with one parallel type-code byte per property in
    ``props_type``.

    ``_calc_offsets`` must be called before ``_write``: the binary format
    stores each scope's absolute end offset inline, so offsets are computed
    in a first pass and then checked while writing.
    """
    __slots__ = (
        "id",
        "props",
        "props_type",
        "elems",

        "_props_length",  # combined length of props
        "_end_offset",  # byte offset from the start of the file.
    )

    def __init__(self, id):
        assert(len(id) < 256)  # length must fit in a uint8
        self.id = id
        self.props = []
        self.props_type = bytearray()
        self.elems = []
        self._end_offset = -1
        self._props_length = -1

    def add_bool(self, data):
        assert(isinstance(data, bool))
        data = pack('?', data)

        self.props_type.append(data_types.BOOL)
        self.props.append(data)

    def add_int16(self, data):
        assert(isinstance(data, int))
        data = pack('<h', data)

        self.props_type.append(data_types.INT16)
        self.props.append(data)

    def add_int32(self, data):
        assert(isinstance(data, int))
        data = pack('<i', data)

        self.props_type.append(data_types.INT32)
        self.props.append(data)

    def add_int64(self, data):
        assert(isinstance(data, int))
        data = pack('<q', data)

        self.props_type.append(data_types.INT64)
        self.props.append(data)

    def add_float32(self, data):
        assert(isinstance(data, float))
        data = pack('<f', data)

        self.props_type.append(data_types.FLOAT32)
        self.props.append(data)

    def add_float64(self, data):
        assert(isinstance(data, float))
        data = pack('<d', data)

        self.props_type.append(data_types.FLOAT64)
        self.props.append(data)

    def add_bytes(self, data):
        # raw binary blob: uint32 length prefix + payload
        assert(isinstance(data, bytes))
        data = pack('<I', len(data)) + data

        self.props_type.append(data_types.BYTES)
        self.props.append(data)

    def add_string(self, data):
        # string as raw bytes: uint32 length prefix + payload (no NUL)
        assert(isinstance(data, bytes))
        data = pack('<I', len(data)) + data

        self.props_type.append(data_types.STRING)
        self.props.append(data)

    def add_string_unicode(self, data):
        # convenience wrapper: encodes a str as utf-8 before storing
        assert(isinstance(data, str))
        data = data.encode('utf8')
        data = pack('<I', len(data)) + data

        self.props_type.append(data_types.STRING)
        self.props.append(data)

    def _add_array_helper(self, data, array_type, prop_type):
        """Pack an array property: byteswap on big-endian hosts (FBX is
        little-endian on disk), zlib-compress payloads over 128 bytes, and
        prepend the (length, encoding, comp_len) header."""
        assert(isinstance(data, array.array))
        assert(data.typecode == array_type)

        length = len(data)

        if _IS_BIG_ENDIAN:
            data = data[:]  # copy so the caller's array is left untouched
            data.byteswap()
        data = data.tobytes()

        # mimic behavior of fbxconverter (also common sense)
        # we could make this configurable.
        encoding = 0 if len(data) <= 128 else 1
        if encoding == 0:
            pass
        elif encoding == 1:
            data = zlib.compress(data, 1)

        comp_len = len(data)

        data = pack('<3I', length, encoding, comp_len) + data

        self.props_type.append(prop_type)
        self.props.append(data)

    def add_int32_array(self, data):
        if not isinstance(data, array.array):
            data = array.array(data_types.ARRAY_INT32, data)
        self._add_array_helper(data, data_types.ARRAY_INT32, data_types.INT32_ARRAY)

    def add_int64_array(self, data):
        if not isinstance(data, array.array):
            data = array.array(data_types.ARRAY_INT64, data)
        self._add_array_helper(data, data_types.ARRAY_INT64, data_types.INT64_ARRAY)

    def add_float32_array(self, data):
        if not isinstance(data, array.array):
            data = array.array(data_types.ARRAY_FLOAT32, data)
        self._add_array_helper(data, data_types.ARRAY_FLOAT32, data_types.FLOAT32_ARRAY)

    def add_float64_array(self, data):
        if not isinstance(data, array.array):
            data = array.array(data_types.ARRAY_FLOAT64, data)
        self._add_array_helper(data, data_types.ARRAY_FLOAT64, data_types.FLOAT64_ARRAY)

    def add_bool_array(self, data):
        if not isinstance(data, array.array):
            data = array.array(data_types.ARRAY_BOOL, data)
        self._add_array_helper(data, data_types.ARRAY_BOOL, data_types.BOOL_ARRAY)

    def add_byte_array(self, data):
        if not isinstance(data, array.array):
            data = array.array(data_types.ARRAY_BYTE, data)
        self._add_array_helper(data, data_types.ARRAY_BYTE, data_types.BYTE_ARRAY)

    # -------------------------
    # internal helper functions

    def _calc_offsets(self, offset, is_last):
        """
        Call before writing, calculates fixed offsets.

        `offset` is this element's start offset in the file; returns the
        absolute end offset of this element's scope (also stored in
        ``self._end_offset``).
        """
        assert(self._end_offset == -1)
        assert(self._props_length == -1)

        offset += 12  # 3 uints: end offset, prop count, props length
        offset += 1 + len(self.id)  # len + idname

        props_length = 0
        for data in self.props:
            # 1 byte for the prop type
            props_length += 1 + len(data)
        self._props_length = props_length
        offset += props_length

        offset = self._calc_offsets_children(offset, is_last)

        self._end_offset = offset
        return offset

    def _calc_offsets_children(self, offset, is_last):
        # Children (if any) are each followed by a NUL sentinel record; an
        # element with neither children nor props also needs one, except as
        # the last sibling of its scope.
        if self.elems:
            elem_last = self.elems[-1]
            for elem in self.elems:
                offset = elem._calc_offsets(offset, (elem is elem_last))
            offset += _BLOCK_SENTINEL_LENGTH
        elif not self.props:
            if not is_last:
                offset += _BLOCK_SENTINEL_LENGTH

        return offset

    def _write(self, write, tell, is_last):
        """Write this element (header, props, children) and verify that the
        precomputed end offset was reached exactly."""
        assert(self._end_offset != -1)
        assert(self._props_length != -1)

        write(pack('<3I', self._end_offset, len(self.props), self._props_length))

        write(bytes((len(self.id),)))
        write(self.id)

        for i, data in enumerate(self.props):
            write(bytes((self.props_type[i],)))
            write(data)

        self._write_children(write, tell, is_last)

        if tell() != self._end_offset:
            # bugfix: previously used the undefined name `end_offset`,
            # raising NameError instead of the intended IOError message.
            raise IOError("scope length not reached, "
                          "something is wrong (%d)" % (self._end_offset - tell()))

    def _write_children(self, write, tell, is_last):
        # mirrors the sentinel logic of _calc_offsets_children
        if self.elems:
            elem_last = self.elems[-1]
            for elem in self.elems:
                assert(elem.id != b'')
                elem._write(write, tell, (elem is elem_last))
            write(_BLOCK_SENTINEL_DATA)
        elif not self.props:
            if not is_last:
                write(_BLOCK_SENTINEL_DATA)
|
||||
def _write_timedate_hack(elem_root):
    """Stamp fixed FileId / CreationTime values onto the root's children.

    FBX has very strict CRC rules based on the file timestamp; until those
    are figured out, files are written with constant values (workaround!).
    """
    # element id -> (expected existing prop type, refill callback)
    replacements = {
        b'FileId': (b'R'[0], lambda el: el.add_bytes(_FILE_ID)),
        b'CreationTime': (b'S'[0], lambda el: el.add_string(_TIME_ID)),
    }
    remaining = set(replacements)

    for child in elem_root.elems:
        entry = replacements.get(child.id)
        if entry is not None:
            expected_type, refill = entry
            assert(child.props_type[0] == expected_type)
            assert(len(child.props_type) == 1)
            child.props.clear()
            child.props_type.clear()

            refill(child)
            remaining.discard(child.id)

        if not remaining:
            break

    if remaining:
        print("Missing fields!")
|
||||
|
||||
|
||||
def write(fn, elem_root, version):
    """Write `elem_root` (the unnamed root element) to `fn` as a binary FBX
    file of the given `version`."""
    assert(elem_root.id == b'')

    with open(fn, 'wb') as fh:
        fw = fh.write
        tell = fh.tell

        fw(_HEAD_MAGIC)
        fw(pack('<I', version))

        # hack since we don't decode time.
        # ideally we would _not_ modify this data.
        _write_timedate_hack(elem_root)

        elem_root._calc_offsets_children(tell(), False)
        elem_root._write_children(fw, tell, False)

        fw(_FOOT_ID)
        fw(b'\x00' * 4)

        # padding for alignment (values between 1 & 16 observed)
        # if already aligned to 16, add a full 16 bytes padding.
        pad = 16 - (tell() % 16)
        fw(b'\0' * pad)

        fw(pack('<I', version))

        # unknown magic (always the same)
        fw(b'\0' * 120)
        fw(b'\xf8\x5a\x8c\x6a\xde\xf5\xd9\x7e\xec\xe9\x0c\xe3\x75\x8f\x29\x0b')
|
|
@ -248,6 +248,7 @@ def save_single(operator, scene, filepath="",
|
|||
path_mode='AUTO',
|
||||
use_mesh_edges=True,
|
||||
use_default_take=True,
|
||||
**kwargs
|
||||
):
|
||||
|
||||
import bpy_extras.io_utils
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -841,7 +841,17 @@ def blen_read_light(fbx_tmpl, fbx_obj, global_scale):
|
|||
lamp = bpy.data.lamps.new(name=elem_name_utf8, type=light_type)
|
||||
|
||||
if light_type == 'SPOT':
|
||||
lamp.spot_size = math.radians(elem_props_get_number(fbx_props, b'Cone angle', 45.0))
|
||||
spot_size = elem_props_get_number(fbx_props, b'OuterAngle', None)
|
||||
if spot_size is None:
|
||||
# Deprecated.
|
||||
spot_size = elem_props_get_number(fbx_props, b'Cone angle', 45.0)
|
||||
lamp.spot_size = math.radians(spot_size)
|
||||
|
||||
spot_blend = elem_props_get_number(fbx_props, b'InnerAngle', None)
|
||||
if spot_blend is None:
|
||||
# Deprecated.
|
||||
spot_blend = elem_props_get_number(fbx_props, b'HotSpot', 45.0)
|
||||
lamp.spot_blend = 1.0 - (spot_blend / spot_size)
|
||||
|
||||
# TODO, cycles
|
||||
lamp.color = elem_props_get_color_rgb(fbx_props, b'Color', (1.0, 1.0, 1.0))
|
||||
|
|
|
@ -0,0 +1,176 @@
|
|||
#!/usr/bin/env python3
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# Script copyright (C) 2014 Blender Foundation
|
||||
|
||||
"""
|
||||
Usage
|
||||
=====
|
||||
|
||||
json2fbx [FILES]...
|
||||
|
||||
This script will write a binary FBX file for each JSON argument given.
|
||||
|
||||
|
||||
Input
|
||||
======
|
||||
|
||||
The JSON data is formatted into a list of nested lists of 4 items:
|
||||
|
||||
``[id, [data, ...], "data_types", [subtree, ...]]``
|
||||
|
||||
Where each list may be empty, and the items in
|
||||
the subtree are formatted the same way.
|
||||
|
||||
data_types is a string, aligned with data, that specifies a type
|
||||
for each property.
|
||||
|
||||
The types are as follows:
|
||||
|
||||
* 'Y': - INT16
|
||||
* 'C': - BOOL
|
||||
* 'I': - INT32
|
||||
* 'F': - FLOAT32
|
||||
* 'D': - FLOAT64
|
||||
* 'L': - INT64
|
||||
* 'R': - BYTES
|
||||
* 'S': - STRING
|
||||
* 'f': - FLOAT32_ARRAY
|
||||
* 'i': - INT32_ARRAY
|
||||
* 'd': - FLOAT64_ARRAY
|
||||
* 'l': - INT64_ARRAY
|
||||
* 'b': - BOOL ARRAY
|
||||
* 'c': - BYTE ARRAY
|
||||
|
||||
Note that key:value pairs aren't used since the id's are not
|
||||
ensured to be unique.
|
||||
"""
|
||||
|
||||
|
||||
import encode_bin
|
||||
|
||||
|
||||
def elem_empty(elem, name):
    """Create an empty FBXElem called `name`; append it as a child of `elem`
    unless `elem` is None (the root case). Returns the new element."""
    child = encode_bin.FBXElem(name)
    if elem is not None:
        elem.elems.append(child)
    return child
|
||||
|
||||
|
||||
def parse_json_rec(fbx_root, json_node):
    """Recursively convert one JSON node ``[id, data, data_types, children]``
    into an FBXElem appended under `fbx_root`.

    Returns the FBX version if an "FBXVersion" element occurs in this
    subtree, else 0.
    """
    name, data, data_types, children = json_node
    ver = 0

    assert(len(data_types) == len(data))

    e = elem_empty(fbx_root, name.encode())
    for d, dt in zip(data, data_types):
        if dt == "C":
            e.add_bool(d)
        elif dt == "Y":
            e.add_int16(d)
        elif dt == "I":
            e.add_int32(d)
        elif dt == "L":
            e.add_int64(d)
        elif dt == "F":
            e.add_float32(d)
        elif dt == "D":
            e.add_float64(d)
        elif dt == "R":
            # WARNING: eval() of file content — this is a trusted developer
            # utility; never feed it untrusted JSON.
            d = eval('b"""' + d + '"""')
            e.add_bytes(d)
        elif dt == "S":
            # bugfix: bytes.replace() returns a new object; the result was
            # previously discarded, so "::" was never mapped back to the
            # FBX b"\x00\x01" name separator.
            d = d.encode().replace(b"::", b"\x00\x01")
            e.add_string(d)
        elif dt == "i":
            e.add_int32_array(d)
        elif dt == "l":
            e.add_int64_array(d)
        elif dt == "f":
            e.add_float32_array(d)
        elif dt == "d":
            e.add_float64_array(d)
        elif dt == "b":
            # bugfix: "b" is the BOOL array type (see module doc / data_types);
            # it was previously swapped with "c" (BYTE array).
            e.add_bool_array(d)
        elif dt == "c":
            e.add_byte_array(d)

    if name == "FBXVersion":
        assert(data_types == "I")
        ver = int(data[0])

    for child in children:
        _ver = parse_json_rec(e, child)
        if _ver:
            ver = _ver

    return ver
|
||||
|
||||
|
||||
def parse_json(json_root):
    """Build the full FBXElem tree from a parsed JSON document.

    Returns ``(root_elem, fbx_version)``; the version is 0 if no
    "FBXVersion" element was found.
    """
    root = elem_empty(None, b"")
    version = 0

    for node in json_root:
        found = parse_json_rec(root, node)
        version = found or version

    return root, version
|
||||
|
||||
|
||||
def json2fbx(fn):
    """Convert a single JSON file `fn` into a binary FBX file alongside it."""
    import os
    import json

    fn_fbx = "%s.fbx" % os.path.splitext(fn)[0]
    print("Writing: %r " % fn_fbx, end="")

    with open(fn) as f_json:
        json_root = json.load(f_json)

    fbx_root, fbx_version = parse_json(json_root)
    print("(Version %d) ..." % fbx_version)
    encode_bin.write(fn_fbx, fbx_root, fbx_version)
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Command Line
|
||||
|
||||
def main():
    """Command-line entry point: convert every JSON path given in argv.

    Conversion is best-effort per file: a failure is reported with its
    traceback and the remaining arguments are still processed.
    """
    import sys

    if "--help" in sys.argv:
        print(__doc__)
        return

    for arg in sys.argv[1:]:
        try:
            json2fbx(arg)
        except Exception:
            # bugfix: was a bare `except:`, which also swallowed
            # SystemExit / KeyboardInterrupt.
            print("Failed to convert %r, error:" % arg)

            import traceback
            traceback.print_exc()
|
|
@ -32,6 +32,8 @@ from struct import unpack
|
|||
import array
|
||||
import zlib
|
||||
|
||||
from . import data_types
|
||||
|
||||
# at the end of each nested block, there is a NUL record to indicate
|
||||
# that the sub-scope exists (i.e. to distinguish between P: and P : {})
|
||||
# this NUL record is 13 bytes long.
|
||||
|
@ -87,12 +89,12 @@ read_data_dict = {
|
|||
b'L'[0]: lambda read: unpack(b'<q', read(8))[0], # 64 bit int
|
||||
b'R'[0]: lambda read: read(read_uint(read)), # binary data
|
||||
b'S'[0]: lambda read: read(read_uint(read)), # string data
|
||||
b'f'[0]: lambda read: unpack_array(read, 'f', 4, False), # array (float)
|
||||
b'i'[0]: lambda read: unpack_array(read, 'i', 4, True), # array (int)
|
||||
b'd'[0]: lambda read: unpack_array(read, 'd', 8, False), # array (double)
|
||||
b'l'[0]: lambda read: unpack_array(read, 'q', 8, True), # array (long)
|
||||
b'b'[0]: lambda read: unpack_array(read, 'b', 1, False), # array (bool)
|
||||
b'c'[0]: lambda read: unpack_array(read, 'B', 1, False), # array (ubyte)
|
||||
b'f'[0]: lambda read: unpack_array(read, data_types.ARRAY_FLOAT32, 4, False), # array (float)
|
||||
b'i'[0]: lambda read: unpack_array(read, data_types.ARRAY_INT32, 4, True), # array (int)
|
||||
b'd'[0]: lambda read: unpack_array(read, data_types.ARRAY_FLOAT64, 8, False), # array (double)
|
||||
b'l'[0]: lambda read: unpack_array(read, data_types.ARRAY_INT64, 8, True), # array (long)
|
||||
b'b'[0]: lambda read: unpack_array(read, data_types.ARRAY_BOOL, 1, False), # array (bool)
|
||||
b'c'[0]: lambda read: unpack_array(read, data_types.ARRAY_BYTE, 1, False), # array (ubyte)
|
||||
}
|
||||
|
||||
|
||||
|
@ -166,24 +168,3 @@ def parse(fn, use_namedtuple=True):
|
|||
|
||||
args = (b'', [], bytearray(0), root_elems)
|
||||
return FBXElem(*args) if use_namedtuple else args, fbx_version
|
||||
|
||||
# Inline module, only for external use
|
||||
# pyfbx.data_types
|
||||
data_types = type(array)("data_types")
|
||||
data_types.__dict__.update(
|
||||
dict(
|
||||
INT16 = b'Y'[0],
|
||||
BOOL = b'C'[0],
|
||||
INT32 = b'I'[0],
|
||||
FLOAT32 = b'F'[0],
|
||||
FLOAT64 = b'D'[0],
|
||||
INT64 = b'L'[0],
|
||||
BYTES = b'R'[0],
|
||||
STRING = b'S'[0],
|
||||
FLOAT32_ARRAY = b'f'[0],
|
||||
INT32_ARRAY = b'i'[0],
|
||||
FLOAT64_ARRAY = b'd'[0],
|
||||
INT64_ARRAY = b'l'[0],
|
||||
BOOL_ARRAY = b'b'[0],
|
||||
BYTE_ARRAY = b'c'[0],
|
||||
))
|
||||
|
|
Loading…
Reference in New Issue