FBX IO: remove experimental addon, now that 2.72 is out we moved everything to 'stable' one.

This commit is contained in:
Bastien Montagne 2014-10-08 15:43:19 +02:00
parent 4a07e805ad
commit a2c6d9a59e
10 changed files with 0 additions and 8912 deletions

View File

@ -1,493 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Addon metadata. Blender's addon system parses this literal dict without
# executing the module, so it must stay a plain dictionary literal.
bl_info = {
    "name": "EXPERIMENTAL FBX format",
    "author": "Campbell Barton, Bastien Montagne, Jens Restemeier",
    "version": (3, 2, 0),
    "blender": (2, 72, 0),  # minimum Blender version required
    "location": "File > Import-Export",
    "description": "Experimental FBX io meshes, UV's, vertex colors, materials, "
                   "textures, cameras, lamps and actions",
    "warning": "Use at own risks! This addon is to test new fixes and features, *not* for every-day FBX io "
               "(unless you know what you are doing)",
    "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
                "Scripts/Import-Export/Autodesk_FBX",
    "category": "Import-Export",
}
# Support for Blender's "Reload Scripts" operator: if this module has already
# been executed ("bpy" is in locals()), re-run its submodules too so edits to
# them are picked up without restarting Blender.
if "bpy" in locals():
    import importlib
    if "import_fbx" in locals():
        importlib.reload(import_fbx)
    if "export_fbx_bin" in locals():
        importlib.reload(export_fbx_bin)
    if "export_fbx" in locals():
        importlib.reload(export_fbx)
import bpy
from bpy.props import (StringProperty,
BoolProperty,
FloatProperty,
EnumProperty,
)
from bpy_extras.io_utils import (ImportHelper,
ExportHelper,
path_reference_mode,
axis_conversion,
)
class ImportFBX_experimental(bpy.types.Operator, ImportHelper):
    """Load a FBX geometry file"""
    bl_idname = "import_scene.fbx_experimental"
    bl_label = "Import FBX - Experimental"
    bl_options = {'UNDO', 'PRESET'}

    directory = StringProperty()

    filename_ext = ".fbx"
    filter_glob = StringProperty(default="*.fbx", options={'HIDDEN'})

    # Orientation/scale options.
    use_manual_orientation = BoolProperty(
            name="Manual Orientation",
            description="Specify orientation and scale, instead of using embedded data in FBX file",
            default=False,
            )
    axis_forward = EnumProperty(
            name="Forward",
            items=(('X', "X Forward", ""),
                   ('Y', "Y Forward", ""),
                   ('Z', "Z Forward", ""),
                   ('-X', "-X Forward", ""),
                   ('-Y', "-Y Forward", ""),
                   ('-Z', "-Z Forward", ""),
                   ),
            default='-Z',
            )
    axis_up = EnumProperty(
            name="Up",
            items=(('X', "X Up", ""),
                   ('Y', "Y Up", ""),
                   ('Z', "Z Up", ""),
                   ('-X', "-X Up", ""),
                   ('-Y', "-Y Up", ""),
                   ('-Z', "-Z Up", ""),
                   ),
            default='Y',
            )
    global_scale = FloatProperty(
            name="Scale",
            min=0.001, max=1000.0,
            default=1.0,
            )
    bake_space_transform = BoolProperty(
            name="Apply Transform",
            description=("Bake space transform into object data, avoids getting unwanted rotations to objects when "
                         "target space is not aligned with Blender's space "
                         "(WARNING! experimental option, might give odd/wrong results)"),
            default=False,
            )

    use_image_search = BoolProperty(
            name="Image Search",
            description="Search subdirs for any associated images (Warning, may be slow)",
            default=True,
            )

    use_alpha_decals = BoolProperty(
            name="Alpha Decals",
            description="Treat materials with alpha as decals (no shadow casting)",
            default=False,
            options={'HIDDEN'}
            )
    decal_offset = FloatProperty(
            name="Decal Offset",
            description="Displace geometry of alpha meshes",
            min=0.0, max=1.0,
            default=0.0,
            options={'HIDDEN'}
            )

    use_custom_props = BoolProperty(
            name="Import user properties",
            description="Import user properties as custom properties",
            default=True,
            options={'HIDDEN'},
            )
    use_custom_props_enum_as_string = BoolProperty(
            name="Import enum properties as string",
            description="Store enumeration values as string",
            default=True,
            options={'HIDDEN'},
            )

    # Armature options.
    ignore_leaf_bones = BoolProperty(
            name="Ignore leaf bones",
            description="Ignore the last bone at the end of a chain that is used to mark the length of the previous bone",
            default=False,
            options={'HIDDEN'},
            )
    automatic_bone_orientation = BoolProperty(
            name="Automatic Bone Orientation",
            description="Try to align the major bone axis with the bone children",
            default=False,
            options={'HIDDEN'},
            )
    primary_bone_axis = EnumProperty(
            name="Primary Bone Axis",
            items=(('X', "X Axis", ""),
                   ('Y', "Y Axis", ""),
                   ('Z', "Z Axis", ""),
                   ('-X', "-X Axis", ""),
                   ('-Y', "-Y Axis", ""),
                   ('-Z', "-Z Axis", ""),
                   ),
            default='Y',
            )
    secondary_bone_axis = EnumProperty(
            name="Secondary Bone Axis",
            items=(('X', "X Axis", ""),
                   ('Y', "Y Axis", ""),
                   ('Z', "Z Axis", ""),
                   ('-X', "-X Axis", ""),
                   ('-Y', "-Y Axis", ""),
                   ('-Z', "-Z Axis", ""),
                   ),
            default='X',
            )

    def draw(self, context):
        """Lay out the import options in the file-browser side panel."""
        layout = self.layout

        # Fixed: stray trailing commas after prop() calls built discarded tuples.
        layout.prop(self, "use_manual_orientation")
        sub = layout.column()
        sub.enabled = self.use_manual_orientation  # only editable in manual mode
        sub.prop(self, "axis_forward")
        sub.prop(self, "axis_up")
        layout.prop(self, "global_scale")
        layout.prop(self, "bake_space_transform")

        layout.prop(self, "use_image_search")
        # layout.prop(self, "use_alpha_decals")
        layout.prop(self, "decal_offset")

        layout.prop(self, "use_custom_props")
        sub = layout.row()
        sub.enabled = self.use_custom_props
        sub.prop(self, "use_custom_props_enum_as_string")

        layout.prop(self, "ignore_leaf_bones")
        layout.prop(self, "automatic_bone_orientation")
        sub = layout.column()
        sub.enabled = not self.automatic_bone_orientation
        sub.prop(self, "primary_bone_axis")
        sub.prop(self, "secondary_bone_axis")

    def execute(self, context):
        """Run the experimental importer with the chosen options.

        Returns the operator result set from import_fbx.load().
        """
        # Fixed: message previously said "export" in the *import* operator.
        print("Using EXPERIMENTAL FBX import!")
        keywords = self.as_keywords(ignore=("filter_glob", "directory"))
        keywords["use_cycles"] = (context.scene.render.engine == 'CYCLES')

        from . import import_fbx
        return import_fbx.load(self, context, **keywords)
class ExportFBX_experimental(bpy.types.Operator, ExportHelper):
    # Fixed: this operator calls export_fbx_bin (binary writer), not an ASCII one.
    """Export selection to a binary Autodesk FBX file"""
    bl_idname = "export_scene.fbx_experimental"
    bl_label = "Export FBX - Experimental"
    bl_options = {'UNDO', 'PRESET'}

    filename_ext = ".fbx"
    filter_glob = StringProperty(default="*.fbx", options={'HIDDEN'})

    # List of operator properties, the attributes will be assigned
    # to the class instance from the operator settings before calling.

    use_selection = BoolProperty(
            name="Selected Objects",
            description="Export selected objects on visible layers",
            default=False,
            )
    global_scale = FloatProperty(
            name="Scale",
            description="Scale all data (Some importers do not support scaled armatures!)",
            min=0.001, max=1000.0,
            soft_min=0.01, soft_max=1000.0,
            default=1.0,
            )
    axis_forward = EnumProperty(
            name="Forward",
            items=(('X', "X Forward", ""),
                   ('Y', "Y Forward", ""),
                   ('Z', "Z Forward", ""),
                   ('-X', "-X Forward", ""),
                   ('-Y', "-Y Forward", ""),
                   ('-Z', "-Z Forward", ""),
                   ),
            default='-Z',
            )
    axis_up = EnumProperty(
            name="Up",
            items=(('X', "X Up", ""),
                   ('Y', "Y Up", ""),
                   ('Z', "Z Up", ""),
                   ('-X', "-X Up", ""),
                   ('-Y', "-Y Up", ""),
                   ('-Z', "-Z Up", ""),
                   ),
            default='Y',
            )
    bake_space_transform = BoolProperty(
            name="Apply Transform",
            description=("Bake space transform into object data, avoids getting unwanted rotations to objects when "
                         "target space is not aligned with Blender's space "
                         "(WARNING! experimental option, might give odd/wrong results)"),
            default=False,
            )

    object_types = EnumProperty(
            name="Object Types",
            options={'ENUM_FLAG'},
            items=(('EMPTY', "Empty", ""),
                   ('CAMERA', "Camera", ""),
                   ('LAMP', "Lamp", ""),
                   ('ARMATURE', "Armature", ""),
                   ('MESH', "Mesh", ""),
                   ('OTHER', "Other", "Other geometry types, like curve, metaball, etc. (converted to meshes)"),
                   ),
            description="Which kind of object to export",
            default={'EMPTY', 'CAMERA', 'LAMP', 'ARMATURE', 'MESH', 'OTHER'},
            )

    use_mesh_modifiers = BoolProperty(
            name="Apply Modifiers",
            description="Apply modifiers to mesh objects (except Armature ones) - "
                        "WARNING: prevents exporting shape keys",
            default=True,
            )
    mesh_smooth_type = EnumProperty(
            name="Smoothing",
            items=(('OFF', "Off", "Don't write smoothing, export normals instead"),
                   ('FACE', "Face", "Write face smoothing"),
                   ('EDGE', "Edge", "Write edge smoothing"),
                   ),
            description=("Export smoothing information "
                         "(prefer 'Off' option if your target importer understand split normals)"),
            default='OFF',
            )
    use_mesh_edges = BoolProperty(
            name="Loose Edges",
            description="Export loose edges (as two-vertices polygons)",
            default=False,
            )
    use_tspace = BoolProperty(
            name="Tangent Space",
            description=("Add binormal and tangent vectors, together with normal they form the tangent space "
                         "(will only work correctly with tris/quads only meshes!)"),
            default=False,
            )
    use_custom_props = BoolProperty(
            name="Custom Properties",
            description="Export custom properties",
            default=False,
            )
    add_leaf_bones = BoolProperty(
            name="Add leaf bones",
            # Fixed: stray comma and a missing space ("...this whenexporting...")
            # in the user-visible description text.
            description=("Append a last bone to the end of each chain to specify bone length. It is useful to "
                         "enable this when exporting into another modelling application and to disable this when "
                         "exporting into a game engine or real-time viewer."),
            default=True  # False for commit!
            )
    primary_bone_axis = EnumProperty(
            name="Primary Bone Axis",
            items=(('X', "X Axis", ""),
                   ('Y', "Y Axis", ""),
                   ('Z', "Z Axis", ""),
                   ('-X', "-X Axis", ""),
                   ('-Y', "-Y Axis", ""),
                   ('-Z', "-Z Axis", ""),
                   ),
            default='Y',
            )
    secondary_bone_axis = EnumProperty(
            name="Secondary Bone Axis",
            items=(('X', "X Axis", ""),
                   ('Y', "Y Axis", ""),
                   ('Z', "Z Axis", ""),
                   ('-X', "-X Axis", ""),
                   ('-Y', "-Y Axis", ""),
                   ('-Z', "-Z Axis", ""),
                   ),
            default='X',
            )
    use_armature_deform_only = BoolProperty(
            name="Only Deform Bones",
            description="Only write deforming bones (and non-deforming ones when they have deforming children)",
            default=False,
            )

    # Anim
    bake_anim = BoolProperty(
            name="Baked Animation",
            description="Export baked keyframe animation",
            default=True,
            )
    bake_anim_use_nla_strips = BoolProperty(
            name="NLA Strips",
            description=("Export each non-muted NLA strip as a separated FBX's AnimStack, if any, "
                         "instead of global scene animation"),
            default=True,
            )
    bake_anim_use_all_actions = BoolProperty(
            name="All Actions",
            description=("Export each action as a separated FBX's AnimStack, "
                         "instead of global scene animation"),
            default=True,
            )
    bake_anim_step = FloatProperty(
            name="Sampling Rate",
            description=("How often to evaluate animated values (in frames)"),
            min=0.01, max=100.0,
            soft_min=0.1, soft_max=10.0,
            default=1.0,
            )
    bake_anim_simplify_factor = FloatProperty(
            name="Simplify",
            # Fixed: unbalanced parenthesis in the user-visible description.
            description=("How much to simplify baked values (0.0 to disable, the higher the more simplified)"),
            min=0.0, max=10.0,  # No simplification to up to 0.05 slope/100 max_frame_step.
            default=1.0,  # default: min slope: 0.005, max frame step: 10.
            )

    path_mode = path_reference_mode
    embed_textures = BoolProperty(
            name="Embed Textures",
            description="Embed textures in FBX binary file (only for \"Copy\" path mode!)",
            default=False,
            )
    batch_mode = EnumProperty(
            name="Batch Mode",
            items=(('OFF', "Off", "Active scene to file"),
                   ('SCENE', "Scene", "Each scene as a file"),
                   ('GROUP', "Group", "Each group as a file"),
                   ),
            )
    use_batch_own_dir = BoolProperty(
            name="Batch Own Dir",
            description="Create a dir for each exported file",
            default=True,
            )
    use_metadata = BoolProperty(
            name="Use Metadata",
            default=True,
            options={'HIDDEN'},
            )

    def draw(self, context):
        """Lay out the export options in the file-browser side panel."""
        layout = self.layout

        layout.prop(self, "use_selection")
        layout.prop(self, "global_scale")
        layout.prop(self, "axis_forward")
        layout.prop(self, "axis_up")
        layout.prop(self, "bake_space_transform")

        layout.separator()
        layout.prop(self, "object_types")
        layout.prop(self, "use_mesh_modifiers")
        layout.prop(self, "mesh_smooth_type")
        layout.prop(self, "use_mesh_edges")
        sub = layout.row()
        # Tangent space only makes sense when exporting split normals ('OFF').
        sub.enabled = self.mesh_smooth_type in {'OFF'}
        sub.prop(self, "use_tspace")
        layout.prop(self, "use_armature_deform_only")
        layout.prop(self, "use_custom_props")
        layout.prop(self, "add_leaf_bones")
        layout.prop(self, "primary_bone_axis")
        layout.prop(self, "secondary_bone_axis")
        layout.prop(self, "bake_anim")
        col = layout.column()
        col.enabled = self.bake_anim
        col.prop(self, "bake_anim_use_nla_strips")
        col.prop(self, "bake_anim_use_all_actions")
        col.prop(self, "bake_anim_step")
        col.prop(self, "bake_anim_simplify_factor")

        layout.separator()
        layout.prop(self, "path_mode")
        col = layout.column()
        col.enabled = (self.path_mode == 'COPY')
        col.prop(self, "embed_textures")
        layout.prop(self, "batch_mode")
        layout.prop(self, "use_batch_own_dir")

    @property
    def check_extension(self):
        # Only enforce the .fbx extension when exporting a single file.
        return self.batch_mode == 'OFF'

    def execute(self, context):
        """Run the experimental binary exporter with the chosen options.

        Raises an Exception when no file path is set; otherwise returns the
        operator result set from export_fbx_bin.save().
        """
        from mathutils import Matrix
        from . import export_fbx_bin

        # Fixed: message previously said "import" in the *export* operator.
        print("Using EXPERIMENTAL FBX export!")
        if not self.filepath:
            raise Exception("filepath not set")

        global_matrix = (Matrix.Scale(self.global_scale, 4) *
                         axis_conversion(to_forward=self.axis_forward,
                                         to_up=self.axis_up,
                                         ).to_4x4())

        keywords = self.as_keywords(ignore=("global_scale",
                                            "check_existing",
                                            "filter_glob",
                                            ))
        keywords["global_matrix"] = global_matrix
        return export_fbx_bin.save(self, context, **keywords)
def menu_func_import(self, context):
    """File > Import menu entry launching the experimental FBX importer."""
    layout = self.layout
    layout.operator(ImportFBX_experimental.bl_idname, text="Experimental FBX (.fbx)")
def menu_func_export(self, context):
    """File > Export menu entry launching the experimental FBX exporter."""
    layout = self.layout
    layout.operator(ExportFBX_experimental.bl_idname, text="Experimental FBX (.fbx)")
def register():
    """Register this addon's classes and hook its File menu entries."""
    bpy.utils.register_module(__name__)

    for menu, entry in ((bpy.types.INFO_MT_file_import, menu_func_import),
                        (bpy.types.INFO_MT_file_export, menu_func_export)):
        menu.append(entry)
def unregister():
    """Unregister this addon's classes and remove its File menu entries."""
    bpy.utils.unregister_module(__name__)

    for menu, entry in ((bpy.types.INFO_MT_file_import, menu_func_import),
                        (bpy.types.INFO_MT_file_export, menu_func_export)):
        menu.remove(entry)
# Allow running the file directly (e.g. from Blender's text editor) for testing.
if __name__ == "__main__":
    register()

View File

@ -1,540 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import bpy
# Explicit public API of this helper module.
__all__ = (
    "CyclesShaderWrapper",
    )
class CyclesShaderWrapper():
    """
    Hard coded shader setup.
    Suitable for importers, adds basic:
    diffuse/spec/alpha/normal/bump/reflect.

    Wraps a material's Cycles node tree behind simple ``*_set()`` methods so
    importers can assign colors/images/mappings without touching nodes
    directly.  NOTE(review): node creation order and link targets below are
    interdependent — do not reorder sections.
    """
    __slots__ = (
        "material",
        "node_out",
        "node_mix_shader_spec",
        "node_mix_shader_alpha",
        "node_mix_shader_refl",
        "node_bsdf_alpha",
        "node_bsdf_diff",
        "node_bsdf_spec",
        "node_bsdf_refl",
        "node_mix_color_alpha",
        "node_mix_color_diff",
        "node_mix_color_spec",
        "node_mix_color_hard",
        "node_mix_color_refl",
        "node_mix_color_bump",
        "node_normalmap",
        "node_texcoords",
        # Image nodes below are only assigned by their *_image_set() method,
        # hence the hasattr()/getattr() checks in mapping_set_from_diffuse().
        "node_image_alpha",
        "node_image_diff",
        "node_image_spec",
        "node_image_hard",
        "node_image_refl",
        "node_image_bump",
        "node_image_normalmap",
        )

    # Grid cell size (in node-editor units) used by _grid_location().
    _col_size = 200
    _row_size = 220

    def __init__(self, material):
        """Build the full node graph on *material* (clears existing nodes)."""
        COLOR_WHITE = 1.0, 1.0, 1.0, 1.0
        COLOR_BLACK = 0.0, 0.0, 0.0, 1.0

        self.material = material
        self.material.use_nodes = True

        tree = self.material.node_tree

        nodes = tree.nodes
        links = tree.links
        nodes.clear()

        # ----
        # Add shaders
        node = nodes.new(type='ShaderNodeOutputMaterial')
        node.label = "Material Out"
        node.location = self._grid_location(6, 4)
        self.node_out = node
        del node

        node = nodes.new(type='ShaderNodeAddShader')
        node.label = "Shader Add Refl"
        node.location = self._grid_location(5, 4)
        self.node_mix_shader_refl = node
        del node
        # Link
        links.new(self.node_mix_shader_refl.outputs["Shader"],
                  self.node_out.inputs["Surface"])

        node = nodes.new(type='ShaderNodeAddShader')
        node.label = "Shader Add Spec"
        node.location = self._grid_location(4, 4)
        self.node_mix_shader_spec = node
        del node
        # Link
        links.new(self.node_mix_shader_spec.outputs["Shader"],
                  self.node_mix_shader_refl.inputs[0])

        # --------------------------------------------------------------------
        # Reflection
        node = nodes.new(type='ShaderNodeBsdfRefraction')
        node.label = "Refl BSDF"
        node.location = self._grid_location(6, 1)
        node.mute = True  # unmute on use
        self.node_bsdf_refl = node
        del node
        # Link
        links.new(self.node_bsdf_refl.outputs["BSDF"],
                  self.node_mix_shader_refl.inputs[1])

        # Mix Refl Color
        node = nodes.new(type='ShaderNodeMixRGB')
        node.label = "Mix Color/Refl"
        node.location = self._grid_location(5, 1)
        node.blend_type = 'MULTIPLY'
        node.inputs["Fac"].default_value = 1.0
        # reverse of most other mix nodes
        node.inputs["Color1"].default_value = COLOR_WHITE  # color
        node.inputs["Color2"].default_value = COLOR_BLACK  # factor
        self.node_mix_color_refl = node
        del node
        # Link
        links.new(self.node_mix_color_refl.outputs["Color"],
                  self.node_bsdf_refl.inputs["Color"])

        # --------------------------------------------------------------------
        # Alpha

        # ----
        # Mix shader
        node = nodes.new(type='ShaderNodeMixShader')
        node.label = "Shader Mix Alpha"
        node.location = self._grid_location(3, 4)
        node.inputs["Fac"].default_value = 1.0  # no alpha by default
        self.node_mix_shader_alpha = node
        del node
        # Link
        links.new(self.node_mix_shader_alpha.outputs["Shader"],
                  self.node_mix_shader_spec.inputs[0])

        # Alpha BSDF
        node = nodes.new(type='ShaderNodeBsdfTransparent')
        node.label = "Alpha BSDF"
        node.location = self._grid_location(2, 4)
        node.mute = True  # unmute on use
        self.node_bsdf_alpha = node
        del node
        # Link
        links.new(self.node_bsdf_alpha.outputs["BSDF"],
                  self.node_mix_shader_alpha.inputs[1])  # first 'Shader'

        # Mix Alpha Color
        node = nodes.new(type='ShaderNodeMixRGB')
        node.label = "Mix Color/Alpha"
        node.location = self._grid_location(1, 5)
        node.blend_type = 'MULTIPLY'
        node.inputs["Fac"].default_value = 1.0
        node.inputs["Color1"].default_value = COLOR_WHITE
        node.inputs["Color2"].default_value = COLOR_WHITE
        self.node_mix_color_alpha = node
        del node
        # Link
        links.new(self.node_mix_color_alpha.outputs["Color"],
                  self.node_mix_shader_alpha.inputs["Fac"])

        # --------------------------------------------------------------------
        # Diffuse

        # Diffuse BSDF
        node = nodes.new(type='ShaderNodeBsdfDiffuse')
        node.label = "Diff BSDF"
        node.location = self._grid_location(2, 3)
        self.node_bsdf_diff = node
        del node
        # Link
        links.new(self.node_bsdf_diff.outputs["BSDF"],
                  self.node_mix_shader_alpha.inputs[2])  # first 'Shader'

        # Mix Diffuse Color
        node = nodes.new(type='ShaderNodeMixRGB')
        node.label = "Mix Color/Diffuse"
        node.location = self._grid_location(1, 3)
        node.blend_type = 'MULTIPLY'
        node.inputs["Fac"].default_value = 1.0
        node.inputs["Color1"].default_value = COLOR_WHITE
        node.inputs["Color2"].default_value = COLOR_WHITE
        self.node_mix_color_diff = node
        del node
        # Link
        links.new(self.node_mix_color_diff.outputs["Color"],
                  self.node_bsdf_diff.inputs["Color"])

        # --------------------------------------------------------------------
        # Specular
        node = nodes.new(type='ShaderNodeBsdfGlossy')
        node.label = "Spec BSDF"
        node.location = self._grid_location(2, 1)
        node.mute = True  # unmute on use
        self.node_bsdf_spec = node
        del node
        # Link (with add shader)
        links.new(self.node_bsdf_spec.outputs["BSDF"],
                  self.node_mix_shader_spec.inputs[1])  # second 'Shader' slot

        node = nodes.new(type='ShaderNodeMixRGB')
        node.label = "Mix Color/Spec"
        node.location = self._grid_location(1, 1)
        node.blend_type = 'MULTIPLY'
        node.inputs["Fac"].default_value = 1.0
        node.inputs["Color1"].default_value = COLOR_WHITE
        node.inputs["Color2"].default_value = COLOR_BLACK
        self.node_mix_color_spec = node
        del node
        # Link
        links.new(self.node_mix_color_spec.outputs["Color"],
                  self.node_bsdf_spec.inputs["Color"])

        node = nodes.new(type='ShaderNodeMixRGB')
        node.label = "Mix Color/Hardness"
        node.location = self._grid_location(1, 0)
        node.blend_type = 'MULTIPLY'
        node.inputs["Fac"].default_value = 1.0
        node.inputs["Color1"].default_value = COLOR_WHITE
        node.inputs["Color2"].default_value = COLOR_WHITE
        self.node_mix_color_hard = node
        del node
        # Link
        links.new(self.node_mix_color_hard.outputs["Color"],
                  self.node_bsdf_spec.inputs["Roughness"])

        # --------------------------------------------------------------------
        # Normal Map
        node = nodes.new(type='ShaderNodeNormalMap')
        node.label = "Normal/Map"
        node.location = self._grid_location(1, 2)
        node.mute = True  # unmute on use
        self.node_normalmap = node
        del node
        # Link (with diff shader)
        socket_src = self.node_normalmap.outputs["Normal"]
        links.new(socket_src,
                  self.node_bsdf_diff.inputs["Normal"])
        # Link (with spec shader)
        links.new(socket_src,
                  self.node_bsdf_spec.inputs["Normal"])
        # Link (with refl shader)
        links.new(socket_src,
                  self.node_bsdf_refl.inputs["Normal"])
        del socket_src

        # --------------------------------------------------------------------
        # Bump Map
        # Mix Refl Color
        node = nodes.new(type='ShaderNodeMixRGB')
        node.label = "Bump/Map"
        node.location = self._grid_location(5, 3)
        node.mute = True  # unmute on use
        node.blend_type = 'MULTIPLY'
        node.inputs["Fac"].default_value = 1.0
        # reverse of most other mix nodes
        node.inputs["Color1"].default_value = COLOR_WHITE  # color
        node.inputs["Color2"].default_value = COLOR_BLACK  # factor
        self.node_mix_color_bump = node
        del node
        # Link
        links.new(self.node_mix_color_bump.outputs["Color"],
                  self.node_out.inputs["Displacement"])

        # --------------------------------------------------------------------
        # Tex Coords
        node = nodes.new(type='ShaderNodeTexCoord')
        node.label = "Texture Coords"
        node.location = self._grid_location(-3, 3)
        self.node_texcoords = node
        del node
        # no links, only use when needed!

    @staticmethod
    def _image_create_helper(image, node_dst, sockets_dst, use_alpha=False):
        """Create an image-texture node left of *node_dst* and link its
        Color (or Alpha) output to each socket in *sockets_dst*."""
        tree = node_dst.id_data
        nodes = tree.nodes
        links = tree.links

        node = nodes.new(type='ShaderNodeTexImage')
        node.image = image
        node.location = node_dst.location
        node.location.x -= CyclesShaderWrapper._col_size
        for socket in sockets_dst:
            links.new(node.outputs["Alpha" if use_alpha else "Color"],
                      socket)
        return node

    @staticmethod
    def _mapping_create_helper(node_dst, socket_src,
                               translation, rotation, scale, clamp):
        """Find or create a mapping node feeding *node_dst*'s Vector input,
        wire it to *socket_src*, and apply the given transform/clamp."""
        tree = node_dst.id_data
        nodes = tree.nodes
        links = tree.links

        # in most cases:
        # (socket_src == self.node_texcoords.outputs['UV'])

        node_map = None

        # find an existing mapping node (allows multiple calls)
        if node_dst.inputs["Vector"].links:
            node_map = node_dst.inputs["Vector"].links[0].from_node

        if node_map is None:
            node_map = nodes.new(type='ShaderNodeMapping')
            node_map.vector_type = 'TEXTURE'
            node_map.location = node_dst.location
            node_map.location.x -= CyclesShaderWrapper._col_size

            node_map.width = 160.0

            # link mapping -> image node
            links.new(node_map.outputs["Vector"],
                      node_dst.inputs["Vector"])

            # link coord -> mapping
            links.new(socket_src,
                      node_map.inputs["Vector"])

        if translation is not None:
            node_map.translation = translation
        if scale is not None:
            node_map.scale = scale
        if rotation is not None:
            node_map.rotation = rotation

        if clamp is not None:
            # awkward conversion UV clamping to minmax
            node_map.min = (0.0, 0.0, 0.0)
            node_map.max = (1.0, 1.0, 1.0)

            if clamp in {(False, False), (True, True)}:
                node_map.use_min = node_map.use_max = clamp[0]
            else:
                node_map.use_min = node_map.use_max = True
                # use bool as index
                node_map.min[not clamp[0]] = -1000000000.0
                node_map.max[not clamp[0]] = 1000000000.0

        return node_map

    # note, all ***_mapping_set() functions currently work the same way
    # (only with different image arg), could generalize.

    @staticmethod
    def _grid_location(x, y):
        """Convert (column, row) grid coordinates to node-editor location."""
        return (x * CyclesShaderWrapper._col_size,
                y * CyclesShaderWrapper._row_size)

    def diffuse_color_set(self, color):
        """Set the diffuse base color (RGB triple)."""
        self.node_mix_color_diff.inputs["Color1"].default_value[0:3] = color

    def diffuse_image_set(self, image):
        """Attach *image* as the diffuse texture."""
        node = self.node_mix_color_diff
        self.node_image_diff = (
            self._image_create_helper(image, node, (node.inputs["Color2"],)))

    def diffuse_mapping_set(self, coords='UV',
                            translation=None, rotation=None, scale=None, clamp=None):
        """Set texture-coordinate mapping for the diffuse image node."""
        return self._mapping_create_helper(
            self.node_image_diff, self.node_texcoords.outputs[coords], translation, rotation, scale, clamp)

    def specular_color_set(self, color):
        """Set the specular color; mutes the glossy BSDF when color is black."""
        self.node_bsdf_spec.mute = max(color) <= 0.0
        self.node_mix_color_spec.inputs["Color1"].default_value[0:3] = color

    def specular_image_set(self, image):
        """Attach *image* as the specular texture."""
        node = self.node_mix_color_spec
        self.node_image_spec = (
            self._image_create_helper(image, node, (node.inputs["Color2"],)))

    def specular_mapping_set(self, coords='UV',
                             translation=None, rotation=None, scale=None, clamp=None):
        """Set texture-coordinate mapping for the specular image node."""
        return self._mapping_create_helper(
            self.node_image_spec, self.node_texcoords.outputs[coords], translation, rotation, scale, clamp)

    def hardness_value_set(self, value):
        """Set the hardness (glossy roughness) as a scalar value."""
        node = self.node_mix_color_hard
        node.inputs["Color1"].default_value = (value,) * 4

    def hardness_image_set(self, image):
        """Attach *image* as the hardness texture."""
        node = self.node_mix_color_hard
        self.node_image_hard = (
            self._image_create_helper(image, node, (node.inputs["Color2"],)))

    def hardness_mapping_set(self, coords='UV',
                             translation=None, rotation=None, scale=None, clamp=None):
        """Set texture-coordinate mapping for the hardness image node."""
        return self._mapping_create_helper(
            self.node_image_hard, self.node_texcoords.outputs[coords], translation, rotation, scale, clamp)

    def reflect_color_set(self, color):
        """Set the reflection (refraction BSDF) color."""
        node = self.node_mix_color_refl
        node.inputs["Color1"].default_value[0:3] = color

    def reflect_factor_set(self, value):
        # XXX, conflicts with image
        self.node_bsdf_refl.mute = value <= 0.0
        node = self.node_mix_color_refl
        node.inputs["Color2"].default_value = (value,) * 4

    def reflect_image_set(self, image):
        """Attach *image* as the reflection factor texture (unmutes BSDF)."""
        self.node_bsdf_refl.mute = False
        node = self.node_mix_color_refl
        self.node_image_refl = (
            self._image_create_helper(image, node, (node.inputs["Color2"],)))

    def reflect_mapping_set(self, coords='UV',
                            translation=None, rotation=None, scale=None, clamp=None):
        """Set texture-coordinate mapping for the reflection image node."""
        return self._mapping_create_helper(
            self.node_image_refl, self.node_texcoords.outputs[coords], translation, rotation, scale, clamp)

    def alpha_value_set(self, value):
        """Set scalar alpha; keeps the transparent BSDF muted only while
        the material stays fully opaque (value >= 1.0)."""
        self.node_bsdf_alpha.mute &= (value >= 1.0)
        node = self.node_mix_color_alpha
        node.inputs["Color1"].default_value = (value,) * 4

    def alpha_image_set(self, image):
        """Attach *image*'s alpha channel as the material alpha."""
        self.node_bsdf_alpha.mute = False
        node = self.node_mix_color_alpha
        # note: use_alpha may need to be configurable
        # its not always the case that alpha channels use the image alpha
        # a greyscale image may also be used.
        self.node_image_alpha = (
            self._image_create_helper(image, node, (node.inputs["Color2"],), use_alpha=True))

    def alpha_mapping_set(self, coords='UV',
                          translation=None, rotation=None, scale=None, clamp=None):
        """Set texture-coordinate mapping for the alpha image node."""
        return self._mapping_create_helper(
            self.node_image_alpha, self.node_texcoords.outputs[coords], translation, rotation, scale, clamp)

    def alpha_image_set_from_diffuse(self):
        """Reuse the diffuse image's alpha channel as the material alpha."""
        # XXX, remove?
        tree = self.node_mix_color_diff.id_data
        links = tree.links

        self.node_bsdf_alpha.mute = False
        node_image = self.node_image_diff
        node = self.node_mix_color_alpha
        # 'if 1' branch links directly; the disabled 'else' path would create
        # a separate (labelled) alpha image node instead.
        if 1:
            links.new(node_image.outputs["Alpha"],
                      node.inputs["Color2"])
        else:
            self.alpha_image_set(node_image.image)
            self.node_image_alpha.label = "Image Texture_ALPHA"

    def normal_factor_set(self, value):
        """Set the normal-map strength."""
        node = self.node_normalmap
        node.inputs["Strength"].default_value = value

    def normal_image_set(self, image):
        """Attach *image* as a (non-color) normal map and unmute the node."""
        self.node_normalmap.mute = False
        node = self.node_normalmap
        self.node_image_normalmap = (
            self._image_create_helper(image, node, (node.inputs["Color"],)))
        self.node_image_normalmap.color_space = 'NONE'

    def normal_mapping_set(self, coords='UV',
                           translation=None, rotation=None, scale=None, clamp=None):
        """Set texture-coordinate mapping for the normal-map image node."""
        return self._mapping_create_helper(
            self.node_image_normalmap, self.node_texcoords.outputs[coords], translation, rotation, scale, clamp)

    def bump_factor_set(self, value):
        """Set the bump strength; mutes the bump mix node at zero."""
        node = self.node_mix_color_bump
        node.mute = (value <= 0.0)
        node.inputs["Color1"].default_value = (value,) * 4

    def bump_image_set(self, image):
        """Attach *image* as the bump (displacement) texture."""
        node = self.node_mix_color_bump
        self.node_image_bump = (
            self._image_create_helper(image, node, (node.inputs["Color2"],)))

    def bump_mapping_set(self, coords='UV',
                         translation=None, rotation=None, scale=None, clamp=None):
        """Set texture-coordinate mapping for the bump image node."""
        return self._mapping_create_helper(
            self.node_image_bump, self.node_texcoords.outputs[coords], translation, rotation, scale, clamp)

    def mapping_set_from_diffuse(self,
                                 specular=True,
                                 hardness=True,
                                 reflect=True,
                                 alpha=True,
                                 normal=True,
                                 bump=True):
        """
        Set all mapping based on diffuse
        (sometimes we want to assume default mapping follows diffuse).
        """
        # get mapping from diffuse
        if not hasattr(self, "node_image_diff"):
            return

        links = self.node_image_diff.inputs["Vector"].links
        if not links:
            return

        mapping_out_socket = links[0].from_socket

        tree = self.material.node_tree
        links = tree.links

        def node_image_mapping_apply(node_image_attr):
            # ensure strings are valid attrs
            assert(node_image_attr in self.__slots__)

            node_image = getattr(self, node_image_attr, None)

            if node_image is not None:
                node_image_input_socket = node_image.inputs["Vector"]

                # don't overwrite existing sockets
                if not node_image_input_socket.links:
                    links.new(mapping_out_socket,
                              node_image_input_socket)

        if specular:
            node_image_mapping_apply("node_image_spec")
        if hardness:
            node_image_mapping_apply("node_image_hard")
        if reflect:
            node_image_mapping_apply("node_image_refl")
        if alpha:
            node_image_mapping_apply("node_image_alpha")
        if normal:
            node_image_mapping_apply("node_image_normalmap")
        if bump:
            node_image_mapping_apply("node_image_bump")

View File

@ -1,74 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Script copyright (C) 2006-2012, assimp team
# Script copyright (C) 2013 Blender Foundation
# Type codes used by the FBX binary format to tag primitive property values.
# Each constant holds the integer byte value of the corresponding ASCII letter.
BOOL = ord('C')
INT16 = ord('Y')
INT32 = ord('I')
INT64 = ord('L')
FLOAT32 = ord('F')
FLOAT64 = ord('D')
BYTES = ord('R')
STRING = ord('S')

# Array-valued variants use the lowercase letters.
INT32_ARRAY = ord('i')
INT64_ARRAY = ord('l')
FLOAT32_ARRAY = ord('f')
FLOAT64_ARRAY = ord('d')
BOOL_ARRAY = ord('b')
BYTE_ARRAY = ord('c')
# array types - actual length may vary (depending on underlying C implementation)!
import array

# For now, bytes and bool are assumed always 1byte.
ARRAY_BOOL = 'b'
ARRAY_BYTE = 'B'


def _array_typecode(candidates, itemsize, error_message):
    """Return the first typecode in *candidates* whose array itemsize is
    *itemsize* bytes, or raise Exception(error_message) when none matches."""
    for code in candidates:
        if array.array(code).itemsize == itemsize:
            return code
    raise Exception(error_message)


ARRAY_INT32 = _array_typecode('ilq', 4, "Impossible to get a 4-bytes integer type for array!")
ARRAY_INT64 = _array_typecode('ilq', 8, "Impossible to get an 8-bytes integer type for array!")
ARRAY_FLOAT32 = _array_typecode('fd', 4, "Impossible to get a 4-bytes float type for array!")
ARRAY_FLOAT64 = _array_typecode('fd', 8, "Impossible to get an 8-bytes float type for array!")

View File

@ -1,321 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Script copyright (C) 2013 Campbell Barton
try:
from . import data_types
except:
import data_types
from struct import pack
import array
import zlib
# Every nested scope is terminated by a 13-byte all-NUL sentinel record.
_BLOCK_SENTINEL_LENGTH = 13
_BLOCK_SENTINEL_DATA = (b'\0' * _BLOCK_SENTINEL_LENGTH)
# Data is written little-endian; arrays are byte-swapped on big-endian hosts.
_IS_BIG_ENDIAN = (__import__("sys").byteorder != 'little')
# Fixed magic bytes that open every binary FBX file.
_HEAD_MAGIC = b'Kaydara FBX Binary\x20\x20\x00\x1a\x00'
# fbx has very strict CRC rules, all based on file timestamp
# until we figure these out, write files at a fixed time. (workaround!)
# Assumes: CreationTime
_TIME_ID = b'1970-01-01 10:00:00:000'
_FILE_ID = b'\x28\xb3\x2a\xeb\xb6\x24\xcc\xc2\xbf\xc8\xb0\x2a\xa9\x2b\xfc\xf1'
_FOOT_ID = b'\xfa\xbc\xab\x09\xd0\xc8\xd4\x66\xb1\x76\xfb\x83\x1c\xf7\x26\x7e'
# Awful exceptions: those "classes" of elements seem to need block sentinel even when having no children and some props.
_ELEMS_ID_ALWAYS_BLOCK_SENTINEL = {b"AnimationStack", b"AnimationLayer"}
class FBXElem:
    """One element (node) of a binary FBX document.

    An element has a bytes ``id``, a list of typed properties and a list of
    child elements.  Properties are packed immediately by the ``add_*``
    methods; serialization is a two-pass process (``_calc_offsets`` then
    ``_write``) because the format stores the absolute end offset of every
    scope.
    """
    __slots__ = (
        "id",
        "props",
        "props_type",
        "elems",

        "_props_length",  # combined byte length of all packed props
        "_end_offset",  # byte offset from the start of the file.
        )

    def __init__(self, id):
        assert(len(id) < 256)  # length must fit in a uint8
        self.id = id
        self.props = []
        self.props_type = bytearray()
        self.elems = []

        self._end_offset = -1
        self._props_length = -1

    def add_bool(self, data):
        """Append a boolean property."""
        assert(isinstance(data, bool))
        data = pack('?', data)

        self.props_type.append(data_types.BOOL)
        self.props.append(data)

    def add_int16(self, data):
        """Append a 16 bit signed integer property."""
        assert(isinstance(data, int))
        data = pack('<h', data)

        self.props_type.append(data_types.INT16)
        self.props.append(data)

    def add_int32(self, data):
        """Append a 32 bit signed integer property."""
        assert(isinstance(data, int))
        data = pack('<i', data)

        self.props_type.append(data_types.INT32)
        self.props.append(data)

    def add_int64(self, data):
        """Append a 64 bit signed integer property."""
        assert(isinstance(data, int))
        data = pack('<q', data)

        self.props_type.append(data_types.INT64)
        self.props.append(data)

    def add_float32(self, data):
        """Append a 32 bit float property."""
        assert(isinstance(data, float))
        data = pack('<f', data)

        self.props_type.append(data_types.FLOAT32)
        self.props.append(data)

    def add_float64(self, data):
        """Append a 64 bit float property."""
        assert(isinstance(data, float))
        data = pack('<d', data)

        self.props_type.append(data_types.FLOAT64)
        self.props.append(data)

    def add_bytes(self, data):
        """Append a raw binary property (uint32 length prefixed)."""
        assert(isinstance(data, bytes))
        data = pack('<I', len(data)) + data

        self.props_type.append(data_types.BYTES)
        self.props.append(data)

    def add_string(self, data):
        """Append a string property given as bytes (uint32 length prefixed)."""
        assert(isinstance(data, bytes))
        data = pack('<I', len(data)) + data

        self.props_type.append(data_types.STRING)
        self.props.append(data)

    def add_string_unicode(self, data):
        """Append a string property given as str, encoded as UTF-8."""
        assert(isinstance(data, str))
        data = data.encode('utf8')
        data = pack('<I', len(data)) + data

        self.props_type.append(data_types.STRING)
        self.props.append(data)

    def _add_array_helper(self, data, array_type, prop_type):
        """Append an array property; payloads over 128 bytes are zlib-deflated."""
        assert(isinstance(data, array.array))
        assert(data.typecode == array_type)

        length = len(data)

        if _IS_BIG_ENDIAN:
            data = data[:]  # copy so the caller's array is not mutated
            data.byteswap()
        data = data.tobytes()

        # mimic behavior of fbxconverter (also common sense)
        # we could make this configurable.
        encoding = 0 if len(data) <= 128 else 1
        if encoding == 1:
            data = zlib.compress(data, 1)

        comp_len = len(data)

        data = pack('<3I', length, encoding, comp_len) + data

        self.props_type.append(prop_type)
        self.props.append(data)

    def add_int32_array(self, data):
        """Append an int32 array property."""
        if not isinstance(data, array.array):
            data = array.array(data_types.ARRAY_INT32, data)
        self._add_array_helper(data, data_types.ARRAY_INT32, data_types.INT32_ARRAY)

    def add_int64_array(self, data):
        """Append an int64 array property."""
        if not isinstance(data, array.array):
            data = array.array(data_types.ARRAY_INT64, data)
        self._add_array_helper(data, data_types.ARRAY_INT64, data_types.INT64_ARRAY)

    def add_float32_array(self, data):
        """Append a float32 array property."""
        if not isinstance(data, array.array):
            data = array.array(data_types.ARRAY_FLOAT32, data)
        self._add_array_helper(data, data_types.ARRAY_FLOAT32, data_types.FLOAT32_ARRAY)

    def add_float64_array(self, data):
        """Append a float64 array property."""
        if not isinstance(data, array.array):
            data = array.array(data_types.ARRAY_FLOAT64, data)
        self._add_array_helper(data, data_types.ARRAY_FLOAT64, data_types.FLOAT64_ARRAY)

    def add_bool_array(self, data):
        """Append a boolean array property."""
        if not isinstance(data, array.array):
            data = array.array(data_types.ARRAY_BOOL, data)
        self._add_array_helper(data, data_types.ARRAY_BOOL, data_types.BOOL_ARRAY)

    def add_byte_array(self, data):
        """Append a byte array property."""
        if not isinstance(data, array.array):
            data = array.array(data_types.ARRAY_BYTE, data)
        self._add_array_helper(data, data_types.ARRAY_BYTE, data_types.BYTE_ARRAY)

    # -------------------------
    # internal helper functions

    def _calc_offsets(self, offset, is_last):
        """
        Call before writing, calculates fixed offsets.
        """
        assert(self._end_offset == -1)
        assert(self._props_length == -1)

        offset += 12  # 3 uints
        offset += 1 + len(self.id)  # len + idname

        props_length = 0
        for data in self.props:
            # 1 byte for the prop type
            props_length += 1 + len(data)
        self._props_length = props_length
        offset += props_length

        offset = self._calc_offsets_children(offset, is_last)

        self._end_offset = offset
        return offset

    def _calc_offsets_children(self, offset, is_last):
        """Accumulate offsets of children, accounting for sentinel records."""
        if self.elems:
            elem_last = self.elems[-1]
            for elem in self.elems:
                offset = elem._calc_offsets(offset, (elem is elem_last))
            offset += _BLOCK_SENTINEL_LENGTH
        elif not self.props or self.id in _ELEMS_ID_ALWAYS_BLOCK_SENTINEL:
            if not is_last:
                offset += _BLOCK_SENTINEL_LENGTH

        return offset

    def _write(self, write, tell, is_last):
        """Write this element (header, id, props, children) via *write*."""
        assert(self._end_offset != -1)
        assert(self._props_length != -1)

        write(pack('<3I', self._end_offset, len(self.props), self._props_length))

        write(bytes((len(self.id),)))
        write(self.id)

        for i, data in enumerate(self.props):
            write(bytes((self.props_type[i],)))
            write(data)

        self._write_children(write, tell, is_last)

        if tell() != self._end_offset:
            # was '(end_offset - tell())': 'end_offset' is undefined here, so the
            # intended IOError was masked by a NameError.
            raise IOError("scope length not reached, "
                          "something is wrong (%d)" % (self._end_offset - tell()))

    def _write_children(self, write, tell, is_last):
        """Write child elements followed by the NUL sentinel where required."""
        if self.elems:
            elem_last = self.elems[-1]
            for elem in self.elems:
                assert(elem.id != b'')
                elem._write(write, tell, (elem is elem_last))
            write(_BLOCK_SENTINEL_DATA)
        elif not self.props or self.id in _ELEMS_ID_ALWAYS_BLOCK_SENTINEL:
            if not is_last:
                write(_BLOCK_SENTINEL_DATA)
def _write_timedate_hack(elem_root):
    """Overwrite the root-level ``FileId`` and ``CreationTime`` fields with
    fixed values, since the FBX checksum scheme based on the timestamp is
    not yet understood (workaround)."""
    def _replace(elem, expected_code, add_value):
        # sanity check: exactly one property of the expected type
        assert(elem.props_type[0] == expected_code)
        assert(len(elem.props_type) == 1)
        elem.props.clear()
        elem.props_type.clear()
        add_value(elem)

    patched = 0
    for child in elem_root.elems:
        if child.id == b'FileId':
            _replace(child, b'R'[0], lambda e: e.add_bytes(_FILE_ID))
            patched += 1
        elif child.id == b'CreationTime':
            _replace(child, b'S'[0], lambda e: e.add_string(_TIME_ID))
            patched += 1

        if patched == 2:
            break

    if patched != 2:
        print("Missing fields!")
def write(fn, elem_root, version):
    """Serialize the element tree *elem_root* as a binary FBX file *fn*.

    :arg fn: output file path.
    :arg elem_root: root FBXElem (must have an empty id, children are written).
    :arg version: FBX version number written into the header and footer.
    """
    assert(elem_root.id == b'')

    with open(fn, 'wb') as f:
        write = f.write
        tell = f.tell

        write(_HEAD_MAGIC)
        write(pack('<I', version))

        # hack since we don't decode time.
        # ideally we would _not_ modify this data.
        _write_timedate_hack(elem_root)

        # two-pass: compute absolute end offsets first, then write.
        elem_root._calc_offsets_children(tell(), False)
        elem_root._write_children(write, tell, False)

        write(_FOOT_ID)
        write(b'\x00' * 4)

        # padding for alignment (values between 1 & 16 observed)
        # if already aligned to 16, add a full 16 bytes padding.
        ofs = tell()
        pad = ((ofs + 15) & ~15) - ofs
        if pad == 0:
            pad = 16

        write(b'\0' * pad)

        write(pack('<I', version))

        # unknown magic (always the same)
        write(b'\0' * 120)
        write(b'\xf8\x5a\x8c\x6a\xde\xf5\xd9\x7e\xec\xe9\x0c\xe3\x75\x8f\x29\x0b')

File diff suppressed because it is too large Load Diff

View File

@ -1,330 +0,0 @@
#!/usr/bin/env python3
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Script copyright (C) 2006-2012, assimp team
# Script copyright (C) 2013 Blender Foundation
"""
Usage
=====
fbx2json [FILES]...
This script will write a JSON file for each FBX argument given.
Output
======
The JSON data is formatted into a list of nested lists of 4 items:
``[id, [data, ...], "data_types", [subtree, ...]]``
Where each list may be empty, and the items in
the subtree are formatted the same way.
data_types is a string, aligned with data that specifies a type
for each property.
The types are as follows:
* 'Y': - INT16
* 'C': - BOOL
* 'I': - INT32
* 'F': - FLOAT32
* 'D': - FLOAT64
* 'L': - INT64
* 'R': - BYTES
* 'S': - STRING
* 'f': - FLOAT32_ARRAY
* 'i': - INT32_ARRAY
* 'd': - FLOAT64_ARRAY
* 'l': - INT64_ARRAY
* 'b': - BOOL ARRAY
* 'c': - BYTE ARRAY
Note that key:value pairs aren't used since the id's are not
ensured to be unique.
"""
# ----------------------------------------------------------------------------
# FBX Binary Parser
from struct import unpack
import array
import zlib
# at the end of each nested block, there is a NUL record to indicate
# that the sub-scope exists (i.e. to distinguish between P: and P : {})
# this NUL record is 13 bytes long.
_BLOCK_SENTINEL_LENGTH = 13
_BLOCK_SENTINEL_DATA = (b'\0' * _BLOCK_SENTINEL_LENGTH)
# data is stored little-endian; arrays are byte-swapped on big-endian hosts
_IS_BIG_ENDIAN = (__import__("sys").byteorder != 'little')
# fixed magic bytes that open every binary FBX file
_HEAD_MAGIC = b'Kaydara FBX Binary\x20\x20\x00\x1a\x00'
from collections import namedtuple
# a parsed element: (id bytes, property values, property type codes, children)
FBXElem = namedtuple("FBXElem", ("id", "props", "props_type", "elems"))
del namedtuple  # keep the module namespace clean
def read_uint(read):
    """Read a little-endian unsigned 32 bit integer using *read*."""
    (value,) = unpack(b'<I', read(4))
    return value
def read_ubyte(read):
    """Read a single unsigned byte using *read*."""
    (value,) = unpack(b'B', read(1))
    return value
def read_string_ubyte(read):
    """Read a short string/name prefixed by a single length byte."""
    return read(read_ubyte(read))
def unpack_array(read, array_type, array_stride, array_byteswap):
    """Read one array property: a 3-uint32 header (element count, encoding,
    payload byte length) followed by the payload, zlib-inflated when
    encoding == 1.  Returns an ``array.array`` of *array_type*."""
    length = read_uint(read)
    encoding = read_uint(read)
    comp_len = read_uint(read)

    data = read(comp_len)
    if encoding == 1:
        data = zlib.decompress(data)
    assert(length * array_stride == len(data))

    values = array.array(array_type, data)
    if array_byteswap and _IS_BIG_ENDIAN:
        values.byteswap()
    return values
# maps the single-character property type code to a reader for that type
read_data_dict = {
    b'Y'[0]: lambda read: unpack(b'<h', read(2))[0],  # 16 bit int
    b'C'[0]: lambda read: unpack(b'?', read(1))[0],  # 1 byte bool (yes/no)
    b'I'[0]: lambda read: unpack(b'<i', read(4))[0],  # 32 bit int
    b'F'[0]: lambda read: unpack(b'<f', read(4))[0],  # 32 bit float
    b'D'[0]: lambda read: unpack(b'<d', read(8))[0],  # 64 bit float
    b'L'[0]: lambda read: unpack(b'<q', read(8))[0],  # 64 bit int
    b'R'[0]: lambda read: read(read_uint(read)),  # binary data
    b'S'[0]: lambda read: read(read_uint(read)),  # string data
    b'f'[0]: lambda read: unpack_array(read, 'f', 4, False),  # array (float)
    b'i'[0]: lambda read: unpack_array(read, 'i', 4, True),  # array (int)
    b'd'[0]: lambda read: unpack_array(read, 'd', 8, False),  # array (double)
    b'l'[0]: lambda read: unpack_array(read, 'q', 8, True),  # array (long)
    b'b'[0]: lambda read: unpack_array(read, 'b', 1, False),  # array (bool)
    b'c'[0]: lambda read: unpack_array(read, 'B', 1, False),  # array (ubyte)
}
def read_elem(read, tell, use_namedtuple):
    """Read one element record at the current stream position.

    Returns an FBXElem (or a plain tuple when *use_namedtuple* is False),
    or None for the NUL record that terminates a scope.
    """
    # [0] the offset at which this block ends
    # [1] the number of properties in the scope
    # [2] the length of the property list
    end_offset = read_uint(read)
    if end_offset == 0:
        return None
    prop_count = read_uint(read)
    prop_length = read_uint(read)  # total byte size of the props (not needed here)
    elem_id = read_string_ubyte(read)  # elem name of the scope/key
    elem_props_type = bytearray(prop_count)  # elem property types
    elem_props_data = [None] * prop_count  # elem properties (if any)
    elem_subtree = []  # elem children (if any)
    for i in range(prop_count):
        data_type = read(1)[0]
        elem_props_data[i] = read_data_dict[data_type](read)
        elem_props_type[i] = data_type
    if tell() < end_offset:
        # a sub-scope exists: read children until only the sentinel remains
        while tell() < (end_offset - _BLOCK_SENTINEL_LENGTH):
            elem_subtree.append(read_elem(read, tell, use_namedtuple))
        if read(_BLOCK_SENTINEL_LENGTH) != _BLOCK_SENTINEL_DATA:
            raise IOError("failed to read nested block sentinel, "
                          "expected all bytes to be 0")
    if tell() != end_offset:
        raise IOError("scope length not reached, something is wrong")
    args = (elem_id, elem_props_data, elem_props_type, elem_subtree)
    return FBXElem(*args) if use_namedtuple else args
def parse_version(fn):
    """
    Return the FBX version,
    if the file isn't a binary FBX return zero.
    """
    with open(fn, 'rb') as f:
        if f.read(len(_HEAD_MAGIC)) != _HEAD_MAGIC:
            return 0
        return read_uint(f.read)
def parse(fn, use_namedtuple=True):
    """Parse the binary FBX file *fn*; return (root element, fbx version)."""
    with open(fn, 'rb') as f:
        read = f.read
        tell = f.tell
        if read(len(_HEAD_MAGIC)) != _HEAD_MAGIC:
            raise IOError("Invalid header")
        fbx_version = read_uint(read)

        # read top-level elements until the terminating NUL record
        root_elems = []
        for elem in iter(lambda: read_elem(read, tell, use_namedtuple), None):
            root_elems.append(elem)

        args = (b'', [], bytearray(0), root_elems)
        return FBXElem(*args) if use_namedtuple else args, fbx_version
# ----------------------------------------------------------------------------
# Inline Modules

# NOTE: type(array) is the module type; fresh module objects are created as
# stand-ins so this single-file script mirrors the pyfbx package layout.

# pyfbx.data_types
data_types = type(array)("data_types")
data_types.__dict__.update(
    dict(
        INT16 = b'Y'[0],
        BOOL = b'C'[0],
        INT32 = b'I'[0],
        FLOAT32 = b'F'[0],
        FLOAT64 = b'D'[0],
        INT64 = b'L'[0],
        BYTES = b'R'[0],
        STRING = b'S'[0],
        FLOAT32_ARRAY = b'f'[0],
        INT32_ARRAY = b'i'[0],
        FLOAT64_ARRAY = b'd'[0],
        INT64_ARRAY = b'l'[0],
        BOOL_ARRAY = b'b'[0],
        BYTE_ARRAY = b'c'[0],
        ))

# pyfbx.parse_bin
parse_bin = type(array)("parse_bin")
parse_bin.__dict__.update(
    dict(
        parse = parse
        ))
# ----------------------------------------------------------------------------
# JSON Converter
# from pyfbx import parse_bin, data_types
import json
import array
def fbx2json_property_as_string(prop, prop_type):
    """Return a JSON-compatible string representation of one FBX property."""
    if prop_type == data_types.STRING:
        # FBX uses '\x00\x01' as a name separator; show it as '::'
        return json.dumps(prop.decode('utf-8').replace('\x00\x01', '::'))

    kind = type(prop)
    if kind is bytes:
        return json.dumps(repr(prop)[2:-1])
    if kind is bool:
        return json.dumps(prop)
    if kind is array.array:
        return repr(list(prop))
    return repr(prop)
def fbx2json_properties_as_string(fbx_elem):
    """Return all properties of *fbx_elem* as one comma separated fragment."""
    parts = [fbx2json_property_as_string(prop, prop_type)
             for prop, prop_type in zip(fbx_elem.props, fbx_elem.props_type)]
    return ", ".join(parts)
def fbx2json_recurse(fw, fbx_elem, ident, is_last):
    """Write *fbx_elem* and its subtree as one JSON list item via *fw*.

    :arg fw: write callable of the output file.
    :arg ident: current indentation prefix string.
    :arg is_last: True for the last sibling (suppresses the trailing comma).
    """
    fbx_elem_id = fbx_elem.id.decode('utf-8')
    fw('%s["%s", ' % (ident, fbx_elem_id))
    fw('[%s], ' % fbx2json_properties_as_string(fbx_elem))
    fw('"%s", ' % (fbx_elem.props_type.decode('ascii')))
    fw('[')
    if fbx_elem.elems:
        fw('\n')
        ident_sub = ident + " "
        for fbx_elem_sub in fbx_elem.elems:
            fbx2json_recurse(fw, fbx_elem_sub, ident_sub,
                             fbx_elem_sub is fbx_elem.elems[-1])
    fw(']')
    fw(']%s' % ('' if is_last else ',\n'))
def fbx2json(fn):
    """Convert the binary FBX file *fn* into a ``.json`` file alongside it."""
    import os
    fn_json = "%s.json" % os.path.splitext(fn)[0]
    print("Writing: %r " % fn_json, end="")
    fbx_root_elem, fbx_version = parse(fn, use_namedtuple=True)
    print("(Version %d) ..." % fbx_version)
    # non-ASCII bytes are escaped so the output is portable
    with open(fn_json, 'w', encoding="ascii", errors='xmlcharrefreplace') as f:
        fw = f.write
        fw('[\n')
        ident_sub = " "
        for fbx_elem_sub in fbx_root_elem.elems:
            fbx2json_recurse(f.write, fbx_elem_sub, ident_sub,
                             fbx_elem_sub is fbx_root_elem.elems[-1])
        fw(']\n')
# ----------------------------------------------------------------------------
# Command Line
def main():
    """Command line entry point: convert each FBX argument to a JSON file."""
    import sys
    if "--help" in sys.argv:
        print(__doc__)
        return
    for arg in sys.argv[1:]:
        try:
            fbx2json(arg)
        except Exception:
            # keep converting the remaining files; the previous bare 'except'
            # also swallowed KeyboardInterrupt/SystemExit, which now propagate.
            print("Failed to convert %r, error:" % arg)
            import traceback
            traceback.print_exc()

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,176 +0,0 @@
#!/usr/bin/env python3
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Script copyright (C) 2014 Blender Foundation
"""
Usage
=====
json2fbx [FILES]...
This script will write a binary FBX file for each JSON argument given.
Input
======
The JSON data is formatted into a list of nested lists of 4 items:
``[id, [data, ...], "data_types", [subtree, ...]]``
Where each list may be empty, and the items in
the subtree are formatted the same way.
data_types is a string, aligned with data that specifies a type
for each property.
The types are as follows:
* 'Y': - INT16
* 'C': - BOOL
* 'I': - INT32
* 'F': - FLOAT32
* 'D': - FLOAT64
* 'L': - INT64
* 'R': - BYTES
* 'S': - STRING
* 'f': - FLOAT32_ARRAY
* 'i': - INT32_ARRAY
* 'd': - FLOAT64_ARRAY
* 'l': - INT64_ARRAY
* 'b': - BOOL ARRAY
* 'c': - BYTE ARRAY
Note that key:value pairs aren't used since the id's are not
ensured to be unique.
"""
import encode_bin
def elem_empty(elem, name):
    """Create a new FBXElem *name* and, unless *elem* is None, append it to
    *elem*'s children.  Returns the new element."""
    child = encode_bin.FBXElem(name)
    if elem is not None:
        elem.elems.append(child)
    return child
def parse_json_rec(fbx_root, json_node):
    """Recursively convert one JSON node ``[id, [data...], "data_types", [subtree...]]``
    into an FBXElem appended to *fbx_root*.

    Returns the FBX version found in an ``FBXVersion`` element within this
    subtree, or 0 when none was found.
    """
    name, data, data_types, children = json_node
    ver = 0
    assert(len(data_types) == len(data))
    e = elem_empty(fbx_root, name.encode())
    for d, dt in zip(data, data_types):
        if dt == "C":
            e.add_bool(d)
        elif dt == "Y":
            e.add_int16(d)
        elif dt == "I":
            e.add_int32(d)
        elif dt == "L":
            e.add_int64(d)
        elif dt == "F":
            e.add_float32(d)
        elif dt == "D":
            e.add_float64(d)
        elif dt == "R":
            # SECURITY NOTE: eval of file-provided text -- only run this tool
            # on trusted JSON input.
            d = eval('b"""' + d + '"""')
            e.add_bytes(d)
        elif dt == "S":
            d = d.encode().replace(b"::", b"\x00\x01")
            e.add_string(d)
        elif dt == "i":
            e.add_int32_array(d)
        elif dt == "l":
            e.add_int64_array(d)
        elif dt == "f":
            e.add_float32_array(d)
        elif dt == "d":
            e.add_float64_array(d)
        elif dt == "b":
            # 'b' is a bool array and 'c' a byte array (matching fbx2json's
            # type codes); these two were previously swapped, corrupting
            # bool/byte arrays on json->fbx round-trips.
            e.add_bool_array(d)
        elif dt == "c":
            e.add_byte_array(d)
    if name == "FBXVersion":
        assert(data_types == "I")
        ver = int(data[0])
    for child in children:
        _ver = parse_json_rec(e, child)
        if _ver:
            ver = _ver
    return ver
def parse_json(json_root):
    """Build an FBXElem tree from the top-level JSON list *json_root*.
    Returns (root element, fbx version)."""
    root = elem_empty(None, b"")
    ver = 0
    for node in json_root:
        ver = parse_json_rec(root, node) or ver
    return root, ver
def json2fbx(fn):
    """Convert the JSON file *fn* into a binary ``.fbx`` file alongside it."""
    import os
    import json
    fn_fbx = "%s.fbx" % os.path.splitext(fn)[0]
    print("Writing: %r " % fn_fbx, end="")
    json_root = []
    with open(fn) as f_json:
        json_root = json.load(f_json)
    fbx_root, fbx_version = parse_json(json_root)
    print("(Version %d) ..." % fbx_version)
    encode_bin.write(fn_fbx, fbx_root, fbx_version)
# ----------------------------------------------------------------------------
# Command Line
def main():
    """Command line entry point: convert each JSON argument to a binary FBX."""
    import sys
    if "--help" in sys.argv:
        print(__doc__)
        return
    for arg in sys.argv[1:]:
        try:
            json2fbx(arg)
        except Exception:
            # keep converting the remaining files; the previous bare 'except'
            # also swallowed KeyboardInterrupt/SystemExit, which now propagate.
            print("Failed to convert %r, error:" % arg)
            import traceback
            traceback.print_exc()

View File

@ -1,170 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Script copyright (C) 2006-2012, assimp team
# Script copyright (C) 2013 Blender Foundation
# The public API of this module.
__all__ = (
    "parse",
    "data_types",
    "parse_version",
    "FBXElem",
    )

from struct import unpack
import array
import zlib

from . import data_types

# at the end of each nested block, there is a NUL record to indicate
# that the sub-scope exists (i.e. to distinguish between P: and P : {})
# this NUL record is 13 bytes long.
_BLOCK_SENTINEL_LENGTH = 13
_BLOCK_SENTINEL_DATA = (b'\0' * _BLOCK_SENTINEL_LENGTH)
# data is stored little-endian; arrays are byte-swapped on big-endian hosts
_IS_BIG_ENDIAN = (__import__("sys").byteorder != 'little')
# fixed magic bytes that open every binary FBX file
_HEAD_MAGIC = b'Kaydara FBX Binary\x20\x20\x00\x1a\x00'

from collections import namedtuple
# a parsed element: (id bytes, property values, property type codes, children)
FBXElem = namedtuple("FBXElem", ("id", "props", "props_type", "elems"))
del namedtuple  # keep the module namespace clean
def read_uint(read):
    """Read a little-endian unsigned 32 bit integer using *read*."""
    (value,) = unpack(b'<I', read(4))
    return value
def read_ubyte(read):
    """Read a single unsigned byte using *read*."""
    (value,) = unpack(b'B', read(1))
    return value
def read_string_ubyte(read):
    """Read a short string/name prefixed by a single length byte."""
    return read(read_ubyte(read))
def unpack_array(read, array_type, array_stride, array_byteswap):
    """Read one array property: a 3-uint32 header (element count, encoding,
    payload byte length) followed by the payload, zlib-inflated when
    encoding == 1.  Returns an ``array.array`` of *array_type*."""
    length = read_uint(read)
    encoding = read_uint(read)
    comp_len = read_uint(read)

    data = read(comp_len)
    if encoding == 1:
        data = zlib.decompress(data)
    assert(length * array_stride == len(data))

    values = array.array(array_type, data)
    if array_byteswap and _IS_BIG_ENDIAN:
        values.byteswap()
    return values
# maps the single-character property type code to a reader for that type
read_data_dict = {
    b'Y'[0]: lambda read: unpack(b'<h', read(2))[0],  # 16 bit int
    b'C'[0]: lambda read: unpack(b'?', read(1))[0],  # 1 byte bool (yes/no)
    b'I'[0]: lambda read: unpack(b'<i', read(4))[0],  # 32 bit int
    b'F'[0]: lambda read: unpack(b'<f', read(4))[0],  # 32 bit float
    b'D'[0]: lambda read: unpack(b'<d', read(8))[0],  # 64 bit float
    b'L'[0]: lambda read: unpack(b'<q', read(8))[0],  # 64 bit int
    b'R'[0]: lambda read: read(read_uint(read)),  # binary data
    b'S'[0]: lambda read: read(read_uint(read)),  # string data
    b'f'[0]: lambda read: unpack_array(read, data_types.ARRAY_FLOAT32, 4, False),  # array (float)
    b'i'[0]: lambda read: unpack_array(read, data_types.ARRAY_INT32, 4, True),  # array (int)
    b'd'[0]: lambda read: unpack_array(read, data_types.ARRAY_FLOAT64, 8, False),  # array (double)
    b'l'[0]: lambda read: unpack_array(read, data_types.ARRAY_INT64, 8, True),  # array (long)
    b'b'[0]: lambda read: unpack_array(read, data_types.ARRAY_BOOL, 1, False),  # array (bool)
    b'c'[0]: lambda read: unpack_array(read, data_types.ARRAY_BYTE, 1, False),  # array (ubyte)
}
def read_elem(read, tell, use_namedtuple):
    """Read one element record at the current stream position.

    Returns an FBXElem (or a plain tuple when *use_namedtuple* is False),
    or None for the NUL record that terminates a scope.
    """
    # [0] the offset at which this block ends
    # [1] the number of properties in the scope
    # [2] the length of the property list
    end_offset = read_uint(read)
    if end_offset == 0:
        return None
    prop_count = read_uint(read)
    prop_length = read_uint(read)  # total byte size of the props (not needed here)
    elem_id = read_string_ubyte(read)  # elem name of the scope/key
    elem_props_type = bytearray(prop_count)  # elem property types
    elem_props_data = [None] * prop_count  # elem properties (if any)
    elem_subtree = []  # elem children (if any)
    for i in range(prop_count):
        data_type = read(1)[0]
        elem_props_data[i] = read_data_dict[data_type](read)
        elem_props_type[i] = data_type
    if tell() < end_offset:
        # a sub-scope exists: read children until only the sentinel remains
        while tell() < (end_offset - _BLOCK_SENTINEL_LENGTH):
            elem_subtree.append(read_elem(read, tell, use_namedtuple))
        if read(_BLOCK_SENTINEL_LENGTH) != _BLOCK_SENTINEL_DATA:
            raise IOError("failed to read nested block sentinel, "
                          "expected all bytes to be 0")
    if tell() != end_offset:
        raise IOError("scope length not reached, something is wrong")
    args = (elem_id, elem_props_data, elem_props_type, elem_subtree)
    return FBXElem(*args) if use_namedtuple else args
def parse_version(fn):
    """
    Return the FBX version,
    if the file isn't a binary FBX return zero.
    """
    with open(fn, 'rb') as f:
        if f.read(len(_HEAD_MAGIC)) != _HEAD_MAGIC:
            return 0
        return read_uint(f.read)
def parse(fn, use_namedtuple=True):
    """Parse the binary FBX file *fn*; return (root element, fbx version)."""
    with open(fn, 'rb') as f:
        read = f.read
        tell = f.tell
        if read(len(_HEAD_MAGIC)) != _HEAD_MAGIC:
            raise IOError("Invalid header")
        fbx_version = read_uint(read)

        # read top-level elements until the terminating NUL record
        root_elems = []
        for elem in iter(lambda: read_elem(read, tell, use_namedtuple), None):
            root_elems.append(elem)

        args = (b'', [], bytearray(0), root_elems)
        return FBXElem(*args) if use_namedtuple else args, fbx_version