+58
.gitignore
+58
.gitignore
···
1
+
*/source
2
+
*/backup
3
+
4
+
*/update_staging
5
+
6
+
XNALara-io-Tools_updater_status.json
7
+
XNALara-io-Tools.zip
8
+
9
+
10
+
*.zip
11
+
12
+
13
+
14
+
15
+
Thumbs.db
16
+
ehthumbs.db
17
+
Desktop.ini
18
+
$RECYCLE.BIN/
19
+
20
+
21
+
*.cab
22
+
*.msi
23
+
*.msm
24
+
*.msp
25
+
*.lnk
26
+
.DS_Store
27
+
.AppleDouble
28
+
.LSOverride
29
+
._*
30
+
31
+
32
+
.Spotlight-V100
33
+
.Trashes
34
+
35
+
36
+
.AppleDB
37
+
.AppleDesktop
38
+
Network Trash Folder
39
+
Temporary Items
40
+
.apdisk
41
+
42
+
43
+
__pycache__/
44
+
webspynner/__pycache__/
45
+
*.py[cod]
46
+
*.pyc
47
+
48
+
49
+
.pydevproject
50
+
.project
51
+
.settings/
52
+
53
+
54
+
*~
55
+
56
+
57
+
XNALara-io-Tools/modules/addon_updater_system/xnalara-io-tools.modules.addon_updater_system_updater/XNALara-io-Tools.modules.addon_updater_system_updater_status.json
58
+
XNALara-io-Tools/modules/addon_updater_system/_updater/XNALara-io-Tools.modules.addon_updater_system_updater_status.json
+6
.gitmodules
+6
.gitmodules
···
1
+
[submodule "XNALara-io-Tools/modules/ALXModuleManager"]
2
+
path = XNALara-io-Tools/modules/ALXModuleManager
3
+
url = https://tangled.sh/@valerie-bosco.tngl.sh/ALXModuleManager.git
4
+
[submodule "XNALara-io-Tools/modules/ALXAddonUpdater"]
5
+
path = XNALara-io-Tools/modules/ALXAddonUpdater
6
+
url = https://tangled.sh/@valerie-bosco.tngl.sh/ALXAddonUpdater.git
+2
-1
ALXAddonUpdater/ALX_AddonUpdater.py
xnalara_io_Tools/modules/ALXAddonUpdater/ALXAddonUpdater/ALX_AddonUpdater.py
+2
-1
ALXAddonUpdater/ALX_AddonUpdater.py
xnalara_io_Tools/modules/ALXAddonUpdater/ALXAddonUpdater/ALX_AddonUpdater.py
ALXAddonUpdater/ALX_AddonUpdaterEngine.py
xnalara_io_Tools/modules/ALXAddonUpdater/ALXAddonUpdater/ALX_AddonUpdaterEngine.py
ALXAddonUpdater/ALX_AddonUpdaterEngine.py
xnalara_io_Tools/modules/ALXAddonUpdater/ALXAddonUpdater/ALX_AddonUpdaterEngine.py
ALXAddonUpdater/ALX_AddonUpdaterOperators.py
xnalara_io_Tools/modules/ALXAddonUpdater/ALXAddonUpdater/ALX_AddonUpdaterOperators.py
ALXAddonUpdater/ALX_AddonUpdaterOperators.py
xnalara_io_Tools/modules/ALXAddonUpdater/ALXAddonUpdater/ALX_AddonUpdaterOperators.py
ALXAddonUpdater/ALX_AddonUpdaterUI.py
xnalara_io_Tools/modules/ALXAddonUpdater/ALXAddonUpdater/ALX_AddonUpdaterUI.py
ALXAddonUpdater/ALX_AddonUpdaterUI.py
xnalara_io_Tools/modules/ALXAddonUpdater/ALXAddonUpdater/ALX_AddonUpdaterUI.py
ALXAddonUpdater/ALX_AddonUpdaterUtils.py
xnalara_io_Tools/modules/ALXAddonUpdater/ALXAddonUpdater/ALX_AddonUpdaterUtils.py
ALXAddonUpdater/ALX_AddonUpdaterUtils.py
xnalara_io_Tools/modules/ALXAddonUpdater/ALXAddonUpdater/ALX_AddonUpdaterUtils.py
+59
addon_zipper.py
+59
addon_zipper.py
···
1
+
import pathlib
2
+
import re
3
+
import shutil
4
+
from os import sep as os_separator
5
+
6
+
7
+
def main():
    """Entry point: build the addon zip with the default (unversioned) name."""
    zip_addon()
9
+
10
+
11
+
def zip_addon(zip_name_includes_version: bool = False):
    """Zip the addon folder that sits next to this script.

    The folder expected at ``<script dir>/<script dir name>`` is copied into a
    ``temp`` staging directory, archived as a .zip beside the script, and the
    staging directory is removed.

    :param zip_name_includes_version: when True, the ``"version": (a, b, c)``
        tuple is parsed out of the addon's ``__init__.py`` and appended to the
        archive name as ``v<a.b.c>``.
    :raises ValueError: when the script's directory cannot be resolved or the
        version tuple cannot be found.

    Fixes vs. the original: parent path derived directly via ``Path.parent``
    instead of re-joining ``parts[:-1]`` by hand; redundant ``close()`` inside
    the ``with`` block removed; version extracted with one grouped regex
    instead of a second search plus substitution; unused ``zipfile`` local
    dropped.
    """
    parent_path = pathlib.Path(__file__).parent
    folder_name = parent_path.name

    if (parent_path.is_dir()):
        # NOTE(review): the source folder is expected to share the parent
        # directory's name — confirm against the repository layout.
        zip_source_path = parent_path.joinpath(folder_name)

        if (zip_name_includes_version):
            with zip_source_path.joinpath("__init__.py").open() as init_file:
                init_content = init_file.read()

            # Capture the three numeric components of bl_info's "version".
            addon_version_match = re.search(
                r"[\"\']version[\"\']\s*:\s*\(\s*([0-9]+)\s*\,\s*([0-9]+)\s*\,\s*([0-9]+)\s*\)",
                init_content
            )
            if (addon_version_match is not None):
                addon_version = ".".join(addon_version_match.groups())
                zip_target_path = parent_path.joinpath(f"{folder_name}v{addon_version}")
            else:
                raise ValueError(f"Addon version not found Value is: {addon_version_match}")
        else:
            zip_target_path = parent_path.joinpath(f"{folder_name}")

        # Stage a copy so make_archive zips only the addon folder.
        shutil.copytree(zip_source_path, parent_path.joinpath("temp", folder_name))
        temp_folder = parent_path.joinpath("temp")

        shutil.make_archive(zip_target_path, "zip", temp_folder)
        shutil.rmtree(temp_folder)

    else:
        raise ValueError(f"Parent_Path is not a directory: {parent_path}")
56
+
57
+
58
+
if __name__ == '__main__':
59
+
main()
+55
xnalara_io_Tools/__init__.py
+55
xnalara_io_Tools/__init__.py
···
1
+
from . import xps_tools
from .modules.ALXAddonUpdater.ALXAddonUpdater.ALX_AddonUpdater import \
    Alx_Addon_Updater
from .modules.ALXInfoSystem.ALXInfoSystem import ALX_InfoSystem
from .modules.ALXModuleManager.ALXModuleManager.ALX_ModuleManager import \
    Alx_Module_Manager

# Blender add-on metadata. NOTE(review): the "version" tuple appears to be
# what addon_zipper.py parses to name release archives — confirm.
bl_info = {
    "name": "XNALara-io-Tools",
    "author": "Valerie Bosco[Valy Arhal], johnzero7[Original Developer]",
    "description": "Import-Export for XNALara/XPS files",
    "version": (1, 2, 4),
    "blender": (3, 6, 0),
    "category": "Import-Export",
    "location": "File > Import-Export > XNALara/XPS",
    "doc_url": "https://github.com/Valerie-Bosco/XNALara-io-Tools/wiki",
    "tracker_url": "https://github.com/Valerie-Bosco/XNALara-io-Tools/issues",
}


# Discovers and (un)registers this package's submodules.
# NOTE(review): mute=True presumably silences the manager's logging — confirm
# against ALXModuleManager.
module_manager = Alx_Module_Manager(
    path=__path__,
    globals=globals(),
    mute=True
)
# Self-updater wired to the GitHub repository below.
addon_updater = Alx_Addon_Updater(
    path=__path__,
    bl_info=bl_info,
    engine="Github",
    engine_user_name="Valerie-Bosco",
    engine_repo_name="XNALara-io-Tools",
    manual_download_website="https://github.com/Valerie-Bosco/XNALara-io-Tools/releases/tag/main_branch_latest"
)
34
+
35
+
36
+
def register():
    """Blender add-on entry point: register submodules, the updater and XPS tools."""
    module_manager.developer_register_modules()
    addon_updater.register_addon_updater(True)

    # ALX_InfoSystem.register_info()

    xps_tools.register()
43
+
44
+
45
+
def unregister():
    """Blender add-on exit point: unregister in the same order as register()."""
    module_manager.developer_unregister_modules()
    addon_updater.unregister_addon_updater()

    # ALX_InfoSystem.unregister_info()

    xps_tools.unregister()
52
+
53
+
54
+
if __name__ == "__main__":
55
+
register()
+67
xnalara_io_Tools/armature_tools/xnal_armature_utilities.py
+67
xnalara_io_Tools/armature_tools/xnal_armature_utilities.py
···
1
+
from typing import Iterable
2
+
3
+
import bpy
4
+
5
+
from ..utilities.color_utilities import random_color_rgb
6
+
7
+
xnal_model_bone_names = []
8
+
9
+
10
+
def Xnal_CreateArmatureObject(name="Armature"):
    """Create and return a new armature object (STICK display).

    The object is not linked to any scene collection; the caller decides where
    it lives.
    """
    new_armature_data = bpy.data.armatures.new(name)
    new_armature_data.display_type = 'STICK'
    return bpy.data.objects.new(name, new_armature_data)
15
+
16
+
17
+
def XnaL_AddRegisterBoneName(name: str):
    """Append *name* to the module-level ``xnal_model_bone_names`` registry.

    Names are stored in call order so XnaL_GetBoneNameByIndex can map an
    index back to a bone name.
    """
    xnal_model_bone_names.append(name)
19
+
20
+
21
+
def XnaL_ShowHideBones(bones: Iterable[bpy.types.Bone], visibility: bool):
    """Set the ``hide`` flag of every bone in *bones* to *visibility*.

    An empty iterable is a no-op. The original probed ``bones[0]`` inside a
    bare ``try/except`` to detect emptiness, which also silently skipped any
    non-subscriptable iterable (e.g. a generator) and swallowed real errors;
    a plain loop handles all of these correctly.
    """
    for bone in bones:
        bone.hide = visibility
28
+
29
+
30
+
def XnaL_GetBoneNameByIndex(original_index: int):
    """Return the registered bone name at *original_index*, or None if absent.

    NOTE(review): a negative index silently returns a name from the end of
    the list — presumably file indices are non-negative; confirm upstream.
    """
    try:
        return xnal_model_bone_names[original_index]
    except (IndexError, TypeError):
        # Narrowed from a bare except: only out-of-range or non-integer
        # lookups map to None; any other failure should surface.
        return None
35
+
36
+
37
+
def XnaL_CreateBoneCollection(armature_object: bpy.types.Object, mesh_object: bpy.types.Object):
    """Group the armature bones that drive *mesh_object* and color them.

    Creates one bone group (Blender 3.6) or bone collection (Blender 4.x)
    named after the mesh, assigns every pose bone whose name matches one of
    the mesh's vertex groups, and applies random normal/select/active colors.

    Fix: pose bones live on the armature *object* (``Object.pose``), not on
    its data-block — the 3.6 branch previously accessed ``armature.pose``,
    which does not exist on ``bpy.types.Armature`` and raised AttributeError.
    """
    armature: bpy.types.Armature = armature_object.data
    pose_bone_normal_color = random_color_rgb()
    pose_bone_select_color = random_color_rgb()
    pose_bone_active_color = random_color_rgb()

    # NOTE(review): exact-version membership tests skip future releases
    # (e.g. 4.5); consider range comparisons once supported versions settle.
    if (bpy.app.version[0:2] in [(3, 6)]):
        # Legacy API: pose bone groups (removed in Blender 4.0).
        bone_group = armature_object.pose.bone_groups.new(name=mesh_object.name)
        bone_group.color_set = "CUSTOM"
        bone_group.colors.normal = pose_bone_normal_color
        bone_group.colors.select = pose_bone_select_color
        bone_group.colors.active = pose_bone_active_color

        for bone_vertex_group_name in mesh_object.vertex_groups.keys():
            pose_bone = armature_object.pose.bones.get(bone_vertex_group_name)

            if (pose_bone is not None):
                pose_bone.bone_group = bone_group

    if (bpy.app.version[0:2] in [(4, 0), (4, 1), (4, 2), (4, 3), (4, 4)]):
        # Modern API: bone collections live on the armature data-block.
        bone_collection = armature.collections.new(name=mesh_object.name)

        for bone_vertex_group_name in mesh_object.vertex_groups.keys():
            pose_bone = armature_object.pose.bones.get(bone_vertex_group_name)

            if (pose_bone is not None):
                bone_collection.assign(pose_bone)
                pose_bone.color.palette = "CUSTOM"
                pose_bone.color.custom.normal = pose_bone_normal_color
                pose_bone.color.custom.select = pose_bone_select_color
                pose_bone.color.custom.active = pose_bone_active_color
+59
xnalara_io_Tools/ascii_ops.py
+59
xnalara_io_Tools/ascii_ops.py
···
1
+
def readline(file):
    """Return the next line of *file* with surrounding whitespace removed."""
    return file.readline().strip()
6
+
7
+
8
+
def getFloat(value):
    """Convert *value* to float; unparsable text yields NaN.

    Falsy input (empty string, None, 0) is returned unchanged, matching the
    original contract.
    """
    if not value:
        return value
    try:
        return float(value)
    except ValueError:
        return float('NaN')
16
+
17
+
18
+
def getInt(value):
    """Read value and returns a int. If error return None.

    Fix: also catches TypeError so non-string/non-numeric input (e.g. None)
    yields None instead of crashing — the original only caught ValueError.
    """
    try:
        return int(value)
    except (ValueError, TypeError):
        return None
24
+
25
+
26
+
def ignoreComment(line):
    """Return the first whitespace-delimited token of *line*, with '#' treated
    as a separator.

    Fix: blank or separator-only lines return '' — the original raised
    IndexError on ``line.split()[0]`` for those inputs. '' still converts to
    None/NaN downstream via getInt/getFloat.
    """
    tokens = line.replace('#', ' ').split()
    return tokens[0] if tokens else ''
31
+
32
+
33
+
def ignoreStringComment(line):
    """Return *line* with any trailing '#' comment removed and whitespace stripped."""
    before_comment, _, _ = line.partition('#')
    return before_comment.strip()
37
+
38
+
39
+
def readInt(file):
    """Read the next line of *file* and return its leading integer (None on failure)."""
    return getInt(ignoreComment(readline(file)))
45
+
46
+
47
+
def readString(file):
    """Read the next line of *file* and return it with any '#' comment stripped."""
    return ignoreStringComment(readline(file))
53
+
54
+
55
+
def splitValues(line):
    """Return the whitespace-delimited tokens of *line*, treating '#' as whitespace."""
    return line.replace('#', ' ').split()
+122
xnalara_io_Tools/bin_ops.py
+122
xnalara_io_Tools/bin_ops.py
···
1
+
import struct
2
+
3
+
from . import xps_const
4
+
5
+
6
+
# Format
7
+
class TypeFormat:
    """struct format strings (little-endian, standard sizes) for XPS binary IO."""
    SByte = '<b'
    Byte = '<B'
    Int16 = '<h'
    UInt16 = '<H'
    Int32 = '<i'
    UInt32 = '<I'
    # Fix: in struct's standard-size mode '<l'/'<L' are 4-byte formats, so the
    # original Int64/UInt64 were actually 32-bit; '<q'/'<Q' are the true
    # 8-byte formats.
    Int64 = '<q'
    UInt64 = '<Q'
    Single = '<f'
    Double = '<d'
18
+
19
+
20
+
def roundToMultiple(numToRound, multiple):
    """Round *numToRound* up to the nearest multiple of *multiple*."""
    quotient, remainder = divmod(numToRound, multiple)
    return (quotient + (1 if remainder else 0)) * multiple
22
+
23
+
24
+
def readByte(file):
    """Read one unsigned byte from *file*."""
    (value,) = struct.unpack('<B', file.read(1))
    return value
28
+
29
+
30
+
def writeByte(number):
    """Pack *number* as a single unsigned byte."""
    return struct.pack('<B', number)
33
+
34
+
35
+
def readUInt16(file):
    """Read a little-endian unsigned 16-bit integer from *file*."""
    (value,) = struct.unpack('<H', file.read(2))
    return value
39
+
40
+
41
+
def writeUInt16(number):
    """Pack *number* as a little-endian unsigned 16-bit integer."""
    return struct.pack('<H', number)
44
+
45
+
46
+
def readInt16(file):
    """Read a little-endian signed 16-bit integer from *file*."""
    (value,) = struct.unpack('<h', file.read(2))
    return value
50
+
51
+
52
+
def writeInt16(number):
    """Pack *number* as a little-endian signed 16-bit integer."""
    return struct.pack('<h', number)
55
+
56
+
57
+
def readUInt32(file):
    """Read a little-endian unsigned 32-bit integer from *file*."""
    (value,) = struct.unpack('<I', file.read(4))
    return value
61
+
62
+
63
+
def writeUInt32(number):
    """Pack *number* as a little-endian unsigned 32-bit integer."""
    return struct.pack('<I', number)
66
+
67
+
68
+
def readSingle(file):
    """Read a little-endian 32-bit float from *file*."""
    (value,) = struct.unpack('<f', file.read(4))
    return value
72
+
73
+
74
+
def writeSingle(number):
    """Pack *number* as a little-endian 32-bit float."""
    return struct.pack('<f', number)
77
+
78
+
79
+
def readString(file, length):
    """Read *length* bytes from *file* and decode them (xps_const.ENCODING_READ).

    On failure the handler dumps stream-position diagnostics and retries the
    decode, which re-raises the original decode error after logging.
    NOTE(review): if ``file.read`` itself raises, ``pos1``/``byteString`` are
    unbound and the handler raises NameError instead — confirm intended.
    """
    try:
        pos1 = file.tell()
        byteString = file.read(length)
        pos2 = file.tell()
        string = ''
        string = decodeBytes(byteString)
    except Exception:
        # Diagnostic dump before re-attempting (and re-raising) the decode.
        print('*' * 40)
        print('pos len', pos1)
        print('pos str', pos2)
        print('pos', file.tell())
        print('len', length)
        print('str', byteString)
        string = decodeBytes(byteString)
    return string
95
+
96
+
97
+
def writeString(string):
    """Encode *string* to bytes using the XPS write encoding."""
    byteString = encodeString(string)
    return byteString
101
+
102
+
103
+
def decodeBytes(bytes):
    """Decode *bytes* with xps_const.ENCODING_READ.

    NOTE: the parameter shadows the builtin ``bytes``.
    """
    return bytes.decode(xps_const.ENCODING_READ)
106
+
107
+
108
+
def encodeString(string):
    """Encode *string* with xps_const.ENCODING_WRITE."""
    return string.encode(xps_const.ENCODING_WRITE)
111
+
112
+
113
+
def hasHeader(fileformat='.xps'):
    """Only the binary '.xps' format carries a file header."""
    is_xps_binary = (fileformat == '.xps')
    return is_xps_binary
115
+
116
+
117
+
def hasTangentVersion(verMayor, verMinor, hasHeader=True):
    """Tangent data is present up to version 2.12; headerless files always have it."""
    if not hasHeader:
        return True
    return verMayor <= 2 and verMinor <= 12
119
+
120
+
121
+
def hasVariableWeights(verMayor, verMinor, hasHeader=True):
    """Variable bone-weight counts appear from major version 3; never in headerless files."""
    if not hasHeader:
        return False
    return verMayor >= 3
+942
xnalara_io_Tools/export_obj.py
+942
xnalara_io_Tools/export_obj.py
···
1
+
# ##### BEGIN GPL LICENSE BLOCK #####
2
+
#
3
+
# This program is free software; you can redistribute it and/or
4
+
# modify it under the terms of the GNU General Public License
5
+
# as published by the Free Software Foundation; either version 2
6
+
# of the License, or (at your option) any later version.
7
+
#
8
+
# This program is distributed in the hope that it will be useful,
9
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
10
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11
+
# GNU General Public License for more details.
12
+
#
13
+
# You should have received a copy of the GNU General Public License
14
+
# along with this program; if not, write to the Free Software Foundation,
15
+
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
16
+
#
17
+
# ##### END GPL LICENSE BLOCK #####
18
+
19
+
import operator
20
+
import os
21
+
22
+
import bpy
23
+
import bpy_extras.io_utils
24
+
import mathutils
25
+
from bpy_extras.wm_utils.progress_report import (ProgressReport,
26
+
ProgressReportSubstep)
27
+
28
+
29
+
def name_compat(name):
    """Return an OBJ-safe name: None becomes 'None', spaces become underscores."""
    return 'None' if name is None else name.replace(' ', '_')
34
+
35
+
36
+
def mesh_triangulate(me):
    """Triangulate mesh *me* in place using a throwaway BMesh."""
    import bmesh
    scratch = bmesh.new()
    scratch.from_mesh(me)
    bmesh.ops.triangulate(scratch, faces=scratch.faces)
    scratch.to_mesh(me)
    scratch.free()
43
+
44
+
45
+
def write_arl(scene, filepath, path_mode, copy_set, mtl_dict, armatures):
    """Write the .arl armature sidecar for the first armature in *armatures*.

    Emits bone count, then per bone: name, parent index (-1 for roots) and the
    transformed head position.

    Fix: matrix composition uses the '@' operator — 'Matrix * Matrix' was
    removed in Blender 2.80+, so the original line raised on the Blender
    versions this addon targets (>= 3.6).
    """
    source_dir = os.path.dirname(bpy.data.filepath)
    dest_dir = os.path.dirname(filepath)

    armature_ob, ob_mat, EXPORT_GLOBAL_MATRIX = armatures[0]

    if armature_ob:
        with open(filepath, "w", encoding="utf8", newline="\n") as f:
            fw = f.write
            fw('# XPS NGFF ARL Blender Exporter file: %r\n' %
               (os.path.basename(bpy.data.filepath) or "None"))
            fw('# Version: %g\n' % (0.1))
            fw('%i # bone Count\n' % len(armature_ob.data.bones))

            # Work on a copy so the scene armature is left untouched.
            armature_data = armature_ob.data.copy()
            # '@' is matrix multiplication in Blender >= 2.80 (PEP 465).
            armature_data.transform(EXPORT_GLOBAL_MATRIX @ ob_mat)

            bones = armature_data.bones
            for bone in bones:
                fw('%s\n' % bone.name)
                parent_bone_id = -1
                if bone.parent:
                    parent_bone_name = bone.parent.name
                    parent_bone_id = bones.find(parent_bone_name)
                fw('%i\n' % parent_bone_id)
                fw('%g %g %g\n' % bone.head_local[:])
71
+
72
+
73
+
def write_mtl(scene, filepath, path_mode, copy_set, mtl_dict):
    """Write the Wavefront .mtl companion file for an OBJ export.

    :param mtl_dict: maps keys to (mtl_name, material, face_image) triples;
        values are written sorted by material name.
    :param copy_set: receives (src, dst) pairs for textures that
        path_reference decides must be copied.

    NOTE(review): this body uses the pre-2.80 Blender material API
    (``world.ambient_color``, ``raytrace_mirror``, ``texture_slots``,
    ``use_shadeless`` ...), all removed in Blender 2.80+ — confirm whether
    this code path is still reachable on the addon's supported versions.
    """
    from mathutils import Color, Vector

    world = scene.world
    if world:
        world_amb = world.ambient_color
    else:
        world_amb = Color((0.0, 0.0, 0.0))

    source_dir = os.path.dirname(bpy.data.filepath)
    dest_dir = os.path.dirname(filepath)

    with open(filepath, "w", encoding="utf8", newline="\n") as f:
        fw = f.write

        fw('# Blender MTL File: %r\n' %
           (os.path.basename(bpy.data.filepath) or "None"))
        fw('# Material Count: %i\n' % len(mtl_dict))

        mtl_dict_values = list(mtl_dict.values())
        mtl_dict_values.sort(key=lambda m: m[0])

        # Write material/image combinations we have used.
        # Using mtl_dict.values() directly gives un-predictable order.
        for mtl_mat_name, mat, face_img in mtl_dict_values:
            # Get the Blender data for the material and the image.
            # Having an image named None will make a bug, dont do it :)

            # Define a new material: matname_imgname
            fw('\nnewmtl %s\n' % mtl_mat_name)

            if mat:
                use_mirror = mat.raytrace_mirror.use and mat.raytrace_mirror.reflect_factor != 0.0

                # convert from blenders spec to 0 - 1000 range.
                if mat.specular_shader == 'WARDISO':
                    tspec = (0.4 - mat.specular_slope) / 0.0004
                else:
                    tspec = (mat.specular_hardness - 1) / 0.51
                fw('Ns %.6f\n' % tspec)
                del tspec

                # Ambient
                if use_mirror:
                    fw('Ka %.6f %.6f %.6f\n' % (mat.raytrace_mirror.reflect_factor * mat.mirror_color)[:])
                else:
                    fw('Ka %.6f %.6f %.6f\n' % (mat.ambient, mat.ambient, mat.ambient))  # Do not use world color!
                fw('Kd %.6f %.6f %.6f\n' % (mat.diffuse_intensity * mat.diffuse_color)[:])  # Diffuse
                fw('Ks %.6f %.6f %.6f\n' % (mat.specular_intensity * mat.specular_color)[:])  # Specular
                # Emission, not in original MTL standard but seems pretty common, see T45766.
                # XXX Blender has no color emission, it's using diffuse color instead...
                fw('Ke %.6f %.6f %.6f\n' % (mat.emit * mat.diffuse_color)[:])
                if hasattr(mat, "raytrace_transparency") and hasattr(mat.raytrace_transparency, "ior"):
                    fw('Ni %.6f\n' % mat.raytrace_transparency.ior)  # Refraction index
                else:
                    fw('Ni %.6f\n' % 1.0)
                fw('d %.6f\n' % mat.alpha)  # Alpha (obj uses 'd' for dissolve)

                # See http://en.wikipedia.org/wiki/Wavefront_.obj_file for whole list of values...
                # Note that mapping is rather fuzzy sometimes, trying to do our best here.
                if mat.use_shadeless:
                    fw('illum 0\n')  # ignore lighting
                elif mat.specular_intensity == 0:
                    fw('illum 1\n')  # no specular.
                elif use_mirror:
                    if mat.use_transparency and mat.transparency_method == 'RAYTRACE':
                        if mat.raytrace_mirror.fresnel != 0.0:
                            fw('illum 7\n')  # Reflection, Transparency, Ray trace and Fresnel
                        else:
                            fw('illum 6\n')  # Reflection, Transparency, Ray trace
                    elif mat.raytrace_mirror.fresnel != 0.0:
                        fw('illum 5\n')  # Reflection, Ray trace and Fresnel
                    else:
                        fw('illum 3\n')  # Reflection and Ray trace
                elif mat.use_transparency and mat.transparency_method == 'RAYTRACE':
                    fw('illum 9\n')  # 'Glass' transparency and no Ray trace reflection... fuzzy matching, but...
                else:
                    fw('illum 2\n')  # light normaly

            else:
                # Write a dummy material here?
                fw('Ns 0\n')
                fw('Ka %.6f %.6f %.6f\n' % world_amb[:])  # Ambient, uses mirror color,
                fw('Kd 0.8 0.8 0.8\n')
                fw('Ks 0.8 0.8 0.8\n')
                fw('d 1\n')  # No alpha
                fw('illum 2\n')  # light normaly

            # Write images!
            if face_img:  # We have an image on the face!
                filepath = face_img.filepath
                if filepath:  # may be '' for generated images
                    # write relative image path
                    filepath = bpy_extras.io_utils.path_reference(
                        filepath, source_dir, dest_dir,
                        path_mode, "", copy_set, face_img.library)
                    fw('map_Kd %s\n' % filepath)  # Diffuse mapping image
                    del filepath
                else:
                    # so we write the materials image.
                    face_img = None

            if mat:  # No face image. if we havea material search for MTex image.
                image_map = {}
                # backwards so topmost are highest priority
                for mtex in reversed(mat.texture_slots):
                    if mtex and mtex.texture and mtex.texture.type == 'IMAGE':
                        image = mtex.texture.image
                        if image:
                            # texface overrides others
                            if (mtex.use_map_color_diffuse and
                                    (face_img is None) and
                                    (mtex.use_map_warp is False) and
                                    (mtex.texture_coords != 'REFLECTION')):
                                image_map["map_Kd"] = (mtex, image)
                            if mtex.use_map_ambient:
                                image_map["map_Ka"] = (mtex, image)
                            # this is the Spec intensity channel but Ks stands for specular Color
                            '''
                            if mtex.use_map_specular:
                                image_map["map_Ks"] = (mtex, image)
                            '''
                            if mtex.use_map_color_spec:  # specular color
                                image_map["map_Ks"] = (mtex, image)
                            if mtex.use_map_hardness:  # specular hardness/glossiness
                                image_map["map_Ns"] = (mtex, image)
                            if mtex.use_map_alpha:
                                image_map["map_d"] = (mtex, image)
                            if mtex.use_map_translucency:
                                image_map["map_Tr"] = (mtex, image)
                            if mtex.use_map_normal:
                                image_map["map_Bump"] = (mtex, image)
                            if mtex.use_map_displacement:
                                image_map["disp"] = (mtex, image)
                            if mtex.use_map_color_diffuse and (mtex.texture_coords == 'REFLECTION'):
                                image_map["refl"] = (mtex, image)
                            if mtex.use_map_emit:
                                image_map["map_Ke"] = (mtex, image)

                for key, (mtex, image) in sorted(image_map.items()):
                    filepath = bpy_extras.io_utils.path_reference(
                        image.filepath, source_dir, dest_dir,
                        path_mode, "", copy_set, image.library)
                    options = []
                    if key == "map_Bump":
                        if mtex.normal_factor != 1.0:
                            options.append('-bm %.6f' % mtex.normal_factor)
                    if mtex.offset != Vector((0.0, 0.0, 0.0)):
                        options.append('-o %.6f %.6f %.6f' % mtex.offset[:])
                    if mtex.scale != Vector((1.0, 1.0, 1.0)):
                        options.append('-s %.6f %.6f %.6f' % mtex.scale[:])
                    fw('%s %s %s\n' % (key, " ".join(options), repr(filepath)[1:-1]))
225
+
226
+
227
+
def test_nurbs_compat(ob):
228
+
if ob.type != 'CURVE':
229
+
return False
230
+
231
+
for nu in ob.data.splines:
232
+
if nu.point_count_v == 1 and nu.type != 'BEZIER': # not a surface and not bezier
233
+
return True
234
+
235
+
return False
236
+
237
+
238
+
def write_nurb(fw, ob, ob_mat):
    """Write curve object *ob* as OBJ 'curv' records using writer *fw*.

    Returns the number of vertices written so the caller can keep its global
    (negative-index) vertex counter in sync. Bezier splines and surfaces are
    skipped with a console warning.

    Fix: point coordinates are transformed with the '@' matrix-vector
    operator — 'Matrix * Vector' was removed in Blender 2.80+, so the
    original raised at runtime on the Blender versions this addon targets.
    """
    tot_verts = 0
    cu = ob.data

    # use negative indices
    for nu in cu.splines:
        if nu.type == 'POLY':
            DEG_ORDER_U = 1
        else:
            DEG_ORDER_U = nu.order_u - 1  # odd but tested to be correct

        if nu.type == 'BEZIER':
            print("\tWarning, bezier curve:", ob.name, "only poly and nurbs curves supported")
            continue

        if nu.point_count_v > 1:
            print("\tWarning, surface:", ob.name, "only poly and nurbs curves supported")
            continue

        if len(nu.points) <= DEG_ORDER_U:
            print("\tWarning, order_u is lower then vert count, skipping:", ob.name)
            continue

        pt_num = 0
        do_closed = nu.use_cyclic_u
        do_endpoints = (do_closed == 0) and nu.use_endpoint_u

        for pt in nu.points:
            # '@' is matrix-vector multiplication in Blender >= 2.80.
            fw('v %.6f %.6f %.6f\n' % (ob_mat @ pt.co.to_3d())[:])
            pt_num += 1
        tot_verts += pt_num

        fw('g %s\n' % (name_compat(ob.name)))  # name_compat(ob.getData(1)) could use the data name too
        fw('cstype bspline\n')  # not ideal, hard coded
        fw('deg %d\n' % DEG_ORDER_U)  # not used for curves but most files have it still

        curve_ls = [-(i + 1) for i in range(pt_num)]

        # 'curv' keyword
        if do_closed:
            if DEG_ORDER_U == 1:
                pt_num += 1
                curve_ls.append(-1)
            else:
                pt_num += DEG_ORDER_U
                curve_ls = curve_ls + curve_ls[0:DEG_ORDER_U]

        fw('curv 0.0 1.0 %s\n' % (" ".join([str(i) for i in curve_ls])))  # Blender has no U and V values for the curve

        # 'parm' keyword
        tot_parm = (DEG_ORDER_U + 1) + pt_num
        tot_parm_div = float(tot_parm - 1)
        parm_ls = [(i / tot_parm_div) for i in range(tot_parm)]

        if do_endpoints:  # end points, force param
            for i in range(DEG_ORDER_U + 1):
                parm_ls[i] = 0.0
                parm_ls[-(1 + i)] = 1.0

        fw("parm u %s\n" % " ".join(["%.6f" % i for i in parm_ls]))

        fw('end\n')

    return tot_verts
302
+
303
+
304
+
def write_file(filepath, objects, scene,
305
+
EXPORT_TRI=False,
306
+
EXPORT_EDGES=False,
307
+
EXPORT_SMOOTH_GROUPS=False,
308
+
EXPORT_SMOOTH_GROUPS_BITFLAGS=False,
309
+
EXPORT_NORMALS=False,
310
+
EXPORT_VCOLORS=False,
311
+
EXPORT_UV=True,
312
+
EXPORT_MTL=True,
313
+
EXPORT_APPLY_MODIFIERS=True,
314
+
EXPORT_BLEN_OBS=True,
315
+
EXPORT_GROUP_BY_OB=False,
316
+
EXPORT_GROUP_BY_MAT=False,
317
+
EXPORT_KEEP_VERT_ORDER=False,
318
+
EXPORT_POLYGROUPS=False,
319
+
EXPORT_CURVE_AS_NURBS=True,
320
+
EXPORT_GLOBAL_MATRIX=None,
321
+
EXPORT_PATH_MODE='AUTO',
322
+
progress=ProgressReport(),
323
+
):
324
+
"""
325
+
Basic write function. The context and options must be already set
326
+
This can be accessed externaly
327
+
eg.
328
+
write( 'c:\\test\\foobar.obj', Blender.Object.GetSelected() ) # Using default options.
329
+
"""
330
+
if EXPORT_GLOBAL_MATRIX is None:
331
+
EXPORT_GLOBAL_MATRIX = mathutils.Matrix()
332
+
333
+
def veckey3d(v):
334
+
return round(v.x, 4), round(v.y, 4), round(v.z, 4)
335
+
336
+
def colkey4d(v):
337
+
return round(v[0], 4), round(v[1], 4), round(v[2], 4), 1
338
+
339
+
def veckey2d(v):
340
+
return round(v[0], 4), round(v[1], 4)
341
+
342
+
def findVertexGroupName(face, vWeightMap):
343
+
"""
344
+
Searches the vertexDict to see what groups is assigned to a given face.
345
+
We use a frequency system in order to sort out the name because a given vetex can
346
+
belong to two or more groups at the same time. To find the right name for the face
347
+
we list all the possible vertex group names with their frequency and then sort by
348
+
frequency in descend order. The top element is the one shared by the highest number
349
+
of vertices is the face's group
350
+
"""
351
+
weightDict = {}
352
+
for vert_index in face.vertices:
353
+
vWeights = vWeightMap[vert_index]
354
+
for vGroupName, weight in vWeights:
355
+
weightDict[vGroupName] = weightDict.get(vGroupName, 0.0) + weight
356
+
357
+
if weightDict:
358
+
return max((weight, vGroupName) for vGroupName, weight in weightDict.items())[1]
359
+
else:
360
+
return '(null)'
361
+
362
+
with ProgressReportSubstep(progress, 2, "OBJ Export path: %r" % filepath, "OBJ Export Finished") as subprogress1:
363
+
with open(filepath, "w", encoding="utf8", newline="\n") as f:
364
+
fw = f.write
365
+
366
+
# Write Header
367
+
fw('# Blender v%s OBJ File: %r\n' % (bpy.app.version_string, os.path.basename(bpy.data.filepath)))
368
+
fw('# www.blender.org\n')
369
+
370
+
# Tell the obj file what material file to use.
371
+
if EXPORT_MTL:
372
+
mtlfilepath = os.path.splitext(filepath)[0] + ".mtl"
373
+
# filepath can contain non utf8 chars, use repr
374
+
fw('mtllib %s\n' % repr(os.path.basename(mtlfilepath))[1:-1])
375
+
376
+
# Tell the obj file what armature file to use.
377
+
EXPORT_ARL = True
378
+
if EXPORT_ARL:
379
+
arlfilepath = os.path.splitext(filepath)[0] + ".arl"
380
+
# filepath can contain non utf8 chars, use repr
381
+
fw('arllib %s\n' % repr(os.path.basename(arlfilepath))[1:-1])
382
+
383
+
# Initialize totals, these are updated each object
384
+
totverts = totuvco = totno = totvcol = 1
385
+
386
+
face_vert_index = 1
387
+
388
+
# A Dict of Materials
389
+
# (material.name, image.name):matname_imagename # matname_imagename has gaps removed.
390
+
mtl_dict = {}
391
+
# Used to reduce the usage of matname_texname materials, which can become annoying in case of
392
+
# repeated exports/imports, yet keeping unique mat names per keys!
393
+
# mtl_name: (material.name, image.name)
394
+
mtl_rev_dict = {}
395
+
396
+
copy_set = set()
397
+
398
+
# Get all meshes
399
+
subprogress1.enter_substeps(len(objects))
400
+
armatures = []
401
+
for i, ob_main in enumerate(sorted(objects, key=operator.attrgetter('name'))):
402
+
armature = ob_main.find_armature()
403
+
if armature:
404
+
armatures += [[armature, armature.matrix_world, EXPORT_GLOBAL_MATRIX]]
405
+
# ignore dupli children
406
+
if ob_main.parent and ob_main.parent.dupli_type in {'VERTS', 'FACES'}:
407
+
# XXX
408
+
subprogress1.step("Ignoring %s, dupli child..." % ob_main.name)
409
+
continue
410
+
411
+
obs = [(ob_main, ob_main.matrix_world)]
412
+
if ob_main.dupli_type != 'NONE':
413
+
# XXX
414
+
print('creating dupli_list on', ob_main.name)
415
+
ob_main.dupli_list_create(scene)
416
+
417
+
obs += [(dob.object, dob.matrix) for dob in ob_main.dupli_list]
418
+
419
+
# XXX debug print
420
+
print(ob_main.name, 'has', len(obs) - 1, 'dupli children')
421
+
422
+
subprogress1.enter_substeps(len(obs))
423
+
for ob, ob_mat in obs:
424
+
with ProgressReportSubstep(subprogress1, 6) as subprogress2:
425
+
uv_unique_count = no_unique_count = vc_unique_count = 0
426
+
427
+
# Nurbs curve support
428
+
if EXPORT_CURVE_AS_NURBS and test_nurbs_compat(ob):
429
+
ob_mat = EXPORT_GLOBAL_MATRIX * ob_mat
430
+
totverts += write_nurb(fw, ob, ob_mat)
431
+
continue
432
+
# END NURBS
433
+
434
+
try:
435
+
me = ob.to_mesh(scene, EXPORT_APPLY_MODIFIERS, 'PREVIEW', calc_tessface=False)
436
+
except RuntimeError:
437
+
me = None
438
+
439
+
if me is None:
440
+
continue
441
+
442
+
me.transform(EXPORT_GLOBAL_MATRIX * ob_mat)
443
+
444
+
if EXPORT_TRI:
445
+
# _must_ do this first since it re-allocs arrays
446
+
mesh_triangulate(me)
447
+
448
+
if EXPORT_UV:
449
+
faceuv = len(me.uv_textures) > 0
450
+
if faceuv:
451
+
uv_texture = me.uv_textures.active.data[:]
452
+
uv_layer = me.uv_layers.active.data[:]
453
+
else:
454
+
faceuv = False
455
+
456
+
me_verts = me.vertices[:]
457
+
458
+
# Make our own list so it can be sorted to reduce context switching
459
+
face_index_pairs = [(face, index) for index, face in enumerate(me.polygons)]
460
+
# faces = [ f for f in me.tessfaces ]
461
+
462
+
if EXPORT_EDGES:
463
+
edges = me.edges
464
+
else:
465
+
edges = []
466
+
467
+
if not (len(face_index_pairs) + len(edges) + len(me.vertices)): # Make sure there is something to write
468
+
# clean up
469
+
bpy.data.meshes.remove(me)
470
+
continue # dont bother with this mesh.
471
+
472
+
if (bpy.app.version[0:2] in [(3, 6), (4, 0)]):
473
+
if EXPORT_NORMALS and face_index_pairs:
474
+
me.calc_normals_split()
475
+
# No need to call me.free_normals_split later, as this mesh is deleted anyway!
476
+
477
+
loops = me.loops
478
+
vcolors = []
479
+
if me.vertex_colors:
480
+
vcolors = me.vertex_colors[0]
481
+
482
+
if (EXPORT_SMOOTH_GROUPS or EXPORT_SMOOTH_GROUPS_BITFLAGS) and face_index_pairs:
483
+
smooth_groups, smooth_groups_tot = me.calc_smooth_groups(EXPORT_SMOOTH_GROUPS_BITFLAGS)
484
+
if smooth_groups_tot <= 1:
485
+
smooth_groups, smooth_groups_tot = (), 0
486
+
else:
487
+
smooth_groups, smooth_groups_tot = (), 0
488
+
489
+
materials = me.materials[:]
490
+
material_names = [m.name if m else None for m in materials]
491
+
492
+
# avoid bad index errors
493
+
if not materials:
494
+
materials = [None]
495
+
material_names = [name_compat(None)]
496
+
497
+
# Sort by Material, then images
498
+
# so we dont over context switch in the obj file.
499
+
if EXPORT_KEEP_VERT_ORDER:
500
+
pass
501
+
else:
502
+
if faceuv:
503
+
if smooth_groups:
504
+
def sort_func(a): return (a[0].material_index,
505
+
hash(uv_texture[a[1]].image),
506
+
smooth_groups[a[1]] if a[0].use_smooth else False)
507
+
else:
508
+
def sort_func(a): return (a[0].material_index,
509
+
hash(uv_texture[a[1]].image),
510
+
a[0].use_smooth)
511
+
elif len(materials) > 1:
512
+
if smooth_groups:
513
+
def sort_func(a): return (a[0].material_index,
514
+
smooth_groups[a[1]] if a[0].use_smooth else False)
515
+
else:
516
+
def sort_func(a): return (a[0].material_index,
517
+
a[0].use_smooth)
518
+
else:
519
+
# no materials
520
+
if smooth_groups:
521
+
def sort_func(a): return smooth_groups[a[1] if a[0].use_smooth else False]
522
+
else:
523
+
def sort_func(a): return a[0].use_smooth
524
+
525
+
face_index_pairs.sort(key=sort_func)
526
+
527
+
del sort_func
528
+
529
+
# Set the default mat to no material and no image.
530
+
contextMat = 0, 0 # Can never be this, so we will label a new material the first chance we get.
531
+
contextSmooth = None # Will either be true or false, set bad to force initialization switch.
532
+
533
+
if EXPORT_BLEN_OBS or EXPORT_GROUP_BY_OB:
534
+
name1 = ob.name
535
+
name2 = ob.data.name
536
+
if name1 == name2:
537
+
obnamestring = name_compat(name1)
538
+
else:
539
+
obnamestring = '%s_%s' % (name_compat(name1), name_compat(name2))
540
+
541
+
if EXPORT_BLEN_OBS:
542
+
fw('o %s\n' % obnamestring) # Write Object name
543
+
else: # if EXPORT_GROUP_BY_OB:
544
+
fw('g %s\n' % obnamestring)
545
+
546
+
subprogress2.step()
547
+
548
+
# Vert
549
+
for v in me_verts:
550
+
fw('v %.6f %.6f %.6f\n' % v.co[:])
551
+
552
+
subprogress2.step()
553
+
554
+
# UV
555
+
if faceuv:
556
+
# in case removing some of these dont get defined.
557
+
uv = f_index = uv_index = uv_key = uv_val = uv_ls = None
558
+
559
+
uv_face_mapping = [None] * len(face_index_pairs)
560
+
561
+
uv_dict = {}
562
+
uv_get = uv_dict.get
563
+
for f, f_index in face_index_pairs:
564
+
uv_ls = uv_face_mapping[f_index] = []
565
+
for uv_index, l_index in enumerate(f.loop_indices):
566
+
uv = uv_layer[l_index].uv
567
+
# include the vertex index in the key so we don't share UV's between vertices,
568
+
# allowed by the OBJ spec but can cause issues for other importers, see: T47010.
569
+
570
+
# this works too, shared UV's for all verts
571
+
# ~ uv_key = veckey2d(uv)
572
+
uv_key = loops[l_index].vertex_index, veckey2d(uv)
573
+
574
+
uv_val = uv_get(uv_key)
575
+
if uv_val is None:
576
+
uv_val = uv_dict[uv_key] = uv_unique_count
577
+
fw('vt %.4f %.4f\n' % uv[:])
578
+
uv_unique_count += 1
579
+
uv_ls.append(uv_val)
580
+
581
+
del uv_dict, uv, f_index, uv_index, uv_ls, uv_get, uv_key, uv_val
582
+
# Only need uv_unique_count and uv_face_mapping
583
+
584
+
subprogress2.step()
585
+
586
+
# NORMAL, Smooth/Non smoothed.
587
+
if EXPORT_NORMALS:
588
+
no_key = no_val = None
589
+
normals_to_idx = {}
590
+
no_get = normals_to_idx.get
591
+
loops_to_normals = [0] * len(loops)
592
+
for f, f_index in face_index_pairs:
593
+
for l_idx in f.loop_indices:
594
+
no_key = veckey3d(loops[l_idx].normal)
595
+
no_val = no_get(no_key)
596
+
if no_val is None:
597
+
no_val = normals_to_idx[no_key] = no_unique_count
598
+
fw('vn %.4f %.4f %.4f\n' % no_key)
599
+
no_unique_count += 1
600
+
loops_to_normals[l_idx] = no_val
601
+
del normals_to_idx, no_get, no_key, no_val
602
+
else:
603
+
loops_to_normals = []
604
+
605
+
# Vertex Color
606
+
if EXPORT_VCOLORS and vcolors:
607
+
no_key = no_val = None
608
+
vcolors_to_idx = {}
609
+
no_get = vcolors_to_idx.get
610
+
loops_to_vcolors = [0] * len(vcolors.data)
611
+
for f, f_index in face_index_pairs:
612
+
for l_idx in f.loop_indices:
613
+
no_key = colkey4d(vcolors.data[l_idx].color)
614
+
no_val = no_get(no_key)
615
+
if no_val is None:
616
+
no_val = vcolors_to_idx[no_key] = vc_unique_count
617
+
fw('vc %.4f %.4f %.4f %.4f\n' % no_key)
618
+
vc_unique_count += 1
619
+
loops_to_vcolors[l_idx] = no_val
620
+
del vcolors_to_idx, no_get, no_key, no_val
621
+
else:
622
+
loops_to_vcolors = []
623
+
624
+
# Vertex wights
625
+
EXPORT_ARL = True
626
+
vweights = ob.vertex_groups
627
+
armature_ob = ob.find_armature()
628
+
armature_data = armature_ob.data
629
+
if EXPORT_ARL and armature:
630
+
for v in me_verts:
631
+
weights = [[armature_data.bones.find(vweights[g.group].name), g.weight] for g in v.groups]
632
+
weights += [[0, 0]] * (4 - len(weights))
633
+
weights.sort(key=operator.itemgetter(1), reverse=True)
634
+
fw('bw [%s]\n' % ', '.join('[%i,%g]' % tuple(pair) for pair in weights))
635
+
636
+
if not faceuv:
637
+
f_image = None
638
+
639
+
subprogress2.step()
640
+
641
+
# XXX
642
+
if EXPORT_POLYGROUPS:
643
+
# Retrieve the list of vertex groups
644
+
vertGroupNames = ob.vertex_groups.keys()
645
+
if vertGroupNames:
646
+
currentVGroup = ''
647
+
# Create a dictionary keyed by face id and listing, for each vertex, the vertex groups it belongs to
648
+
vgroupsMap = [[] for _i in range(len(me_verts))]
649
+
for v_idx, v_ls in enumerate(vgroupsMap):
650
+
v_ls[:] = [(vertGroupNames[g.group], g.weight) for g in me_verts[v_idx].groups]
651
+
652
+
for f, f_index in face_index_pairs:
653
+
f_smooth = f.use_smooth
654
+
if f_smooth and smooth_groups:
655
+
f_smooth = smooth_groups[f_index]
656
+
f_mat = min(f.material_index, len(materials) - 1)
657
+
658
+
if faceuv:
659
+
tface = uv_texture[f_index]
660
+
f_image = tface.image
661
+
662
+
# MAKE KEY
663
+
if faceuv and f_image: # Object is always true.
664
+
key = material_names[f_mat], f_image.name
665
+
else:
666
+
key = material_names[f_mat], None # No image, use None instead.
667
+
668
+
# Write the vertex group
669
+
if EXPORT_POLYGROUPS:
670
+
if vertGroupNames:
671
+
# find what vertext group the face belongs to
672
+
vgroup_of_face = findVertexGroupName(f, vgroupsMap)
673
+
if vgroup_of_face != currentVGroup:
674
+
currentVGroup = vgroup_of_face
675
+
fw('g %s\n' % vgroup_of_face)
676
+
677
+
# CHECK FOR CONTEXT SWITCH
678
+
if key == contextMat:
679
+
pass # Context already switched, dont do anything
680
+
else:
681
+
if key[0] is None and key[1] is None:
682
+
# Write a null material, since we know the context has changed.
683
+
if EXPORT_GROUP_BY_MAT:
684
+
# can be mat_image or (null)
685
+
fw("g %s_%s\n" % (name_compat(ob.name), name_compat(ob.data.name)))
686
+
if EXPORT_MTL:
687
+
fw("usemtl (null)\n") # mat, image
688
+
689
+
else:
690
+
mat_data = mtl_dict.get(key)
691
+
if not mat_data:
692
+
# First add to global dict so we can export to mtl
693
+
# Then write mtl
694
+
695
+
# Make a new names from the mat and image name,
696
+
# converting any spaces to underscores with name_compat.
697
+
698
+
# If none image dont bother adding it to the name
699
+
# Try to avoid as much as possible adding texname (or other things)
700
+
# to the mtl name (see [#32102])...
701
+
mtl_name = "%s" % name_compat(key[0])
702
+
if mtl_rev_dict.get(mtl_name, None) not in {key, None}:
703
+
if key[1] is None:
704
+
tmp_ext = "_NONE"
705
+
else:
706
+
tmp_ext = "_%s" % name_compat(key[1])
707
+
i = 0
708
+
while mtl_rev_dict.get(mtl_name + tmp_ext, None) not in {key, None}:
709
+
i += 1
710
+
tmp_ext = "_%3d" % i
711
+
mtl_name += tmp_ext
712
+
mat_data = mtl_dict[key] = mtl_name, materials[f_mat], f_image
713
+
mtl_rev_dict[mtl_name] = key
714
+
715
+
if EXPORT_GROUP_BY_MAT:
716
+
# can be mat_image or (null)
717
+
fw("g %s_%s_%s\n" % (name_compat(ob.name), name_compat(ob.data.name), mat_data[0]))
718
+
if EXPORT_MTL:
719
+
fw("usemtl %s\n" % mat_data[0]) # can be mat_image or (null)
720
+
721
+
contextMat = key
722
+
if f_smooth != contextSmooth:
723
+
if f_smooth: # on now off
724
+
if smooth_groups:
725
+
f_smooth = smooth_groups[f_index]
726
+
fw('s %d\n' % f_smooth)
727
+
else:
728
+
fw('s 1\n')
729
+
else: # was off now on
730
+
fw('s off\n')
731
+
contextSmooth = f_smooth
732
+
733
+
f_v = [(vi, me_verts[v_idx], l_idx)
734
+
for vi, (v_idx, l_idx) in enumerate(zip(f.vertices, f.loop_indices))]
735
+
736
+
fw('f')
737
+
if faceuv:
738
+
if EXPORT_NORMALS:
739
+
for vi, v, li in f_v:
740
+
fw(" %d/%d/%d" % (totverts + v.index,
741
+
totuvco + uv_face_mapping[f_index][vi],
742
+
totno + loops_to_normals[li],
743
+
)) # vert, uv, normal
744
+
if EXPORT_VCOLORS and vcolors:
745
+
fw("/%d" % (totvcol + loops_to_vcolors[li])) # add vcolor
746
+
else: # No Normals
747
+
for vi, v, li in f_v:
748
+
fw(" %d/%d" % (totverts + v.index,
749
+
totuvco + uv_face_mapping[f_index][vi],
750
+
)) # vert, uv
751
+
if EXPORT_VCOLORS and vcolors:
752
+
fw("//%d" % (totvcol + loops_to_vcolors[li])) # add vcolor
753
+
754
+
face_vert_index += len(f_v)
755
+
756
+
else: # No UV's
757
+
if EXPORT_NORMALS:
758
+
for vi, v, li in f_v:
759
+
fw(" %d//%d" % (totverts + v.index, totno + loops_to_normals[li]))
760
+
if EXPORT_VCOLORS and vcolors:
761
+
fw("/%d" % (totvcol + loops_to_vcolors[li])) # add vcolor
762
+
else: # No Normals
763
+
for vi, v, li in f_v:
764
+
fw(" %d" % (totverts + v.index))
765
+
if EXPORT_VCOLORS and vcolors:
766
+
fw("///%d" % (totvcol + loops_to_vcolors[li])) # add vcolor
767
+
fw('\n')
768
+
769
+
subprogress2.step()
770
+
771
+
# Write edges.
772
+
if EXPORT_EDGES:
773
+
for ed in edges:
774
+
if ed.is_loose:
775
+
fw('l %d %d\n' % (totverts + ed.vertices[0], totverts + ed.vertices[1]))
776
+
777
+
# Make the indices global rather then per mesh
778
+
totverts += len(me_verts)
779
+
totuvco += uv_unique_count
780
+
totno += no_unique_count
781
+
totvcol += vc_unique_count
782
+
783
+
# clean up
784
+
bpy.data.meshes.remove(me)
785
+
786
+
if ob_main.dupli_type != 'NONE':
787
+
ob_main.dupli_list_clear()
788
+
789
+
subprogress1.leave_substeps("Finished writing geometry of '%s'." % ob_main.name)
790
+
subprogress1.leave_substeps()
791
+
792
+
subprogress1.step("Finished exporting geometry, now exporting materials")
793
+
794
+
# Now we have all our materials, save them
795
+
if EXPORT_MTL:
796
+
write_mtl(scene, mtlfilepath, EXPORT_PATH_MODE, copy_set, mtl_dict)
797
+
798
+
# Save the armature
799
+
if EXPORT_ARL:
800
+
write_arl(scene, arlfilepath, EXPORT_PATH_MODE, copy_set, mtl_dict, armatures)
801
+
802
+
# copy all collected files.
803
+
bpy_extras.io_utils.path_reference_copy(copy_set)
804
+
805
+
806
+
def _write(context, filepath,
           EXPORT_TRI,  # ok
           EXPORT_EDGES,
           EXPORT_SMOOTH_GROUPS,
           EXPORT_SMOOTH_GROUPS_BITFLAGS,
           EXPORT_NORMALS,  # ok
           EXPORT_VCOLORS,  # ok
           EXPORT_UV,  # ok
           EXPORT_MTL,
           EXPORT_APPLY_MODIFIERS,  # ok
           EXPORT_BLEN_OBS,
           EXPORT_GROUP_BY_OB,
           EXPORT_GROUP_BY_MAT,
           EXPORT_KEEP_VERT_ORDER,
           EXPORT_POLYGROUPS,
           EXPORT_CURVE_AS_NURBS,
           EXPORT_SEL_ONLY,  # ok
           EXPORT_ANIMATION,
           EXPORT_GLOBAL_MATRIX,
           EXPORT_PATH_MODE,  # Not used
           ):
    """Drive the export: set the scene up, then write one file per frame.

    Leaves edit mode first so object states are current, then calls
    write_file() once per exported frame (once for the current frame when
    EXPORT_ANIMATION is False), restoring the original frame afterwards.
    Progress is reported through a ProgressReport context manager.
    """
    with ProgressReport(context.window_manager) as progress:
        base_name, ext = os.path.splitext(filepath)
        context_name = [base_name, '', '', ext]  # Base name, scene name, frame number, extension

        scene = context.scene

        # Exit edit mode before exporting, so current object states are exported properly.
        if bpy.ops.object.mode_set.poll():
            bpy.ops.object.mode_set(mode='OBJECT')

        orig_frame = scene.frame_current

        # Export an animation?
        if EXPORT_ANIMATION:
            scene_frames = range(scene.frame_start, scene.frame_end + 1)  # Up to and including the end frame.
        else:
            scene_frames = [orig_frame]  # Dont export an animation.

        # Loop through all frames in the scene and export.
        progress.enter_substeps(len(scene_frames))
        for frame in scene_frames:
            if EXPORT_ANIMATION:  # Add frame to the filepath.
                context_name[2] = '_%.6d' % frame

            scene.frame_set(frame, 0.0)
            if EXPORT_SEL_ONLY:
                objects = context.selected_objects
            else:
                objects = scene.objects

            full_path = ''.join(context_name)

            # erm... bit of a problem here, this can overwrite files when exporting frames. not too bad.
            # EXPORT THE FILE.
            progress.enter_substeps(1)
            write_file(full_path, objects, scene,
                       EXPORT_TRI,
                       EXPORT_EDGES,
                       EXPORT_SMOOTH_GROUPS,
                       EXPORT_SMOOTH_GROUPS_BITFLAGS,
                       EXPORT_NORMALS,
                       EXPORT_VCOLORS,
                       EXPORT_UV,
                       EXPORT_MTL,
                       EXPORT_APPLY_MODIFIERS,
                       EXPORT_BLEN_OBS,
                       EXPORT_GROUP_BY_OB,
                       EXPORT_GROUP_BY_MAT,
                       EXPORT_KEEP_VERT_ORDER,
                       EXPORT_POLYGROUPS,
                       EXPORT_CURVE_AS_NURBS,
                       EXPORT_GLOBAL_MATRIX,
                       EXPORT_PATH_MODE,
                       progress,
                       )
            progress.leave_substeps()

        scene.frame_set(orig_frame, 0.0)
        progress.leave_substeps()
887
+
888
+
889
+
"""
890
+
Currently the exporter lacks these features:
891
+
* multiple scene export (only active scene is written)
892
+
* particles
893
+
"""
894
+
895
+
896
+
def save(context,
         filepath,
         *,
         use_triangles=False,
         use_edges=True,
         use_normals=False,
         use_vcolors=False,
         use_smooth_groups=False,
         use_smooth_groups_bitflags=False,
         use_uvs=True,
         use_materials=True,
         use_mesh_modifiers=True,
         use_blen_objects=True,
         group_by_object=False,
         group_by_material=False,
         keep_vertex_order=False,
         use_vertex_groups=False,
         use_nurbs=True,
         use_selection=True,
         use_animation=False,
         global_matrix=None,
         path_mode='AUTO'
         ):
    """Operator-facing entry point for the exporter.

    Maps the keyword-only UI options onto _write()'s EXPORT_* parameters
    and always returns {'FINISHED'} (Blender operator convention).
    """
    _write(context, filepath,
           EXPORT_TRI=use_triangles,
           EXPORT_EDGES=use_edges,
           EXPORT_SMOOTH_GROUPS=use_smooth_groups,
           EXPORT_SMOOTH_GROUPS_BITFLAGS=use_smooth_groups_bitflags,
           EXPORT_NORMALS=use_normals,
           EXPORT_VCOLORS=use_vcolors,
           EXPORT_UV=use_uvs,
           EXPORT_MTL=use_materials,
           EXPORT_APPLY_MODIFIERS=use_mesh_modifiers,
           EXPORT_BLEN_OBS=use_blen_objects,
           EXPORT_GROUP_BY_OB=group_by_object,
           EXPORT_GROUP_BY_MAT=group_by_material,
           EXPORT_KEEP_VERT_ORDER=keep_vertex_order,
           EXPORT_POLYGROUPS=use_vertex_groups,
           EXPORT_CURVE_AS_NURBS=use_nurbs,
           EXPORT_SEL_ONLY=use_selection,
           EXPORT_ANIMATION=use_animation,
           EXPORT_GLOBAL_MATRIX=global_matrix,
           EXPORT_PATH_MODE=path_mode,
           )

    return {'FINISHED'}
+513
xnalara_io_Tools/export_xnalara_model.py
+513
xnalara_io_Tools/export_xnalara_model.py
···
1
+
import os
2
+
from collections import Counter
3
+
4
+
import bpy
5
+
from mathutils import Vector
6
+
7
+
from . import (bin_ops, export_xnalara_pose, import_xnalara_pose,
8
+
mock_xps_data, node_shader_utils, write_ascii_xps,
9
+
write_bin_xps, xps_material, xps_types)
10
+
from .timing import timing
11
+
12
+
# imported XPS directory
13
+
rootDir = ''
14
+
15
+
16
+
def coordTransform(coords):
    """Convert a Blender (x, y, z) coordinate to XPS space.

    XPS uses a Y-up, left-handed convention: Y and Z are swapped and the
    original Y axis is negated.
    """
    axis_x, axis_y, axis_z = coords
    return (axis_x, axis_z, -axis_y)
20
+
21
+
22
+
def faceTransform(face):
    """Flip a triangle's winding order by swapping its 2nd and 3rd indices."""
    first, second, third = face[0], face[1], face[2]
    return [first, third, second]
24
+
25
+
26
+
def uvTransform(uv):
    """Shift a UV pair by the configured displacement and flip V.

    Reads the module-level ``xpsSettings`` global for the displacement
    values; V is flipped because XPS uses a top-left UV origin.
    """
    shifted_u = uv[0] + xpsSettings.uvDisplX
    flipped_v = 1 - xpsSettings.uvDisplY - uv[1]
    return [shifted_u, flipped_v]
30
+
31
+
32
+
def rangeFloatToByte(value):
    """Convert a color channel float in [0, 1] to an integer byte (0-255).

    Fix: the parameter was named ``float``, shadowing the builtin; renamed
    to ``value`` (all in-file callers pass it positionally). Out-of-range
    inputs still wrap via the modulo, preserving the original behavior.
    """
    return int(value * 255) % 256
34
+
35
+
36
+
def rangeByteToFloat(byte):
    """Convert an integer byte (0-255) back to a float channel in [0, 1]."""
    max_byte = 255
    return byte / max_byte
38
+
39
+
40
+
def uvTransformLayers(uvLayers):
    """Apply uvTransform to every UV coordinate in *uvLayers*."""
    return [uvTransform(layer_uv) for layer_uv in uvLayers]
42
+
43
+
44
+
def getArmature(selected_obj):
    """Return the first armature object in *selected_obj*, or None."""
    for candidate in selected_obj:
        if candidate.type == 'ARMATURE':
            return candidate
    return None
48
+
49
+
50
+
def fillArray(array, minLen, value):
    """Return *array* padded with *value* up to at least *minLen* items.

    Arrays already at or beyond *minLen* are returned unchanged (as a
    new list).
    """
    missing = minLen - len(array)
    padding = [value] * missing  # negative counts yield an empty list
    return array + padding
54
+
55
+
56
+
def getOutputFilename(xpsSettingsAux):
    """Run the full model export using *xpsSettingsAux*.

    Publishes the settings to the module-level ``xpsSettings`` global
    (the other helpers in this module read it), then runs the
    setup / export / finalize sequence.
    """
    global xpsSettings
    xpsSettings = xpsSettingsAux

    blenderExportSetup()
    xpsExport()
    blenderExportFinalize()
63
+
64
+
65
+
def blenderExportSetup():
    """Prepare Blender for export by forcing OBJECT mode."""
    # switch to object mode and deselect all
    objectMode()
68
+
69
+
70
+
def blenderExportFinalize():
    """Post-export hook; intentionally a no-op (kept for symmetry with setup)."""
    pass
72
+
73
+
74
+
def objectMode():
    """Switch Blender to OBJECT mode when another mode is active.

    mode_set fails without an active object, so both conditions are
    checked before calling it.
    """
    current_mode = bpy.context.mode
    if bpy.context.view_layer.objects.active and current_mode != 'OBJECT':
        bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
78
+
79
+
80
+
def saveXpsFile(filename, xpsData):
    """Write *xpsData* to *filename*, choosing the writer by extension.

    ``.mesh`` / ``.xps`` go to the binary writer, ``.ascii`` to the
    ASCII writer; any other extension is silently ignored (original
    behavior).
    """
    dirpath, file = os.path.split(filename)
    basename, ext = os.path.splitext(file)
    if ext.lower() in ('.mesh', '.xps'):
        write_bin_xps.writeXpsModel(xpsSettings, filename, xpsData)
    elif ext.lower() == '.ascii':
        # Fix: the original tested `ext.lower() in ('.ascii')` — the parens do
        # not make a tuple, so that was substring membership in the string
        # '.ascii' and also matched e.g. '.a' or 'sci'.
        write_ascii_xps.writeXpsModel(xpsSettings, filename, xpsData)
87
+
88
+
89
+
@timing
def xpsExport():
    """Export the scene (or selection) to an XPS model file.

    Reads every option from the module-level ``xpsSettings`` global,
    assembles bones/meshes (plus an optional embedded default pose and
    header), stores the result in the ``xpsData`` global and writes it
    via saveXpsFile().
    """
    global rootDir
    global xpsData

    print("------------------------------------------------------------")
    print("---------------EXECUTING XPS PYTHON EXPORTER----------------")
    print("------------------------------------------------------------")
    print("Exporting file: ", xpsSettings.filename)

    # Pick the export set: current selection or everything visible.
    if xpsSettings.exportOnlySelected:
        exportObjects = bpy.context.selected_objects
    else:
        exportObjects = bpy.context.visible_objects

    selectedArmature, selectedMeshes = exportSelected(exportObjects)

    xpsBones = exportArmature(selectedArmature)
    xpsMeshes = exportMeshes(selectedArmature, selectedMeshes)

    # Optionally embed the current pose as the model's default pose.
    poseString = ''
    if (xpsSettings.expDefPose):
        xpsPoseData = export_xnalara_pose.xpsPoseData(selectedArmature)
        poseString = write_ascii_xps.writePose(xpsPoseData).read()

    # Binary formats carry a header; ASCII does not.
    header = None
    hasHeader = bin_ops.hasHeader(xpsSettings.format)
    if hasHeader:
        header = mock_xps_data.buildHeader(poseString)
        header.version_mayor = xpsSettings.versionMayor
        header.version_minor = xpsSettings.versionMinor
    xpsData = xps_types.XpsData(header=header, bones=xpsBones,
                                meshes=xpsMeshes)

    saveXpsFile(xpsSettings.filename, xpsData)
124
+
125
+
126
+
def exportSelected(objects):
    """Split *objects* into meshes and locate the deforming armature.

    Returns ``(armature, meshes)`` where *armature* is the last non-None
    result of ``find_armature()`` over the meshes (or None when no mesh
    is deformed by an armature).
    """
    meshes = []
    armatures = []
    armature = None
    # Fix: the loop variable was named `object`, shadowing the builtin.
    for obj in objects:
        if obj.type == 'ARMATURE':
            armatures.append(obj)
        elif obj.type == 'MESH':
            meshes.append(obj)
            # Keep a previously found armature if this mesh has none.
            armature = obj.find_armature() or armature
    # armature = getArmature(objects)
    return armature, meshes
138
+
139
+
140
+
def exportArmature(armature):
    """Convert *armature*'s bones into a list of XpsBone objects.

    Bone ids are the indices in the armature's bone collection; head
    positions are transformed into XPS space via the armature's local
    matrix. When no armature (or no bones) is available, a single
    default 'root' bone is returned so the model always has a skeleton.
    """
    xpsBones = []
    if armature:
        bones = armature.data.bones
        print('Exporting Armature', len(bones), 'Bones')
        # activebones = [bone for bone in bones if bone.layers[0]]
        activebones = bones
        # Hoisted out of the loop: the armature matrix is loop-invariant.
        objectMatrix = armature.matrix_local
        for bone in activebones:
            # Fix: local was named `id`, shadowing the builtin.
            bone_id = bones.find(bone.name)
            name = bone.name
            co = coordTransform(objectMatrix @ bone.head_local.xyz)
            parentId = None
            if bone.parent:
                parentId = bones.find(bone.parent.name)
            xpsBone = xps_types.XpsBone(bone_id, name, co, parentId)
            xpsBones.append(xpsBone)
    if not xpsBones:
        # Fallback skeleton: XPS models need at least one bone.
        xpsBone = xps_types.XpsBone(0, 'root', (0, 0, 0), -1)
        xpsBones.append(xpsBone)

    return xpsBones
162
+
163
+
164
+
def exportMeshes(selectedArmature, selectedMeshes):
    """Convert each Blender mesh into one or more XpsMesh objects.

    A mesh with N materials is split into N XpsMesh entries (names,
    textures, vertices and faces are all pre-split per material by the
    helpers). A mesh with no materials is exported as a single entry
    with a placeholder 'dummy.png' texture.
    """
    xpsMeshes = []
    for mesh in selectedMeshes:
        print('Exporting Mesh:', mesh.name)
        meshName = makeNamesFromMesh(mesh)
        # meshName = makeNamesFromMaterials(mesh)
        meshTextures = getXpsMatTextures(mesh)
        meshVerts, meshFaces = getXpsVertices(selectedArmature, mesh)
        meshUvCount = len(mesh.data.uv_layers)

        materialsCount = len(mesh.data.materials)
        if (materialsCount > 0):
            # One XpsMesh per material slot.
            for idx in range(materialsCount):
                xpsMesh = xps_types.XpsMesh(meshName[idx], meshTextures[idx],
                                            meshVerts[idx], meshFaces[idx],
                                            meshUvCount)
                xpsMeshes.append(xpsMesh)
        else:
            # No materials: single entry with a placeholder texture.
            dummyTexture = [xps_types.XpsTexture(0, 'dummy.png', 0)]
            xpsMesh = xps_types.XpsMesh(meshName[0], dummyTexture,
                                        meshVerts[0], meshFaces[0],
                                        meshUvCount)
            xpsMeshes.append(xpsMesh)

    return xpsMeshes
189
+
190
+
191
+
def makeNamesFromMaterials(mesh):
    """Return one submesh name per material, using the material names."""
    return [material.name for material in mesh.data.materials]
197
+
198
+
199
+
def makeNamesFromMesh(mesh):
    """Build XPS submesh names for *mesh*, one per material slot.

    The first name is the mesh's own render-type-encoded name; every
    additional material slot gets a '<meshName>.materialNN' base name
    re-encoded through the same render type. Note the renderType object
    is mutated in the loop — order matters.
    """
    meshFullName = mesh.name
    renderType = xps_material.makeRenderType(meshFullName)
    meshName = renderType.meshName

    separatedMeshNames = []
    separatedMeshNames.append(xps_material.makeRenderTypeName(renderType))

    materialsCount = len(mesh.data.materials)
    # separate mesh by materials
    for mat_idx in range(1, materialsCount):
        partName = '{0}.material{1:02d}'.format(meshName, mat_idx)
        renderType.meshName = partName
        fullName = xps_material.makeRenderTypeName(renderType)
        separatedMeshNames.append(fullName)
    return separatedMeshNames
215
+
216
+
217
+
def addTexture(tex_dic, texture_type, texture):
    """Store *texture* under *texture_type* in *tex_dic*, ignoring None."""
    if texture is None:
        return
    tex_dic[texture_type] = texture
220
+
221
+
222
+
def getTextureFilename(texture):
    """Return the bare file name of *texture*'s image, or None.

    *texture* may be None or have no image, in which case None is
    returned. The image's (possibly relative, '//'-prefixed) filepath is
    resolved through bpy.path.abspath before splitting off the name.
    """
    textureFile = None
    if texture and texture.image is not None:
        texFilePath = texture.image.filepath
        absFilePath = bpy.path.abspath(texFilePath)
        texturePath, textureFile = os.path.split(absFilePath)
    return textureFile
229
+
230
+
231
+
def makeXpsTexture(mesh, material):
    """Build the ordered XpsTexture list for *material* on *mesh*.

    Collects the file name of each shader-wrapper texture slot, then
    emits textures in the order the mesh's render group expects,
    substituting 'missing.png' for absent slots.

    Refactor: the nine copy-pasted getTextureFilename/addTexture pairs
    are now a data-driven loop; the unused active_uv/active_uv_index
    locals were dropped and the 'texutre'/'textute' typos fixed.
    """
    xpsShaderWrapper = node_shader_utils.XPSShaderWrapper(material)

    # XPS texture slot -> shader-wrapper texture node.
    slot_map = (
        (xps_material.TextureType.DIFFUSE, xpsShaderWrapper.diffuse_texture),
        (xps_material.TextureType.LIGHT, xpsShaderWrapper.lightmap_texture),
        (xps_material.TextureType.BUMP, xpsShaderWrapper.normalmap_texture),
        (xps_material.TextureType.MASK, xpsShaderWrapper.normal_mask_texture),
        (xps_material.TextureType.BUMP1, xpsShaderWrapper.microbump1_texture),
        (xps_material.TextureType.BUMP2, xpsShaderWrapper.microbump2_texture),
        (xps_material.TextureType.SPECULAR, xpsShaderWrapper.specular_texture),
        (xps_material.TextureType.ENVIRONMENT, xpsShaderWrapper.environment_texture),
        (xps_material.TextureType.EMISSION, xpsShaderWrapper.emission_texture),
    )
    tex_dic = {}
    for texture_type, shader_texture in slot_map:
        addTexture(tex_dic, texture_type, getTextureFilename(shader_texture))

    # The render group (derived from the mesh name) fixes the slot order.
    renderType = xps_material.makeRenderType(mesh.name)
    renderGroup = xps_material.RenderGroup(renderType)
    rgTextures = renderGroup.rgTexType

    texture_list = [tex_dic.get(tex_type, 'missing.png') for tex_type in rgTextures]

    xpsTextures = []
    for tex_id, texture in enumerate(texture_list):
        xpsTextures.append(xps_types.XpsTexture(tex_id, texture, 0))

    return xpsTextures
271
+
272
+
273
+
def getTextures(mesh, material):
    """Return the XPS textures for *material*; thin wrapper around makeXpsTexture.

    Fix: removed the unused local ``textures = []`` the original created
    and never read.
    """
    return makeXpsTexture(mesh, material)
277
+
278
+
279
+
def getXpsMatTextures(mesh):
    """Collect the XPS texture list for every material slot of *mesh*."""
    return [getTextures(mesh, slot.material) for slot in mesh.material_slots]
286
+
287
+
288
+
def generateVertexKey(vertex, uvCoord, seamSideId):
    """Build a vertex-uniqueness key from position, normal, UVs and seam side.

    Two loops referencing the same vertex collapse to one exported vertex
    only when all four components match.
    """
    components = (vertex.co, vertex.normal, uvCoord, seamSideId)
    return '{}{}{}{}'.format(*components)
293
+
294
+
295
+
def getXpsVertices(selectedArmature, mesh):
    """Convert *mesh* into per-material XPS vertex and face lists.

    Returns ``(xpsMatVertices, xpsMatFaces)``: one entry per material
    slot (a single entry when the mesh has no materials). Vertices are
    deduplicated through a key of position/normal/UV/seam-side so that,
    with preserveSeams enabled, vertices on UV seams are split per side.
    """
    mapMatVertexKeys = []  # remap vertex index
    xpsMatVertices = []  # Vertices separated by material
    xpsMatFaces = []  # Faces separated by material
    # xpsVertices = []  # list of vertices for a single material
    # xpsFaces = []  # list of faces for a single material

    exportVertColors = xpsSettings.vColors
    armature = mesh.find_armature()
    objectMatrix = mesh.matrix_world
    rotQuaternion = mesh.matrix_world.to_quaternion()

    verts_nor = xpsSettings.exportNormals

    # Calculates tesselated faces and normal split to make them available for export.
    # NOTE(review): calc_normals_split() was removed in Blender 4.1, hence the
    # version guard; confirm whether the first calc_loop_triangles() also
    # belongs inside the guard in the original layout.
    if (bpy.app.version[0:2] in [(3, 6), (4, 0)]):
        mesh.data.calc_normals_split()
    mesh.data.calc_loop_triangles()
    mesh.data.update(calc_edges=True)
    mesh.data.calc_loop_triangles()

    # One bucket (verts, faces, key-map) per material slot; at least one.
    matCount = len(mesh.data.materials)
    if (matCount > 0):
        for idx in range(matCount):
            xpsMatVertices.append([])  # Vertices separated by material
            xpsMatFaces.append([])  # Faces separated by material
            mapMatVertexKeys.append({})
    else:
        xpsMatVertices.append([])  # Vertices separated by material
        xpsMatFaces.append([])  # Faces separated by material
        mapMatVertexKeys.append({})

    meshVerts = mesh.data.vertices
    meshEdges = mesh.data.edges
    # tessface accelerator
    hasSeams = any(edge.use_seam for edge in meshEdges)
    tessFaces = mesh.data.loop_triangles[:]
    # tessFaces = mesh.data.tessfaces
    tessface_uv_tex = mesh.data.uv_layers
    tessface_vert_color = mesh.data.vertex_colors
    meshEdgeKeys = mesh.data.edge_keys

    vertEdges = [[] for x in range(len(meshVerts))]
    tessEdgeFaces = {}

    preserveSeams = xpsSettings.preserveSeams
    if (preserveSeams and hasSeams):
        # Count edges for faces (how many triangles share each edge key).
        tessEdgeCount = Counter(tessEdgeKey for tessFace in tessFaces for tessEdgeKey in tessFace.edge_keys)

        # create dictionary. faces for each edge
        for tessface in tessFaces:
            for tessEdgeKey in tessface.edge_keys:
                if tessEdgeFaces.get(tessEdgeKey) is None:
                    tessEdgeFaces[tessEdgeKey] = []
                tessEdgeFaces[tessEdgeKey].append(tessface.index)

        # use Dict to speedup search
        edgeKeyIndex = {val: index for index, val in enumerate(meshEdgeKeys)}

        # create dictionary. Edges connected to each Vert
        for key in meshEdgeKeys:
            meshEdge = meshEdges[edgeKeyIndex[key]]
            vert1, vert2 = key
            vertEdges[vert1].append(meshEdge)
            vertEdges[vert2].append(meshEdge)

    faceEdges = []
    faceSeams = []

    for face in tessFaces:
        # faceIdx = face.index
        material_index = face.material_index
        xpsVertices = xpsMatVertices[material_index]
        xpsFaces = xpsMatFaces[material_index]
        mapVertexKeys = mapMatVertexKeys[material_index]
        faceVerts = []
        seamSideId = ''
        faceVertIndices = face.vertices[:]
        faceUvIndices = face.loops[:]

        for vertEnum, vertIndex in enumerate(faceVertIndices):
            vertex = meshVerts[vertIndex]

            if (preserveSeams and hasSeams):
                connectedFaces = set()
                faceEdges = vertEdges[vertIndex]
                faceSeams = [edge for edge in faceEdges if edge.use_seam]

                if (len(faceSeams) >= 1):
                    # NOTE(review): tessEdgeCount is keyed by edge *keys*
                    # (vertex-index pairs) but indexed here with edge.index —
                    # looks like a key mismatch; verify against upstream.
                    vertIsBorder = any(tessEdgeCount[edge.index] != 2 for edge in faceEdges)
                    if (len(faceSeams) > 1) or (len(faceSeams) == 1 and vertIsBorder):

                        # Flood-fill faces around this vertex without
                        # crossing seam edges; the resulting face set
                        # identifies which "side" of the seam we are on.
                        oldFacesList = set()
                        connectedFaces = set([face])
                        while oldFacesList != connectedFaces:

                            oldFacesList = connectedFaces

                            allEdgeKeys = set(connEdgeKey for connface in connectedFaces for connEdgeKey in connface.edge_keys)
                            connEdgesKeys = [edge.key for edge in faceEdges]
                            connEdgesNotSeamsKeys = [seam.key for seam in faceSeams]

                            connectedEdges = allEdgeKeys.intersection(connEdgesKeys).difference(connEdgesNotSeamsKeys)
                            connectedFaces = set(tessFaces[connFace] for connEdge in connectedEdges for connFace in tessEdgeFaces[connEdge])

                            connectedFaces.add(face)

                        faceIndices = [face.index for face in connectedFaces]
                        seamSideId = '|'.join(str(faceIdx) for faceIdx in sorted(faceIndices))

            uvs = getUvs(tessface_uv_tex, faceUvIndices[vertEnum])
            vertexKey = generateVertexKey(vertex, uvs, seamSideId)

            if vertexKey in mapVertexKeys:
                # Seen this exact vertex before: reuse its exported id.
                vertexID = mapVertexKeys[vertexKey]
            else:
                vCoord = coordTransform(objectMatrix @ vertex.co)
                if verts_nor:
                    normal = Vector(face.split_normals[vertEnum])
                else:
                    normal = vertex.normal
                norm = coordTransform(rotQuaternion @ normal)
                vColor = getVertexColor(exportVertColors, tessface_vert_color, faceUvIndices[vertEnum])
                boneId, boneWeight = getBoneWeights(mesh, vertex, armature)

                boneWeights = []
                for idx in range(len(boneId)):
                    boneWeights.append(xps_types.BoneWeight(boneId[idx],
                                                            boneWeight[idx]))
                vertexID = len(xpsVertices)
                mapVertexKeys[vertexKey] = vertexID
                xpsVertex = xps_types.XpsVertex(vertexID, vCoord, norm, vColor, uvs,
                                                boneWeights)
                xpsVertices.append(xpsVertex)
            faceVerts.append(vertexID)

        meshFaces = getXpsFace(faceVerts)
        xpsFaces.extend(meshFaces)

    return xpsMatVertices, xpsMatFaces
436
+
437
+
438
+
def getUvs(tessface_uv_tex, uvIndex):
    """Collect the transformed UV coords at *uvIndex* from every UV layer."""
    return [uvTransform(layer.data[uvIndex].uv) for layer in tessface_uv_tex]
445
+
446
+
447
+
def getVertexColor(exportVertColors, tessface_vert_color, vColorIndex):
    """Return the loop's vertex color as a 4-byte list, defaulting to white.

    When color export is disabled or no color layer exists, opaque white
    (255, 255, 255, 255) is returned.
    """
    if exportVertColors and tessface_vert_color:
        channels = tessface_vert_color[0].data[vColorIndex].color[:]
    else:
        channels = [1, 1, 1, 1]
    return [rangeFloatToByte(channel) for channel in channels]
456
+
457
+
458
+
def getBoneWeights(mesh, vertice, armature):
    """Collect (bone index, weight) lists for a vertex, padded to length 4.

    Bone indices are resolved by matching the vertex group name against
    the armature's bone names; groups with no matching bone are zeroed
    out. With no armature both lists are just the zero padding.
    """
    boneId = []
    boneWeight = []
    if armature:
        for vertGroup in vertice.groups:
            # Vertex Group
            groupIdx = vertGroup.group
            boneName = mesh.vertex_groups[groupIdx].name
            boneIdx = armature.data.bones.find(boneName)
            weight = vertGroup.weight
            # No bone with this name: neutralize the influence.
            if boneIdx < 0:
                boneIdx = 0
                weight = 0
            boneId.append(boneIdx)
            boneWeight.append(weight)
    boneId = fillArray(boneId, 4, 0)
    boneWeight = fillArray(boneWeight, 4, 0)
    return boneId, boneWeight
476
+
477
+
478
+
def getXpsFace(faceVerts):
    """Convert a tri or quad vertex-id list into XPS-wound triangles.

    Quads are split along the v1-v3 diagonal; any other vertex count
    yields an empty list.
    """
    vert_count = len(faceVerts)
    triangles = []
    if vert_count == 3:
        triangles.append(faceTransform(faceVerts))
    elif vert_count == 4:
        v1, v2, v3, v4 = faceVerts
        triangles.append(faceTransform((v1, v2, v3)))
        triangles.append(faceTransform((v3, v4, v1)))
    return triangles
489
+
490
+
491
+
def boneDictGenerate(filepath, armatureObj):
    """Write a bone-dictionary file mapping each XPS bone name to itself.

    Bone names are first translated to XPS naming, sorted, and written
    one per line as 'name;name' (an identity rename entry users can edit).
    """
    boneNames = sorted([import_xnalara_pose.renameBoneToXps(name) for name in armatureObj.data.bones.keys()])
    boneDictList = '\n'.join(';'.join((name,) * 2) for name in boneNames)
    write_ascii_xps.writeBoneDict(filepath, boneDictList)
495
+
496
+
497
+
if __name__ == "__main__":
    # Manual smoke-test entry point: exports a hard-coded test model.
    uvDisplX = 0
    uvDisplY = 0
    exportOnlySelected = True
    exportPose = False
    modProtected = False
    # Unused alternate test path (kept for reference).
    filename1 = (r'G:\3DModeling\XNALara\XNALara_XPS\data\TESTING5\Drake\RECB '
                 r'DRAKE Pack_By DamianHandy\DRAKE Sneaking Suit - Open_by '
                 r'DamianHandy\Generic_Item - BLENDER pose.mesh')

    filename = r'C:\XPS Tutorial\Yaiba MOMIJIII\momi.mesh.ascii'

    # NOTE(review): XpsImportSettings is constructed in the *export* module —
    # confirm this is the intended settings type for getOutputFilename.
    xpsSettings = xps_types.XpsImportSettings(filename, uvDisplX, uvDisplY,
                                              exportOnlySelected, exportPose,
                                              modProtected)

    getOutputFilename(xpsSettings)
+177
xnalara_io_Tools/export_xnalara_pose.py
+177
xnalara_io_Tools/export_xnalara_pose.py
···
1
+
from math import degrees
2
+
import os
3
+
import re
4
+
5
+
from . import write_ascii_xps
6
+
from . import xps_types
7
+
from .timing import timing
8
+
import bpy
9
+
from mathutils import Vector
10
+
11
+
12
+
def getOutputPoseSequence(filename):
    """Export one pose file per frame of the scene's frame range.

    Any trailing digits in *filename*'s base name are replaced with the
    zero-padded frame number for each frame; the scene's current frame
    is restored afterwards.
    """
    filepath, file = os.path.split(filename)
    basename, ext = os.path.splitext(file)
    # Base name with trailing digits stripped (it is a prefix, despite the name).
    poseSuffix = re.sub(r'\d+$', '', basename)

    startFrame = bpy.context.scene.frame_start
    endFrame = bpy.context.scene.frame_end
    initialFrame = bpy.context.scene.frame_current

    for currFrame in range(startFrame, endFrame + 1):
        bpy.context.scene.frame_set(currFrame)
        numSuffix = '{:0>3d}'.format(currFrame)
        name = poseSuffix + numSuffix + ext

        newPoseFilename = os.path.join(filepath, name)
        getOutputFilename(newPoseFilename)

    # Restore the frame the user was on before the export loop.
    bpy.context.scene.frame_current = initialFrame
30
+
31
+
32
+
def getOutputFilename(filename):
    """Run the full pose-export pipeline for *filename*.

    Thin orchestrator: setup hook, the actual export, finalize hook.
    """
    blenderExportSetup()
    xpsExport(filename)
    blenderExportFinalize()
36
+
37
+
38
+
def blenderExportSetup():
    """Hook run before the export; currently nothing needs preparing."""
    return None
40
+
41
+
42
+
def blenderExportFinalize():
    """Hook run after the export; currently nothing needs cleaning up."""
    return None
44
+
45
+
46
+
def saveXpsFile(filename, xpsPoseData):
    """Write *xpsPoseData* to *filename* as an ASCII XPS .pose file."""
    write_ascii_xps.writeXpsPose(filename, xpsPoseData)
50
+
51
+
52
+
@timing
def xpsExport(filename):
    """Export the pose of the currently selected armature to *filename*.

    Side effects: sets the module-level ``rootDir`` global to the output
    directory and writes the pose file to disk.
    """
    # FIX: removed the dead "global xpsData" declaration — this function
    # never assigned (or read) xpsData, so the statement had no effect.
    global rootDir

    print("------------------------------------------------------------")
    print("---------------EXECUTING XPS PYTHON EXPORTER----------------")
    print("------------------------------------------------------------")
    print("Exporting Pose: ", filename)

    # dirname() replaces os.path.split(); the second element was unused.
    rootDir = os.path.dirname(filename)
    print('rootDir: {}'.format(rootDir))

    # Local renamed from "xpsPoseData" so it no longer shadows the sibling
    # function of the same name.
    poseData = exportPose()

    saveXpsFile(filename, poseData)
68
+
69
+
70
+
def exportPose():
    """Gather pose data from the first selected armature object.

    Returns:
        dict: bone name -> XpsBonePose, as built by xpsPoseData().

    Raises:
        ValueError: if no armature is among the selected objects.
            (Previously this crashed with an opaque AttributeError on None.)
    """
    armature = next((obj for obj in bpy.context.selected_objects
                     if obj.type == 'ARMATURE'), None)
    if armature is None:
        raise ValueError('No armature selected; select an armature to export its pose')
    boneCount = len(armature.data.bones)
    print('Exporting Pose', str(boneCount), 'bones')

    return xpsPoseData(armature)
77
+
78
+
79
+
def xpsPoseData(armature):
    """Build a {bone name: XpsBonePose} mapping for every pose bone.

    Temporarily makes *armature* the active object and switches to POSE
    mode; the previously active object and mode are restored afterwards.
    """
    context = bpy.context
    currentMode = context.mode
    currentObj = context.active_object
    context.view_layer.objects.active = armature
    bpy.ops.object.mode_set(mode='OBJECT', toggle=False)

    bpy.ops.object.mode_set(mode='POSE')
    bpy.ops.pose.select_all(action='DESELECT')
    bones = armature.pose.bones
    objectMatrix = armature.matrix_world

    poseData = {}
    for poseBone in bones:
        poseData[poseBone.name] = xpsPoseBone(poseBone, objectMatrix)

    bpy.ops.object.posemode_toggle()
    context.view_layer.objects.active = currentObj
    # FIX: bpy.context.mode reports values such as 'EDIT_ARMATURE' or
    # 'PAINT_WEIGHT' that bpy.ops.object.mode_set() does not accept;
    # translate them back to valid mode_set() enum names before restoring.
    modeMap = {'EDIT_MESH': 'EDIT', 'EDIT_CURVE': 'EDIT',
               'EDIT_SURFACE': 'EDIT', 'EDIT_TEXT': 'EDIT',
               'EDIT_ARMATURE': 'EDIT', 'EDIT_METABALL': 'EDIT',
               'EDIT_LATTICE': 'EDIT',
               'PAINT_WEIGHT': 'WEIGHT_PAINT',
               'PAINT_VERTEX': 'VERTEX_PAINT',
               'PAINT_TEXTURE': 'TEXTURE_PAINT'}
    bpy.ops.object.mode_set(mode=modeMap.get(currentMode, currentMode))

    return poseData
102
+
103
+
104
+
def xpsPoseBone(poseBone, objectMatrix):
    """Assemble an XpsBonePose (name, translation, rotation, scale) for one bone."""
    rotDelta = xpsBoneRotate(poseBone)
    coordDelta = xpsBoneTranslate(poseBone, objectMatrix)
    scaleDelta = xpsBoneScale(poseBone)
    return xps_types.XpsBonePose(poseBone.name, coordDelta, rotDelta, scaleDelta)
112
+
113
+
114
+
def eulerToXpsBoneRot(rotEuler):
    """Convert an Euler rotation (radians) into a Vector of degrees."""
    return Vector((degrees(rotEuler.x),
                   degrees(rotEuler.y),
                   degrees(rotEuler.z)))
119
+
120
+
121
+
def vectorTransform(vec):
    """Swap a Blender-space vector into XPS space: (x, y, z) -> (x, z, -y)."""
    return Vector((vec.x, vec.z, -vec.y))
128
+
129
+
130
+
def vectorTransformTranslate(vec):
    """Swap a translation vector into XPS space: (x, y, z) -> (x, z, -y).

    The axis swap is identical to vectorTransform(); delegate to it so the
    coordinate convention lives in a single place (the body was a
    line-for-line duplicate).
    """
    return vectorTransform(vec)
137
+
138
+
139
+
def vectorTransformScale(vec):
    """Return the scale vector as a new Vector; XPS keeps scale axes unchanged."""
    return Vector((vec.x, vec.y, vec.z))
145
+
146
+
147
+
def xpsBoneRotate(poseBone):
    """Return the bone's pose rotation as XPS-style degrees (a Vector).

    The local pose rotation (matrix_basis) is conjugated by the bone's
    rest-pose orientation (matrix_local) to re-express it in armature
    space, converted to YXZ Euler degrees, then axis-swapped into the
    XPS convention.
    """
    # LOCAL PoseBone
    poseMatGlobal = poseBone.matrix_basis.to_quaternion()
    # LOCAL EditBoneRot
    editMatLocal = poseBone.bone.matrix_local.to_quaternion()

    # Change of basis: rest_rot * pose_rot * rest_rot^-1.
    rotQuat = editMatLocal @ poseMatGlobal @ editMatLocal.inverted()
    rotEuler = rotQuat.to_euler('YXZ')
    xpsRot = eulerToXpsBoneRot(rotEuler)
    rot = vectorTransform(xpsRot)
    return rot
158
+
159
+
160
+
def xpsBoneTranslate(poseBone, objectMatrix):
    """Return the bone's pose translation in XPS coordinates."""
    # Rest-pose orientation of the bone (rotation component only).
    restRot = poseBone.bone.matrix_local.to_quaternion()
    # Bring the bone-local offset into armature space, apply the object's
    # world rotation/scale, then axis-swap into XPS space.
    armatureSpace = restRot @ poseBone.location
    return vectorTransformTranslate(objectMatrix.to_3x3() @ armatureSpace)
166
+
167
+
168
+
def xpsBoneScale(poseBone):
    """Return the bone's pose scale converted to XPS convention."""
    return vectorTransformScale(poseBone.scale)
171
+
172
+
173
+
if __name__ == "__main__":
    # Manual test entry point: export the pose to a hard-coded sample path.
    samplePoseFile = (r"G:\3DModeling\XNALara\XNALara_XPS\dataTest\Models"
                      r"\Queen's Blade\echidna pose - copy.pose")

    getOutputFilename(samplePoseFile)
xnalara_io_Tools/icons/icon.png
xnalara_io_Tools/icons/icon.png
This is a binary file and will not be displayed.
xnalara_io_Tools/icons/icon_256x256.png
xnalara_io_Tools/icons/icon_256x256.png
This is a binary file and will not be displayed.
+1498
xnalara_io_Tools/import_obj.py
+1498
xnalara_io_Tools/import_obj.py
···
1
+
# ##### BEGIN GPL LICENSE BLOCK #####
2
+
#
3
+
# This program is free software; you can redistribute it and/or
4
+
# modify it under the terms of the GNU General Public License
5
+
# as published by the Free Software Foundation; either version 2
6
+
# of the License, or (at your option) any later version.
7
+
#
8
+
# This program is distributed in the hope that it will be useful,
9
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
10
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11
+
# GNU General Public License for more details.
12
+
#
13
+
# You should have received a copy of the GNU General Public License
14
+
# along with this program; if not, write to the Free Software Foundation,
15
+
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
16
+
#
17
+
# ##### END GPL LICENSE BLOCK #####
18
+
19
+
# Script copyright (C) Campbell Barton
20
+
# Contributors: Campbell Barton, Jiri Hnidek, Paolo Ciccone
21
+
22
+
"""
23
+
This script imports a Wavefront OBJ files to Blender.
24
+
25
+
Usage:
26
+
Run this script from "File->Import" menu and then load the desired OBJ file.
27
+
Note, This loads mesh objects and materials only, nurbs and curves are not supported.
28
+
29
+
http://wiki.blender.org/index.php/Scripts/Manual/Import/wavefront_obj
30
+
"""
31
+
import ast
32
+
import array
33
+
import os
34
+
import bpy
35
+
import mathutils
36
+
from bpy_extras.io_utils import unpack_list
37
+
from bpy_extras.image_utils import load_image
38
+
39
+
from bpy_extras.wm_utils.progress_report import (
40
+
ProgressReport,
41
+
ProgressReportSubstep,
42
+
)
43
+
44
+
45
+
def line_value(line_split):
    """Return the value part of an already-split OBJ/MTL line.

    *line_split* is the list of whitespace-split byte tokens of one line
    (keyword first). Returns the single value token for a two-token line,
    the remaining tokens re-joined with single spaces when there are more,
    and None when there is no value at all.
    """
    count = len(line_split)
    if count == 2:
        return line_split[1]
    if count > 2:
        return b' '.join(line_split[1:])
    return None
59
+
60
+
61
+
def obj_image_load(imagepath, DIR, recursive, relpath):
    """Load an image for the OBJ importer.

    First tries the path with underscores replaced by spaces (3ds Max's
    exporter writes spaces as underscores); on failure falls back to the
    literal path, generating a placeholder image when it cannot be found.
    """
    if "_" in imagepath:
        despaced = load_image(imagepath.replace("_", " "), DIR,
                              recursive=recursive, relpath=relpath)
        if despaced:
            return despaced

    return load_image(imagepath, DIR, recursive=recursive,
                      place_holder=True, relpath=relpath)
72
+
73
+
74
+
def create_materials(filepath, relpath,
                     material_libs, unique_materials, unique_material_images,
                     use_image_search, float_func):
    """
    Create all the used materials in this obj,
    assign colors and images to the materials from all referenced material libs.

    Fixes vs. the previous revision:
    - the texture map-option parser now flushes the last pending option
      (e.g. "map_Bump -bm 0.5 file.png" previously lost "-bm" entirely);
    - the bump multiplier is converted to float before assignment (it is a
      bytes token as parsed);
    - the Ellipsis cache sentinel is compared with "is".
    """
    DIR = os.path.dirname(filepath)
    context_material_vars = set()

    # Don't load the same image multiple times
    context_imagepath_map = {}

    def load_material_image(blender_material, context_material_name, img_data, type):
        """
        Set textures defined in .mtl file.
        """
        imagepath = os.fsdecode(img_data[-1])
        map_options = {}

        # Parse "-option value..." tokens preceding the image path.
        curr_token = []
        for token in img_data[:-1]:
            if token.startswith(b'-'):
                if curr_token:
                    map_options[curr_token[0]] = curr_token[1:]
                curr_token[:] = []
            curr_token.append(token)
        # FIX: flush the final pending option; without this the option
        # written directly before the filename was silently dropped.
        if curr_token:
            map_options[curr_token[0]] = curr_token[1:]

        texture = bpy.data.textures.new(name=type, type='IMAGE')

        # Absolute path - c:\.. etc would work here
        image = context_imagepath_map.get(imagepath, ...)
        if image is ...:  # FIX: identity test for the Ellipsis sentinel
            image = context_imagepath_map[imagepath] = \
                obj_image_load(imagepath, DIR, use_image_search, relpath)

        if image is not None:
            texture.image = image

        # Adds textures for materials (rendering)
        if type == 'Kd':
            mtex = blender_material.texture_slots.add()
            mtex.texture = texture
            mtex.texture_coords = 'UV'
            mtex.use_map_color_diffuse = True

            # adds textures to faces (Textured/Alt-Z mode)
            # Only apply the diffuse texture to the face if the image has not been set with the inline usemat func.
            unique_material_images[context_material_name] = image  # set the texface image

        elif type == 'Ka':
            mtex = blender_material.texture_slots.add()
            mtex.use_map_color_diffuse = False

            mtex.texture = texture
            mtex.texture_coords = 'UV'
            mtex.use_map_ambient = True

        elif type == 'Ks':
            mtex = blender_material.texture_slots.add()
            mtex.use_map_color_diffuse = False

            mtex.texture = texture
            mtex.texture_coords = 'UV'
            mtex.use_map_color_spec = True

        elif type == 'Ke':
            mtex = blender_material.texture_slots.add()
            mtex.use_map_color_diffuse = False

            mtex.texture = texture
            mtex.texture_coords = 'UV'
            mtex.use_map_emit = True

        elif type == 'Bump':
            mtex = blender_material.texture_slots.add()
            mtex.use_map_color_diffuse = False

            mtex.texture = texture
            mtex.texture.use_normal_map = True
            mtex.texture_coords = 'UV'
            mtex.use_map_normal = True

            bump_mult = map_options.get(b'-bm')
            if bump_mult:
                # FIX: option values are bytes tokens; convert explicitly.
                mtex.normal_factor = float(bump_mult[0])

        elif type == 'D':
            mtex = blender_material.texture_slots.add()
            mtex.use_map_color_diffuse = False

            mtex.texture = texture
            mtex.texture_coords = 'UV'
            mtex.use_map_alpha = True
            blender_material.use_transparency = True
            blender_material.transparency_method = 'Z_TRANSPARENCY'
            if "alpha" not in context_material_vars:
                blender_material.alpha = 0.0
            # Todo, unset deffuse material alpha if it has an alpha channel

        elif type == 'disp':
            mtex = blender_material.texture_slots.add()
            mtex.use_map_color_diffuse = False

            mtex.texture = texture
            mtex.texture_coords = 'UV'
            mtex.use_map_displacement = True

        elif type == 'refl':
            mtex = blender_material.texture_slots.add()
            mtex.use_map_color_diffuse = False

            mtex.texture = texture
            mtex.texture_coords = 'REFLECTION'
            mtex.use_map_color_diffuse = True

            map_type = map_options.get(b'-type')
            if map_type and map_type != [b'sphere']:
                print("WARNING, unsupported reflection type '%s', defaulting to 'sphere'"
                      "" % ' '.join(i.decode() for i in map_type))
            mtex.mapping = 'SPHERE'
        else:
            raise Exception("invalid type %r" % type)

        # Common texture placement options.
        map_offset = map_options.get(b'-o')
        map_scale = map_options.get(b'-s')
        if map_offset:
            mtex.offset.x = float(map_offset[0])
            if len(map_offset) >= 2:
                mtex.offset.y = float(map_offset[1])
            if len(map_offset) >= 3:
                mtex.offset.z = float(map_offset[2])
        if map_scale:
            mtex.scale.x = float(map_scale[0])
            if len(map_scale) >= 2:
                mtex.scale.y = float(map_scale[1])
            if len(map_scale) >= 3:
                mtex.scale.z = float(map_scale[2])

    # Add an MTL with the same name as the obj if no MTLs are specified.
    temp_mtl = os.path.splitext((os.path.basename(filepath)))[0] + ".mtl"

    if os.path.exists(os.path.join(DIR, temp_mtl)):
        material_libs.add(temp_mtl)
    del temp_mtl

    # Create new materials
    for name in unique_materials:  # .keys()
        if name is not None:
            unique_materials[name] = bpy.data.materials.new(name.decode('utf-8', "replace"))
            unique_material_images[name] = None  # assign None to all material images to start with, add to later.

    # XXX Why was this needed? Cannot find any good reason, and adds stupid empty matslot in case we do not separate
    # mesh (see T44947).
    # ~ unique_materials[None] = None
    # ~ unique_material_images[None] = None

    for libname in sorted(material_libs):
        # print(libname)
        mtlpath = os.path.join(DIR, libname)
        if not os.path.exists(mtlpath):
            print("\tMaterial not found MTL: %r" % mtlpath)
        else:
            do_ambient = True
            do_highlight = False
            do_reflection = False
            do_transparency = False
            do_glass = False
            do_fresnel = False
            do_raytrace = False
            emit_colors = [0.0, 0.0, 0.0]

            # print('\t\tloading mtl: %e' % mtlpath)
            context_material = None
            # NOTE(review): not closed on a parse error; consider a "with"
            # block. Also, the *last* material of a lib never reaches the
            # "finalize previous mat" branch below — confirm and fix
            # separately if emission/illum flags matter for it.
            mtl = open(mtlpath, 'rb')
            for line in mtl:  # .readlines():
                line = line.strip()
                if not line or line.startswith(b'#'):
                    continue

                line_split = line.split()
                line_id = line_split[0].lower()

                if line_id == b'newmtl':
                    # Finalize previous mat, if any.
                    if context_material:
                        emit_value = sum(emit_colors) / 3.0
                        if emit_value > 1e-6:
                            # We have to adapt it to diffuse color too...
                            emit_value /= sum(context_material.diffuse_color) / 3.0
                        context_material.emit = emit_value

                        if not do_ambient:
                            context_material.ambient = 0.0

                        if do_highlight:
                            # FIXME, how else to use this?
                            context_material.specular_intensity = 1.0

                        if do_reflection:
                            context_material.raytrace_mirror.use = True
                            context_material.raytrace_mirror.reflect_factor = 1.0

                        if do_transparency:
                            context_material.use_transparency = True
                            context_material.transparency_method = 'RAYTRACE' if do_raytrace else 'Z_TRANSPARENCY'
                            if "alpha" not in context_material_vars:
                                context_material.alpha = 0.0

                        if do_glass:
                            if "ior" not in context_material_vars:
                                context_material.raytrace_transparency.ior = 1.5

                        if do_fresnel:
                            context_material.raytrace_mirror.fresnel = 1.0  # could be any value for 'ON'

                        """
                        if do_raytrace:
                            context_material.use_raytrace = True
                        else:
                            context_material.use_raytrace = False
                        """
                        # XXX, this is not following the OBJ spec, but this was
                        # written when raytracing wasnt default, annoying to disable for blender users.
                        context_material.use_raytrace = True

                    context_material_name = line_value(line_split)
                    context_material = unique_materials.get(context_material_name)
                    context_material_vars.clear()

                    emit_colors[:] = [0.0, 0.0, 0.0]
                    do_ambient = True
                    do_highlight = False
                    do_reflection = False
                    do_transparency = False
                    do_glass = False
                    do_fresnel = False
                    do_raytrace = False

                elif context_material:
                    # we need to make a material to assign properties to it.
                    if line_id == b'ka':
                        context_material.mirror_color = (
                            float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3]))
                        # This is highly approximated, but let's try to stick as close from exporter as possible... :/
                        context_material.ambient = sum(context_material.mirror_color) / 3
                    elif line_id == b'kd':
                        context_material.diffuse_color = (
                            float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3]))
                        context_material.diffuse_intensity = 1.0
                    elif line_id == b'ks':
                        context_material.specular_color = (
                            float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3]))
                        context_material.specular_intensity = 1.0
                    elif line_id == b'ke':
                        # We cannot set context_material.emit right now, we need final diffuse color as well for this.
                        emit_colors[:] = [
                            float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3])]
                    elif line_id == b'ns':
                        context_material.specular_hardness = int((float_func(line_split[1]) * 0.51) + 1)
                    elif line_id == b'ni':  # Refraction index (between 1 and 3).
                        context_material.raytrace_transparency.ior = max(1, min(float_func(line_split[1]), 3))
                        context_material_vars.add("ior")
                    elif line_id == b'd':  # dissolve (transparency)
                        context_material.alpha = float_func(line_split[1])
                        context_material.use_transparency = True
                        context_material.transparency_method = 'Z_TRANSPARENCY'
                        context_material_vars.add("alpha")
                    elif line_id == b'tr':  # translucency
                        context_material.translucency = float_func(line_split[1])
                    elif line_id == b'tf':
                        # rgb, filter color, blender has no support for this.
                        pass
                    elif line_id == b'illum':
                        illum = int(line_split[1])

                        # inline comments are from the spec, v4.2
                        if illum == 0:
                            # Color on and Ambient off
                            do_ambient = False
                        elif illum == 1:
                            # Color on and Ambient on
                            pass
                        elif illum == 2:
                            # Highlight on
                            do_highlight = True
                        elif illum == 3:
                            # Reflection on and Ray trace on
                            do_reflection = True
                            do_raytrace = True
                        elif illum == 4:
                            # Transparency: Glass on
                            # Reflection: Ray trace on
                            do_transparency = True
                            do_reflection = True
                            do_glass = True
                            do_raytrace = True
                        elif illum == 5:
                            # Reflection: Fresnel on and Ray trace on
                            do_reflection = True
                            do_fresnel = True
                            do_raytrace = True
                        elif illum == 6:
                            # Transparency: Refraction on
                            # Reflection: Fresnel off and Ray trace on
                            do_transparency = True
                            do_reflection = True
                            do_raytrace = True
                        elif illum == 7:
                            # Transparency: Refraction on
                            # Reflection: Fresnel on and Ray trace on
                            do_transparency = True
                            do_reflection = True
                            do_fresnel = True
                            do_raytrace = True
                        elif illum == 8:
                            # Reflection on and Ray trace off
                            do_reflection = True
                        elif illum == 9:
                            # Transparency: Glass on
                            # Reflection: Ray trace off
                            do_transparency = True
                            do_reflection = True
                            do_glass = True
                        elif illum == 10:
                            # Casts shadows onto invisible surfaces

                            # blender can't do this
                            pass

                    elif line_id == b'map_ka':
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_material_name, img_data, 'Ka')
                    elif line_id == b'map_ks':
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_material_name, img_data, 'Ks')
                    elif line_id == b'map_kd':
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_material_name, img_data, 'Kd')
                    elif line_id == b'map_ke':
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_material_name, img_data, 'Ke')
                    elif line_id in {b'map_kn', b'map_bump', b'bump'}:  # 'bump' is incorrect but some files use it.
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_material_name, img_data, 'Bump')
                    elif line_id in {b'map_d', b'map_tr'}:  # Alpha map - Dissolve
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_material_name, img_data, 'D')

                    elif line_id in {b'map_disp', b'disp'}:  # displacementmap
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_material_name, img_data, 'disp')

                    elif line_id in {b'map_refl', b'refl'}:  # reflectionmap
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_material_name, img_data, 'refl')
                    else:
                        print("\t%r:%r (ignored)" % (filepath, line))
            mtl.close()
441
+
442
+
443
+
def hideBone(bone):
    """Move *bone* onto layer 1 and off layer 0 (the visible layer).

    Layer 1 is enabled before layer 0 is disabled, keeping the bone on at
    least one layer at every step.
    """
    bone.layers[1], bone.layers[0] = True, False
446
+
447
+
448
+
def showBone(bone):
    """Move *bone* onto layer 0 (the visible layer) and off layer 1.

    Layer 0 is enabled before layer 1 is disabled, keeping the bone on at
    least one layer at every step.
    """
    bone.layers[0], bone.layers[1] = True, False
451
+
452
+
453
+
def visibleBone(bone):
    """Return whether *bone* lives on layer 0, the importer's visible layer."""
    return bone.layers[0]
455
+
456
+
457
+
def setMinimumLenght(bone):
    """Give *bone* a usable non-zero length.

    A zero-length bone first gets a small -Y offset so it has a direction,
    then any bone shorter than the minimum is stretched to it.
    (Name typo "Lenght" kept: existing callers use this spelling.)
    """
    minimum = 0.005
    if bone.length == 0:
        bone.tail = bone.head - mathutils.Vector((0, .01, 0))
    if bone.length < minimum:
        bone.length = minimum
463
+
464
+
465
+
def create_armatures(filepath, relpath,
                     armature_libs, unique_materials, unique_material_images,
                     use_image_search, float_func, new_armatures, new_objects, bone_names):
    """
    Create armatures in this obj,
    """
    # For each referenced .arl library: parse its bone records (a count,
    # then repeating name / parent-index / head-position lines) and build a
    # Blender armature object, appended to new_armatures. Parsed names are
    # appended to the caller-supplied bone_names list.
    # NOTE(review): unique_materials, unique_material_images,
    # use_image_search and new_objects are unused here — presumably kept to
    # mirror create_materials' signature; confirm before removing.
    DIR = os.path.dirname(filepath)

    # Add an ARL with the same name as the obj if no ARLs are specified.
    temp_arl = os.path.splitext((os.path.basename(filepath)))[0] + ".arl"

    if os.path.exists(os.path.join(DIR, temp_arl)):
        armature_libs.add(temp_arl)
    del temp_arl

    for libname in sorted(armature_libs):
        # print(libname)
        arlpath = os.path.join(DIR, libname)
        if not os.path.exists(arlpath):
            print("\tArmature not found ARL: %r" % arlpath)
        else:
            # context_multi_line = b''
            # line_start = b''
            line_split = []
            vec = []
            # bone_names = []
            bone_parents = []   # parent index per bone; negative = root
            bone_heads = []     # [x, y, z] head position per bone

            # print('\t\tloading armature: %e' % arlpath)
            with open(arlpath, 'rb') as mtl:

                bone_count = None
                read_b_name = read_b_head = read_b_parent = False
                # Simple state machine: the first non-comment line is the
                # bone count, then lines cycle name -> parent -> head xyz.
                for line in mtl:  # .readlines():

                    line = line.strip()
                    if not line or line.startswith(b'#'):
                        continue

                    line_split = line.split()

                    if not bone_count:
                        bone_count = int(line_split[0])
                        read_b_name = read_b_parent = read_b_head = False
                        read_b_name = True
                    elif read_b_name:
                        # Whole line is the bone name (may contain spaces).
                        bone_names.append(line)
                        read_b_name = read_b_parent = read_b_head = False
                        read_b_parent = True
                    elif read_b_parent:
                        bone_parents.append(int(line_split[0]))
                        read_b_name = read_b_parent = read_b_head = False
                        read_b_head = True
                    elif read_b_head:
                        bone_heads.append([float_func(line_split[0]), float_func(line_split[1]), float_func(line_split[2])])
                        read_b_name = read_b_parent = read_b_head = False
                        read_b_name = True

            # Create the armature object
            me = bpy.data.armatures.new('Armature')
            # NOTE(review): 2.7x-era API name — Blender 2.80+ renamed this
            # to "display_type", while the linking below uses the 2.80+
            # collection/view_layer API. Confirm the target Blender version.
            me.draw_type = 'STICK'
            ob = bpy.data.objects.new(me.name, me)
            # NOTE(review): likewise renamed to "show_in_front" in 2.80+.
            ob.show_x_ray = True

            bpy.context.scene.collection.objects.link(ob)
            bpy.context.view_layer.objects.active = ob
            bpy.ops.object.mode_set(mode='EDIT')

            # Create all bones
            for bone_id, bone_name in enumerate(bone_names):
                bone = me.edit_bones.new(bone_name.decode('utf-8', 'replace'))
                bone.head = bone_heads[bone_id]
                bone.tail = bone.head  # + mathutils.Vector((0,.01,0))

            # Set bone heirarchy
            for bone_id, bone_parent_id in enumerate(bone_parents):
                if bone_parent_id >= 0:
                    me.edit_bones[bone_id].parent = me.edit_bones[bone_parent_id]

            # Set calculate bone tails
            for edit_bone in me.edit_bones:
                # Average only visible children when the bone itself is
                # visible, otherwise all children.
                if visibleBone(edit_bone):
                    childBones = [childBone for childBone in edit_bone.children
                                  if visibleBone(childBone)]
                else:
                    childBones = [childBone for childBone in edit_bone.children]

                if childBones:
                    # Set tail to children middle
                    edit_bone.tail = mathutils.Vector(map(sum, zip(*(childBone.head.xyz for childBone in childBones))))/len(childBones)
                else:
                    # Leaf bone: continue along the parent's direction, or
                    # mirror the offset from the parent's tail.
                    if edit_bone.parent:
                        vec = edit_bone.parent.tail - edit_bone.head
                        if (vec.length < .001):
                            edit_bone.tail = edit_bone.parent.vector + edit_bone.head
                            edit_bone.length = edit_bone.parent.length
                        else:
                            edit_bone.tail = (edit_bone.head - edit_bone.parent.tail) + edit_bone.head
                            edit_bone.length = 0.1

            # Zero-length edit bones would not survive leaving edit mode.
            for edit_bone in me.edit_bones:
                setMinimumLenght(edit_bone)

            # Must add before creating the bones
            bpy.ops.object.mode_set(mode='OBJECT')
            new_armatures.append(ob)
572
+
573
+
574
+
def getVert(new_objects):
    """Collect every vertex of every mesh object in *new_objects* into one flat list."""
    allVerts = []
    for obj in new_objects:
        allVerts.extend(obj.data.vertices)
    return allVerts
576
+
577
+
578
+
def split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP, verts_bw):
    """
    Takes vert_loc and faces, and separates into multiple sets of
    (verts_loc, faces, unique_materials, dataname)
    """
    # Face tuple layout (see create_mesh): (loc_indices, nor_indices,
    # tex_indices, col_indices, material_name, smooth_group, object_key,
    # invalid_poly); "..." (Ellipsis) is the "absent" sentinel for the
    # index lists. Each returned entry also carries use_nor/use_tex/use_col
    # flags and the per-split vertex weights.

    filename = os.path.splitext((os.path.basename(filepath)))[0]

    if not SPLIT_OB_OR_GROUP or not faces:
        use_verts_nor = any((False if f[1] is ... else True) for f in faces)
        use_verts_tex = any((False if f[2] is ... else True) for f in faces)
        use_verts_col = any((False if f[3] is ... else True) for f in faces)
        # use the filename for the object name since we aren't chopping up the mesh.
        return [(verts_loc, faces, unique_materials, filename, use_verts_nor, use_verts_tex, use_verts_col, verts_bw)]

    def key_to_name(key):
        # if the key is a tuple, join it to make a string
        if not key:
            return filename  # assume its a string. make sure this is true if the splitting code is changed
        else:
            return key.decode('utf-8', 'replace')

    # Return a key that makes the faces unique.
    face_split_dict = {}

    oldkey = -1  # initialize to a value that will never match the key

    for face in faces:
        # Split on the object key (face[6]); faces with the same key share
        # one accumulator tuple in face_split_dict.
        key = face[6]

        if oldkey != key:
            # Check the key has changed.
            # use_verts_* are one-element lists used as mutable booleans
            # shared through face_split_dict.
            (verts_split, faces_split, unique_materials_split, vert_remap,
             use_verts_nor, use_verts_tex, use_verts_col, verts_bw_split) = face_split_dict.setdefault(key, ([], [], {}, {}, [], [], [], []))
            oldkey = key

        face_vert_loc_indices = face[0]

        if not use_verts_nor and face[1] is not ...:
            use_verts_nor.append(True)

        if not use_verts_tex and face[2] is not ...:
            use_verts_tex.append(True)

        if not use_verts_col and face[3] is not ...:
            use_verts_col.append(True)

        # Remap verts to new vert list and add where needed
        for enum, i in enumerate(face_vert_loc_indices):
            map_index = vert_remap.get(i)
            if map_index is None:
                map_index = len(verts_split)
                vert_remap[i] = map_index  # set the new remapped index so we only add once and can reference next time.
                verts_split.append(verts_loc[i])  # add the vert to the local verts
                if verts_bw:
                    verts_bw_split.append(verts_bw[i])  # add the vertex weight

            face_vert_loc_indices[enum] = map_index  # remap to the local index (in-place mutation of the face)

        matname = face[4]
        if matname and matname not in unique_materials_split:
            unique_materials_split[matname] = unique_materials[matname]

        faces_split.append(face)

    # remove one of the items and reorder
    return [(verts_split, faces_split, unique_materials_split, key_to_name(key), bool(use_vnor), bool(use_vtex), bool(use_vcol), verts_bw_split)
            for key, (verts_split, faces_split, unique_materials_split, _, use_vnor, use_vtex, use_vcol, verts_bw_split)
            in face_split_dict.items()]
647
+
648
+
649
+
def create_mesh(new_objects,
650
+
use_edges,
651
+
verts_loc,
652
+
verts_nor,
653
+
verts_tex,
654
+
verts_col,
655
+
faces,
656
+
unique_materials,
657
+
unique_material_images,
658
+
unique_smooth_groups,
659
+
vertex_groups,
660
+
dataname,
661
+
verts_bw,
662
+
new_armatures,
663
+
bone_names
664
+
):
665
+
"""
666
+
Takes all the data gathered and generates a mesh, adding the new object to new_objects
667
+
deals with ngons, sharp edges and assigning materials
668
+
"""
669
+
670
+
if unique_smooth_groups:
671
+
sharp_edges = set()
672
+
smooth_group_users = {context_smooth_group: {} for context_smooth_group in unique_smooth_groups.keys()}
673
+
context_smooth_group_old = -1
674
+
675
+
fgon_edges = set() # Used for storing fgon keys when we need to tesselate/untesselate them (ngons with hole).
676
+
edges = []
677
+
tot_loops = 0
678
+
679
+
context_object = None
680
+
681
+
# reverse loop through face indices
682
+
for f_idx in range(len(faces) - 1, -1, -1):
683
+
(face_vert_loc_indices,
684
+
face_vert_nor_indices,
685
+
face_vert_tex_indices,
686
+
face_vert_col_indices,
687
+
context_material,
688
+
context_smooth_group,
689
+
context_object,
690
+
face_invalid_blenpoly,
691
+
) = faces[f_idx]
692
+
693
+
len_face_vert_loc_indices = len(face_vert_loc_indices)
694
+
695
+
if len_face_vert_loc_indices == 1:
696
+
faces.pop(f_idx) # cant add single vert faces
697
+
698
+
# Face with a single item in face_vert_nor_indices is actually a polyline!
699
+
elif len(face_vert_nor_indices) == 1 or len_face_vert_loc_indices == 2:
700
+
if use_edges:
701
+
edges.extend((face_vert_loc_indices[i], face_vert_loc_indices[i + 1])
702
+
for i in range(len_face_vert_loc_indices - 1))
703
+
faces.pop(f_idx)
704
+
705
+
else:
706
+
# Smooth Group
707
+
if unique_smooth_groups and context_smooth_group:
708
+
# Is a part of of a smooth group and is a face
709
+
if context_smooth_group_old is not context_smooth_group:
710
+
edge_dict = smooth_group_users[context_smooth_group]
711
+
context_smooth_group_old = context_smooth_group
712
+
713
+
prev_vidx = face_vert_loc_indices[-1]
714
+
for vidx in face_vert_loc_indices:
715
+
edge_key = (prev_vidx, vidx) if (prev_vidx < vidx) else (vidx, prev_vidx)
716
+
prev_vidx = vidx
717
+
edge_dict[edge_key] = edge_dict.get(edge_key, 0) + 1
718
+
719
+
# NGons into triangles
720
+
if face_invalid_blenpoly:
721
+
# ignore triangles with invalid indices
722
+
if len(face_vert_loc_indices) > 3:
723
+
from bpy_extras.mesh_utils import ngon_tessellate
724
+
ngon_face_indices = ngon_tessellate(verts_loc, face_vert_loc_indices)
725
+
faces.extend([(
726
+
[face_vert_loc_indices[ngon[0]],
727
+
face_vert_loc_indices[ngon[1]],
728
+
face_vert_loc_indices[ngon[2]],
729
+
],
730
+
[face_vert_nor_indices[ngon[0]],
731
+
face_vert_nor_indices[ngon[1]],
732
+
face_vert_nor_indices[ngon[2]],
733
+
] if face_vert_nor_indices else [],
734
+
[face_vert_tex_indices[ngon[0]],
735
+
face_vert_tex_indices[ngon[1]],
736
+
face_vert_tex_indices[ngon[2]],
737
+
] if face_vert_tex_indices else [],
738
+
[face_vert_col_indices[ngon[0]],
739
+
face_vert_col_indices[ngon[1]],
740
+
face_vert_col_indices[ngon[2]],
741
+
] if face_vert_col_indices else [],
742
+
context_material,
743
+
context_smooth_group,
744
+
context_object,
745
+
[],
746
+
)
747
+
for ngon in ngon_face_indices]
748
+
)
749
+
tot_loops += 3 * len(ngon_face_indices)
750
+
751
+
# edges to make ngons
752
+
if len(ngon_face_indices) > 1:
753
+
edge_users = set()
754
+
for ngon in ngon_face_indices:
755
+
prev_vidx = face_vert_loc_indices[ngon[-1]]
756
+
for ngidx in ngon:
757
+
vidx = face_vert_loc_indices[ngidx]
758
+
if vidx == prev_vidx:
759
+
continue # broken OBJ... Just skip.
760
+
edge_key = (prev_vidx, vidx) if (prev_vidx < vidx) else (vidx, prev_vidx)
761
+
prev_vidx = vidx
762
+
if edge_key in edge_users:
763
+
fgon_edges.add(edge_key)
764
+
else:
765
+
edge_users.add(edge_key)
766
+
767
+
faces.pop(f_idx)
768
+
else:
769
+
tot_loops += len_face_vert_loc_indices
770
+
771
+
# Build sharp edges
772
+
if unique_smooth_groups:
773
+
for edge_dict in smooth_group_users.values():
774
+
for key, users in edge_dict.items():
775
+
if users == 1: # This edge is on the boundry of a group
776
+
sharp_edges.add(key)
777
+
778
+
# map the material names to an index
779
+
material_mapping = {name: i for i, name in enumerate(unique_materials)} # enumerate over unique_materials keys()
780
+
781
+
materials = [None] * len(unique_materials)
782
+
783
+
for name, index in material_mapping.items():
784
+
materials[index] = unique_materials[name]
785
+
786
+
me = bpy.data.meshes.new(dataname)
787
+
788
+
# make sure the list isnt too big
789
+
for material in materials:
790
+
me.materials.append(material)
791
+
792
+
me.vertices.add(len(verts_loc))
793
+
me.loops.add(tot_loops)
794
+
me.polygons.add(len(faces))
795
+
796
+
# verts_loc is a list of (x, y, z) tuples
797
+
me.vertices.foreach_set("co", unpack_list(verts_loc))
798
+
799
+
loops_vert_idx = []
800
+
faces_loop_start = []
801
+
faces_loop_total = []
802
+
lidx = 0
803
+
for f in faces:
804
+
vidx = f[0]
805
+
nbr_vidx = len(vidx)
806
+
loops_vert_idx.extend(vidx)
807
+
faces_loop_start.append(lidx)
808
+
faces_loop_total.append(nbr_vidx)
809
+
lidx += nbr_vidx
810
+
811
+
me.loops.foreach_set("vertex_index", loops_vert_idx)
812
+
me.polygons.foreach_set("loop_start", faces_loop_start)
813
+
me.polygons.foreach_set("loop_total", faces_loop_total)
814
+
815
+
if verts_nor and me.loops:
816
+
# Note: we store 'temp' normals in loops, since validate() may alter final mesh,
817
+
# we can only set custom lnors *after* calling it.
818
+
me.create_normals_split()
819
+
820
+
if verts_tex and me.polygons:
821
+
me.uv_layers.new()
822
+
823
+
if verts_col and me.polygons:
824
+
me.vertex_colors.new()
825
+
826
+
context_material_old = -1 # avoid a dict lookup
827
+
mat = 0 # rare case it may be un-initialized.
828
+
829
+
for i, (face, blen_poly) in enumerate(zip(faces, me.polygons)):
830
+
if len(face[0]) < 3:
831
+
raise Exception("bad face") # Shall not happen, we got rid of those earlier!
832
+
833
+
(face_vert_loc_indices,
834
+
face_vert_nor_indices,
835
+
face_vert_tex_indices,
836
+
face_vert_col_indices,
837
+
context_material,
838
+
context_smooth_group,
839
+
context_object,
840
+
face_invalid_blenpoly,
841
+
) = face
842
+
843
+
if context_smooth_group:
844
+
blen_poly.use_smooth = True
845
+
846
+
if context_material:
847
+
if context_material_old is not context_material:
848
+
mat = material_mapping[context_material]
849
+
context_material_old = context_material
850
+
blen_poly.material_index = mat
851
+
852
+
if verts_nor and face_vert_nor_indices:
853
+
for face_noidx, lidx in zip(face_vert_nor_indices, blen_poly.loop_indices):
854
+
me.loops[lidx].normal[:] = verts_nor[0 if (face_noidx is ...) else face_noidx]
855
+
856
+
if verts_col and face_vert_col_indices:
857
+
for face_colidx, lidx in zip(face_vert_col_indices, blen_poly.loop_indices):
858
+
me.vertex_colors[0].data[lidx].color[:] = verts_col[0 if (face_colidx is ...) else face_colidx][:3]
859
+
860
+
if verts_tex and face_vert_tex_indices:
861
+
if context_material:
862
+
image = unique_material_images[context_material]
863
+
if image: # Can be none if the material dosnt have an image.
864
+
me.uv_textures[0].data[i].image = image
865
+
866
+
blen_uvs = me.uv_layers[0]
867
+
for face_uvidx, lidx in zip(face_vert_tex_indices, blen_poly.loop_indices):
868
+
blen_uvs.data[lidx].uv = verts_tex[0 if (face_uvidx is ...) else face_uvidx]
869
+
870
+
use_edges = use_edges and bool(edges)
871
+
if use_edges:
872
+
me.edges.add(len(edges))
873
+
# edges should be a list of (a, b) tuples
874
+
me.edges.foreach_set("vertices", unpack_list(edges))
875
+
876
+
me.validate(clean_customdata=False) # *Very* important to not remove lnors here!
877
+
me.update(calc_edges=use_edges)
878
+
879
+
# Un-tessellate as much as possible, in case we had to triangulate some ngons...
880
+
if fgon_edges:
881
+
import bmesh
882
+
bm = bmesh.new()
883
+
bm.from_mesh(me)
884
+
verts = bm.verts[:]
885
+
get = bm.edges.get
886
+
edges = [get((verts[vidx1], verts[vidx2])) for vidx1, vidx2 in fgon_edges]
887
+
try:
888
+
bmesh.ops.dissolve_edges(bm, edges=edges, use_verts=False)
889
+
except:
890
+
# Possible dissolve fails for some edges, but don't fail silently in case this is a real bug.
891
+
import traceback
892
+
traceback.print_exc()
893
+
894
+
bm.to_mesh(me)
895
+
bm.free()
896
+
897
+
# XXX If validate changes the geometry, this is likely to be broken...
898
+
if unique_smooth_groups and sharp_edges:
899
+
for e in me.edges:
900
+
if e.key in sharp_edges:
901
+
e.use_edge_sharp = True
902
+
me.show_edge_sharp = True
903
+
904
+
if verts_nor:
905
+
clnors = array.array('f', [0.0] * (len(me.loops) * 3))
906
+
me.loops.foreach_get("normal", clnors)
907
+
908
+
if not unique_smooth_groups:
909
+
me.polygons.foreach_set("use_smooth", [True] * len(me.polygons))
910
+
911
+
me.normals_split_custom_set(tuple(zip(*(iter(clnors),) * 3)))
912
+
me.use_auto_smooth = True
913
+
me.show_edge_sharp = True
914
+
915
+
ob = bpy.data.objects.new(me.name, me)
916
+
armature_ob = None
917
+
if new_armatures:
918
+
armature_ob = new_armatures[0]
919
+
920
+
if armature_ob:
921
+
# Assingn vertex weights
922
+
mod = ob.modifiers.new(type="ARMATURE", name="Armature")
923
+
mod.use_vertex_groups = True
924
+
mod.object = armature_ob
925
+
926
+
parent_armature = True
927
+
if parent_armature:
928
+
ob.parent = armature_ob
929
+
930
+
for vert_id, bws in enumerate(verts_bw):
931
+
for bw in bws:
932
+
bone_idx, bone_weight = bw
933
+
# print('----')
934
+
# print('bone_idx', bone_idx)
935
+
# print('bone_names', bone_names)
936
+
bone_name = bone_names[bone_idx].decode('utf-8', "replace")
937
+
if bone_weight == 0.0 or bone_name == 'root groud':
938
+
continue
939
+
940
+
if bone_name:
941
+
vert_group = ob.vertex_groups.get(bone_name)
942
+
if not vert_group:
943
+
vert_group = ob.vertex_groups.new(bone_name)
944
+
vert_group.add([vert_id], bone_weight, 'REPLACE')
945
+
946
+
new_objects.append(ob)
947
+
948
+
# Create the vertex groups. No need to have the flag passed here since we test for the
949
+
# content of the vertex_groups. If the user selects to NOT have vertex groups saved then
950
+
# the following test will never run
951
+
for group_name, group_indices in vertex_groups.items():
952
+
group = ob.vertex_groups.new(group_name.decode('utf-8', "replace"))
953
+
group.add(group_indices, 1.0, 'REPLACE')
954
+
955
+
956
+
def create_nurbs(context_nurbs, vert_loc, new_objects):
957
+
"""
958
+
Add nurbs object to blender, only support one type at the moment
959
+
"""
960
+
deg = context_nurbs.get(b'deg', (3,))
961
+
curv_range = context_nurbs.get(b'curv_range')
962
+
curv_idx = context_nurbs.get(b'curv_idx', [])
963
+
parm_u = context_nurbs.get(b'parm_u', [])
964
+
parm_v = context_nurbs.get(b'parm_v', [])
965
+
name = context_nurbs.get(b'name', b'ObjNurb')
966
+
cstype = context_nurbs.get(b'cstype')
967
+
968
+
if cstype is None:
969
+
print('\tWarning, cstype not found')
970
+
return
971
+
if cstype != b'bspline':
972
+
print('\tWarning, cstype is not supported (only bspline)')
973
+
return
974
+
if not curv_idx:
975
+
print('\tWarning, curv argument empty or not set')
976
+
return
977
+
if len(deg) > 1 or parm_v:
978
+
print('\tWarning, surfaces not supported')
979
+
return
980
+
981
+
cu = bpy.data.curves.new(name.decode('utf-8', "replace"), 'CURVE')
982
+
cu.dimensions = '3D'
983
+
984
+
nu = cu.splines.new('NURBS')
985
+
nu.points.add(len(curv_idx) - 1) # a point is added to start with
986
+
nu.points.foreach_set("co", [co_axis for vt_idx in curv_idx for co_axis in (vert_loc[vt_idx] + (1.0,))])
987
+
988
+
nu.order_u = deg[0] + 1
989
+
990
+
# get for endpoint flag from the weighting
991
+
if curv_range and len(parm_u) > deg[0] + 1:
992
+
do_endpoints = True
993
+
for i in range(deg[0] + 1):
994
+
995
+
if abs(parm_u[i] - curv_range[0]) > 0.0001:
996
+
do_endpoints = False
997
+
break
998
+
999
+
if abs(parm_u[-(i + 1)] - curv_range[1]) > 0.0001:
1000
+
do_endpoints = False
1001
+
break
1002
+
1003
+
else:
1004
+
do_endpoints = False
1005
+
1006
+
if do_endpoints:
1007
+
nu.use_endpoint_u = True
1008
+
1009
+
# close
1010
+
'''
1011
+
do_closed = False
1012
+
if len(parm_u) > deg[0]+1:
1013
+
for i in xrange(deg[0]+1):
1014
+
# print curv_idx[i], curv_idx[-(i+1)]
1015
+
1016
+
if curv_idx[i]==curv_idx[-(i+1)]:
1017
+
do_closed = True
1018
+
break
1019
+
1020
+
if do_closed:
1021
+
nu.use_cyclic_u = True
1022
+
'''
1023
+
1024
+
ob = bpy.data.objects.new(name.decode('utf-8', "replace"), cu)
1025
+
1026
+
new_objects.append(ob)
1027
+
1028
+
1029
+
def strip_slash(line_split):
1030
+
if line_split[-1][-1] == 92: # '\' char
1031
+
if len(line_split[-1]) == 1:
1032
+
line_split.pop() # remove the \ item
1033
+
else:
1034
+
line_split[-1] = line_split[-1][:-1] # remove the \ from the end last number
1035
+
return True
1036
+
return False
1037
+
1038
+
1039
+
def get_float_func(filepath):
1040
+
"""
1041
+
find the float function for this obj file
1042
+
- whether to replace commas or not
1043
+
"""
1044
+
file = open(filepath, 'rb')
1045
+
for line in file: # .readlines():
1046
+
line = line.lstrip()
1047
+
if line.startswith(b'v'): # vn vt v
1048
+
if b',' in line:
1049
+
file.close()
1050
+
return lambda f: float(f.replace(b',', b'.'))
1051
+
elif b'.' in line:
1052
+
file.close()
1053
+
return float
1054
+
1055
+
file.close()
1056
+
# in case all vert values were ints
1057
+
return float
1058
+
1059
+
1060
+
def load(context,
1061
+
filepath,
1062
+
*,
1063
+
global_clamp_size=0.0,
1064
+
use_smooth_groups=True,
1065
+
use_edges=True,
1066
+
use_split_objects=True,
1067
+
use_split_groups=True,
1068
+
use_image_search=True,
1069
+
use_groups_as_vgroups=False,
1070
+
relpath=None,
1071
+
global_matrix=None
1072
+
):
1073
+
"""
1074
+
Called by the user interface or another script.
1075
+
load_obj(path) - should give acceptable results.
1076
+
This function passes the file and sends the data off
1077
+
to be split into objects and then converted into mesh objects
1078
+
"""
1079
+
1080
+
def handle_vec(line_start, context_multi_line, line_split, tag, data, vec, vec_len):
1081
+
ret_context_multi_line = tag if strip_slash(line_split) else b''
1082
+
if line_start == tag:
1083
+
vec[:] = [float_func(v) for v in line_split[1:]]
1084
+
elif context_multi_line == tag:
1085
+
vec += [float_func(v) for v in line_split]
1086
+
if not ret_context_multi_line:
1087
+
data.append(tuple(vec[:vec_len]))
1088
+
return ret_context_multi_line
1089
+
1090
+
def handle_bw_vec(line_start, context_multi_line, line_split, line, tag, data, vec, vec_len):
1091
+
str_line = [line]
1092
+
ret_context_multi_line = tag if strip_slash(str_line) else b''
1093
+
if line_start == tag:
1094
+
vec[:] = str_line
1095
+
elif context_multi_line == tag:
1096
+
vec[:] = [vec[0] + str_line[0]]
1097
+
if not ret_context_multi_line:
1098
+
str_vec = b''.join(vec)
1099
+
str_str = str_vec.decode("utf-8", "ignore")
1100
+
str_data = str_str.split(' ', 1)[1]
1101
+
data.append(ast.literal_eval(str_data)[:vec_len])
1102
+
return ret_context_multi_line
1103
+
1104
+
def create_face(context_material, context_smooth_group, context_object):
1105
+
face_vert_loc_indices = []
1106
+
face_vert_nor_indices = []
1107
+
face_vert_tex_indices = []
1108
+
face_vert_col_indices = []
1109
+
return (
1110
+
face_vert_loc_indices, # face item 0
1111
+
face_vert_nor_indices, # face item 1
1112
+
face_vert_tex_indices, # face item 2
1113
+
face_vert_col_indices, # face item 3
1114
+
context_material, # face item 4
1115
+
context_smooth_group, # face item 5
1116
+
context_object, # face item 6
1117
+
[], # If non-empty, that face is a Blender-invalid ngon (holes...), need a mutable object for that...
1118
+
)
1119
+
1120
+
with ProgressReport(context.window_manager) as progress:
1121
+
progress.enter_substeps(1, "Importing OBJ %r..." % filepath)
1122
+
1123
+
if global_matrix is None:
1124
+
global_matrix = mathutils.Matrix()
1125
+
1126
+
if use_split_objects or use_split_groups:
1127
+
use_groups_as_vgroups = False
1128
+
1129
+
verts_loc = []
1130
+
verts_nor = []
1131
+
verts_tex = []
1132
+
verts_col = []
1133
+
verts_bw = []
1134
+
faces = [] # tuples of the faces
1135
+
material_libs = set() # filenames to material libs this OBJ uses
1136
+
armature_libs = set() # filenames to armature libs this OBJ uses
1137
+
vertex_groups = {} # when use_groups_as_vgroups is true
1138
+
1139
+
# Get the string to float conversion func for this file- is 'float' for almost all files.
1140
+
float_func = get_float_func(filepath)
1141
+
1142
+
# Context variables
1143
+
context_material = None
1144
+
context_smooth_group = None
1145
+
context_object = None
1146
+
context_vgroup = None
1147
+
1148
+
# Nurbs
1149
+
context_nurbs = {}
1150
+
nurbs = []
1151
+
context_parm = b'' # used by nurbs too but could be used elsewhere
1152
+
1153
+
# Until we can use sets
1154
+
unique_materials = {}
1155
+
unique_material_images = {}
1156
+
unique_smooth_groups = {}
1157
+
# unique_obects= {} - no use for this variable since the objects are stored in the face.
1158
+
1159
+
# when there are faces that end with \
1160
+
# it means they are multiline-
1161
+
# since we use xreadline we cant skip to the next line
1162
+
# so we need to know whether
1163
+
context_multi_line = b''
1164
+
1165
+
# Per-face handling data.
1166
+
face_vert_loc_indices = None
1167
+
face_vert_nor_indices = None
1168
+
face_vert_tex_indices = None
1169
+
face_vert_col_indices = None
1170
+
face_vert_nor_valid = face_vert_tex_valid = face_vert_col_valid = False
1171
+
face_items_usage = set()
1172
+
face_invalid_blenpoly = None
1173
+
prev_vidx = None
1174
+
face = None
1175
+
vec = []
1176
+
1177
+
progress.enter_substeps(3, "Parsing OBJ file...")
1178
+
with open(filepath, 'rb') as f:
1179
+
for line in f: # .readlines():
1180
+
line_split = line.split()
1181
+
1182
+
if not line_split:
1183
+
continue
1184
+
1185
+
line_start = line_split[0] # we compare with this a _lot_
1186
+
1187
+
if line_start == b'v' or context_multi_line == b'v':
1188
+
context_multi_line = handle_vec(line_start, context_multi_line, line_split, b'v', verts_loc, vec, 3)
1189
+
1190
+
elif line_start == b'vn' or context_multi_line == b'vn':
1191
+
context_multi_line = handle_vec(line_start, context_multi_line, line_split, b'vn', verts_nor, vec, 3)
1192
+
1193
+
elif line_start == b'vt' or context_multi_line == b'vt':
1194
+
context_multi_line = handle_vec(line_start, context_multi_line, line_split, b'vt', verts_tex, vec, 2)
1195
+
1196
+
elif line_start == b'vc' or context_multi_line == b'vc':
1197
+
context_multi_line = handle_vec(line_start, context_multi_line, line_split, b'vc', verts_col, vec, 4)
1198
+
1199
+
elif line_start == b'bw' or context_multi_line == b'bw':
1200
+
context_multi_line = handle_bw_vec(line_start, context_multi_line, line_split, line, b'bw', verts_bw, vec, 4)
1201
+
1202
+
# Handle faces lines (as faces) and the second+ lines of fa multiline face here
1203
+
# use 'f' not 'f ' because some objs (very rare have 'fo ' for faces)
1204
+
elif line_start == b'f' or context_multi_line == b'f':
1205
+
if not context_multi_line:
1206
+
line_split = line_split[1:]
1207
+
# Instantiate a face
1208
+
face = create_face(context_material, context_smooth_group, context_object)
1209
+
(face_vert_loc_indices, face_vert_nor_indices, face_vert_tex_indices, face_vert_col_indices,
1210
+
_1, _2, _3, face_invalid_blenpoly) = face
1211
+
faces.append(face)
1212
+
face_items_usage.clear()
1213
+
# Else, use face_vert_loc_indices and face_vert_tex_indices and face_vert_col_indices previously defined and used the obj_face
1214
+
1215
+
context_multi_line = b'f' if strip_slash(line_split) else b''
1216
+
1217
+
for v in line_split:
1218
+
obj_vert = v.split(b'/')
1219
+
# obj_vert[0] coordinate index
1220
+
# obj_vert[1] texture mapping index
1221
+
# obj_vert[2] normal index
1222
+
# obj_vert[3] color index
1223
+
1224
+
idx = int(obj_vert[0]) - 1
1225
+
vert_loc_index = (idx + len(verts_loc) + 1) if (idx < 0) else idx
1226
+
# Add the vertex to the current group
1227
+
# *warning*, this wont work for files that have groups defined around verts
1228
+
if use_groups_as_vgroups and context_vgroup:
1229
+
vertex_groups[context_vgroup].append(vert_loc_index)
1230
+
# This a first round to quick-detect ngons that *may* use a same edge more than once.
1231
+
# Potential candidate will be re-checked once we have done parsing the whole face.
1232
+
if not face_invalid_blenpoly:
1233
+
# If we use more than once a same vertex, invalid ngon is suspected.
1234
+
if vert_loc_index in face_items_usage:
1235
+
face_invalid_blenpoly.append(True)
1236
+
else:
1237
+
face_items_usage.add(vert_loc_index)
1238
+
face_vert_loc_indices.append(vert_loc_index)
1239
+
1240
+
# formatting for faces with normals and textures and vert color is
1241
+
# loc_index/tex_index/nor_index/vcol_index
1242
+
if len(obj_vert) > 1 and obj_vert[1] and obj_vert[1] != b'0':
1243
+
idx = int(obj_vert[1]) - 1
1244
+
face_vert_tex_indices.append((idx + len(verts_tex) + 1) if (idx < 0) else idx)
1245
+
face_vert_tex_valid = True
1246
+
else:
1247
+
face_vert_tex_indices.append(...)
1248
+
1249
+
if len(obj_vert) > 2 and obj_vert[2] and obj_vert[2] != b'0':
1250
+
idx = int(obj_vert[2]) - 1
1251
+
face_vert_nor_indices.append((idx + len(verts_nor) + 1) if (idx < 0) else idx)
1252
+
face_vert_nor_valid = True
1253
+
else:
1254
+
face_vert_nor_indices.append(...)
1255
+
1256
+
if len(obj_vert) > 3 and obj_vert[3] and obj_vert[3] != b'0':
1257
+
idx = int(obj_vert[3]) - 1
1258
+
face_vert_col_indices.append((idx + len(verts_col) + 1) if (idx < 0) else idx)
1259
+
face_vert_col_valid = True
1260
+
else:
1261
+
face_vert_col_indices.append(...)
1262
+
1263
+
if not context_multi_line:
1264
+
# Clear nor/tex indices in case we had none defined for this face.
1265
+
if not face_vert_nor_valid:
1266
+
face_vert_nor_indices.clear()
1267
+
if not face_vert_tex_valid:
1268
+
face_vert_tex_indices.clear()
1269
+
if not face_vert_col_valid:
1270
+
face_vert_col_indices.clear()
1271
+
face_vert_nor_valid = face_vert_tex_valid = face_vert_col_valid = False
1272
+
1273
+
# Means we have finished a face, we have to do final check if ngon is suspected to be blender-invalid...
1274
+
if face_invalid_blenpoly:
1275
+
face_invalid_blenpoly.clear()
1276
+
face_items_usage.clear()
1277
+
prev_vidx = face_vert_loc_indices[-1]
1278
+
for vidx in face_vert_loc_indices:
1279
+
edge_key = (prev_vidx, vidx) if (prev_vidx < vidx) else (vidx, prev_vidx)
1280
+
if edge_key in face_items_usage:
1281
+
face_invalid_blenpoly.append(True)
1282
+
break
1283
+
face_items_usage.add(edge_key)
1284
+
prev_vidx = vidx
1285
+
1286
+
elif use_edges and (line_start == b'l' or context_multi_line == b'l'):
1287
+
# very similar to the face load function above with some parts removed
1288
+
if not context_multi_line:
1289
+
line_split = line_split[1:]
1290
+
# Instantiate a face
1291
+
face = create_face(context_material, context_smooth_group, context_object)
1292
+
face_vert_loc_indices = face[0]
1293
+
# XXX A bit hackish, we use special 'value' of face_vert_nor_indices (a single True item) to tag this
1294
+
# as a polyline, and not a regular face...
1295
+
face[1][:] = [True]
1296
+
faces.append(face)
1297
+
# Else, use face_vert_loc_indices previously defined and used the obj_face
1298
+
1299
+
context_multi_line = b'l' if strip_slash(line_split) else b''
1300
+
1301
+
for v in line_split:
1302
+
obj_vert = v.split(b'/')
1303
+
idx = int(obj_vert[0]) - 1
1304
+
face_vert_loc_indices.append((idx + len(verts_loc) + 1) if (idx < 0) else idx)
1305
+
1306
+
elif line_start == b's':
1307
+
if use_smooth_groups:
1308
+
context_smooth_group = line_value(line_split)
1309
+
if context_smooth_group == b'off':
1310
+
context_smooth_group = None
1311
+
elif context_smooth_group: # is not None
1312
+
unique_smooth_groups[context_smooth_group] = None
1313
+
1314
+
elif line_start == b'o':
1315
+
if use_split_objects:
1316
+
context_object = line_value(line_split)
1317
+
# unique_obects[context_object]= None
1318
+
1319
+
elif line_start == b'g':
1320
+
if use_split_groups:
1321
+
context_object = line_value(line.split())
1322
+
# print 'context_object', context_object
1323
+
# unique_obects[context_object]= None
1324
+
elif use_groups_as_vgroups:
1325
+
context_vgroup = line_value(line.split())
1326
+
if context_vgroup and context_vgroup != b'(null)':
1327
+
vertex_groups.setdefault(context_vgroup, [])
1328
+
else:
1329
+
context_vgroup = None # dont assign a vgroup
1330
+
1331
+
elif line_start == b'usemtl':
1332
+
context_material = line_value(line.split())
1333
+
unique_materials[context_material] = None
1334
+
elif line_start == b'mtllib': # usemap or usemat
1335
+
# can have multiple mtllib filenames per line, mtllib can appear more than once,
1336
+
# so make sure only occurrence of material exists
1337
+
material_libs |= {os.fsdecode(f) for f in line.split()[1:]}
1338
+
elif line_start == b'arllib': # armature
1339
+
# can have multiple arllib filenames per line, arllib can appear more than once
1340
+
armature_libs |= {os.fsdecode(f) for f in line.split()[1:]}
1341
+
1342
+
# Nurbs support
1343
+
elif line_start == b'cstype':
1344
+
context_nurbs[b'cstype'] = line_value(line.split()) # 'rat bspline' / 'bspline'
1345
+
elif line_start == b'curv' or context_multi_line == b'curv':
1346
+
curv_idx = context_nurbs[b'curv_idx'] = context_nurbs.get(b'curv_idx', []) # in case were multiline
1347
+
1348
+
if not context_multi_line:
1349
+
context_nurbs[b'curv_range'] = float_func(line_split[1]), float_func(line_split[2])
1350
+
line_split[0:3] = [] # remove first 3 items
1351
+
1352
+
if strip_slash(line_split):
1353
+
context_multi_line = b'curv'
1354
+
else:
1355
+
context_multi_line = b''
1356
+
1357
+
for i in line_split:
1358
+
vert_loc_index = int(i) - 1
1359
+
1360
+
if vert_loc_index < 0:
1361
+
vert_loc_index = len(verts_loc) + vert_loc_index + 1
1362
+
1363
+
curv_idx.append(vert_loc_index)
1364
+
1365
+
elif line_start == b'parm' or context_multi_line == b'parm':
1366
+
if context_multi_line:
1367
+
context_multi_line = b''
1368
+
else:
1369
+
context_parm = line_split[1]
1370
+
line_split[0:2] = [] # remove first 2
1371
+
1372
+
if strip_slash(line_split):
1373
+
context_multi_line = b'parm'
1374
+
else:
1375
+
context_multi_line = b''
1376
+
1377
+
if context_parm.lower() == b'u':
1378
+
context_nurbs.setdefault(b'parm_u', []).extend([float_func(f) for f in line_split])
1379
+
elif context_parm.lower() == b'v': # surfaces not supported yet
1380
+
context_nurbs.setdefault(b'parm_v', []).extend([float_func(f) for f in line_split])
1381
+
# else: # may want to support other parm's ?
1382
+
1383
+
elif line_start == b'deg':
1384
+
context_nurbs[b'deg'] = [int(i) for i in line.split()[1:]]
1385
+
elif line_start == b'end':
1386
+
# Add the nurbs curve
1387
+
if context_object:
1388
+
context_nurbs[b'name'] = context_object
1389
+
nurbs.append(context_nurbs)
1390
+
context_nurbs = {}
1391
+
context_parm = b''
1392
+
1393
+
''' # How to use usemap? depricated?
1394
+
elif line_start == b'usema': # usemap or usemat
1395
+
context_image= line_value(line_split)
1396
+
'''
1397
+
1398
+
progress.step("Done, loading materials and images...")
1399
+
1400
+
create_materials(filepath, relpath, material_libs, unique_materials,
1401
+
unique_material_images, use_image_search, float_func)
1402
+
1403
+
progress.step("Done, building geometries (verts:%i faces:%i materials: %i smoothgroups:%i) ..." %
1404
+
(len(verts_loc), len(faces), len(unique_materials), len(unique_smooth_groups)))
1405
+
1406
+
# deselect all
1407
+
if bpy.ops.object.select_all.poll():
1408
+
bpy.ops.object.select_all(action='DESELECT')
1409
+
1410
+
scene = context.scene
1411
+
new_objects = [] # put new objects here
1412
+
new_armatures = [] # put new armatures here
1413
+
bone_names = []
1414
+
1415
+
create_armatures(filepath, relpath, armature_libs, unique_materials,
1416
+
unique_material_images, use_image_search, float_func, new_armatures, new_objects, bone_names)
1417
+
1418
+
# Split the mesh by objects/materials, may
1419
+
SPLIT_OB_OR_GROUP = bool(use_split_objects or use_split_groups)
1420
+
1421
+
for data in split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP, verts_bw):
1422
+
verts_loc_split, faces_split, unique_materials_split, dataname, use_vnor, use_vtex, use_vcol, verts_bw_split = data
1423
+
# Create meshes from the data, warning 'vertex_groups' wont support splitting
1424
+
# ~ print(dataname, use_vnor, use_vtex, use_vcol)
1425
+
create_mesh(new_objects,
1426
+
use_edges,
1427
+
verts_loc_split,
1428
+
verts_nor if use_vnor else [],
1429
+
verts_tex if use_vtex else [],
1430
+
verts_col if use_vcol else [],
1431
+
faces_split,
1432
+
unique_materials_split,
1433
+
unique_material_images,
1434
+
unique_smooth_groups,
1435
+
vertex_groups,
1436
+
dataname,
1437
+
verts_bw_split,
1438
+
new_armatures,
1439
+
bone_names,
1440
+
)
1441
+
1442
+
# nurbs support
1443
+
for context_nurbs in nurbs:
1444
+
create_nurbs(context_nurbs, verts_loc, new_objects)
1445
+
1446
+
for obj in new_armatures:
1447
+
obj.select_set(state=True)
1448
+
1449
+
# we could apply this anywhere before scaling.
1450
+
# Child object inherit world_matrix, so only apply it to the parent
1451
+
parent_obj = obj
1452
+
while parent_obj.parent is not None:
1453
+
parent_obj = parent_obj.parent
1454
+
1455
+
parent_obj.matrix_world = global_matrix
1456
+
1457
+
# Create new obj
1458
+
for obj in new_objects:
1459
+
base = scene.objects.link(obj)
1460
+
base.select_set(state=True)
1461
+
1462
+
# we could apply this anywhere before scaling.
1463
+
# Child object inherit world_matrix, so only apply it to the parent
1464
+
parent_obj = obj
1465
+
while parent_obj.parent is not None:
1466
+
parent_obj = parent_obj.parent
1467
+
1468
+
parent_obj.matrix_world = global_matrix
1469
+
1470
+
scene.update()
1471
+
1472
+
axis_min = [1000000000] * 3
1473
+
axis_max = [-1000000000] * 3
1474
+
1475
+
if global_clamp_size:
1476
+
# Get all object bounds
1477
+
for ob in new_objects:
1478
+
for v in ob.bound_box:
1479
+
for axis, value in enumerate(v):
1480
+
if axis_min[axis] > value:
1481
+
axis_min[axis] = value
1482
+
if axis_max[axis] < value:
1483
+
axis_max[axis] = value
1484
+
1485
+
# Scale objects
1486
+
max_axis = max(axis_max[0] - axis_min[0], axis_max[1] - axis_min[1], axis_max[2] - axis_min[2])
1487
+
scale = 1.0
1488
+
1489
+
while global_clamp_size < max_axis * scale:
1490
+
scale = scale / 10.0
1491
+
1492
+
for obj in new_objects:
1493
+
obj.scale = scale, scale, scale
1494
+
1495
+
progress.leave_substeps("Done.")
1496
+
progress.leave_substeps("Finished importing: %r" % filepath)
1497
+
1498
+
return {'FINISHED'}
+709
xnalara_io_Tools/import_xnalara_model.py
+709
xnalara_io_Tools/import_xnalara_model.py
···
1
+
import copy
2
+
import operator
3
+
import os
4
+
import re
5
+
6
+
import bpy
7
+
from mathutils import Vector
8
+
9
+
from . import (import_xnalara_pose, material_creator, read_ascii_xps,
10
+
read_bin_xps, xps_types)
11
+
from .armature_tools.xnal_armature_utilities import (XnaL_AddRegisterBoneName,
12
+
Xnal_CreateArmatureObject,
13
+
XnaL_CreateBoneCollection,
14
+
XnaL_GetBoneNameByIndex,
15
+
XnaL_ShowHideBones)
16
+
from .utilities.mesh_utilities import create_split_normals
17
+
18
+
# imported XPS directory
19
+
rootDir = ''
20
+
MIN_BONE_LENGHT = 0.005
21
+
22
+
23
+
def coordTransform(coords):
24
+
x, y, z = coords
25
+
z = -z
26
+
return (x, z, y)
27
+
28
+
29
+
def faceTransform(face):
30
+
return [face[0], face[2], face[1]]
31
+
32
+
33
+
def faceTransformList(faces):
34
+
return map(faceTransform, faces)
35
+
36
+
37
+
def uvTransform(uv):
38
+
u = uv[0] + xpsSettings.uvDisplX
39
+
v = 1 + xpsSettings.uvDisplY - uv[1]
40
+
return [u, v]
41
+
42
+
43
+
def rangeFloatToByte(float):
44
+
return int(float * 255) % 256
45
+
46
+
47
+
def rangeByteToFloat(byte):
48
+
return byte / 255
49
+
50
+
51
+
def uvTransformLayers(uvLayers):
52
+
return list(map(uvTransform, uvLayers))
53
+
54
+
55
+
# profile
56
+
def getInputFilename(xpsSettingsAux):
57
+
global xpsSettings
58
+
xpsSettings = xpsSettingsAux
59
+
60
+
blenderImportSetup()
61
+
status = xpsImport()
62
+
blenderImportFinalize()
63
+
return status
64
+
65
+
66
+
def blenderImportSetup():
67
+
# switch to object mode and deselect all
68
+
objectMode()
69
+
bpy.ops.object.select_all(action='DESELECT')
70
+
71
+
72
+
def blenderImportFinalize():
73
+
# switch to object mode
74
+
objectMode()
75
+
76
+
77
+
def objectMode():
78
+
current_mode = bpy.context.mode
79
+
if bpy.context.view_layer.objects.active and current_mode != 'OBJECT':
80
+
bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
81
+
82
+
83
+
def loadXpsFile(filename):
84
+
dirpath, file = os.path.split(filename)
85
+
basename, ext = os.path.splitext(file)
86
+
if ext.lower() in ('.mesh', '.xps'):
87
+
xpsData = read_bin_xps.readXpsModel(filename)
88
+
elif ext.lower() in ('.ascii'):
89
+
xpsData = read_ascii_xps.readXpsModel(filename)
90
+
else:
91
+
xpsData = None
92
+
93
+
return xpsData
94
+
95
+
96
+
def makeMesh(meshFullName):
97
+
mesh_data = bpy.data.meshes.new(meshFullName)
98
+
mesh_object = bpy.data.objects.new(mesh_data.name, mesh_data)
99
+
print(f"Created Mesh: {meshFullName}")
100
+
print(f"New Mesh = {mesh_data.name}")
101
+
# bpy.context.scene.update()
102
+
# mesh_da.update()
103
+
return mesh_object
104
+
105
+
106
+
def linkToCollection(collection, obj):
107
+
# Link Object to collection
108
+
collection.objects.link(obj)
109
+
110
+
111
+
def xpsImport():
    """Import the model named in xpsSettings.filename into the scene.

    Creates/reuses an "XPS IMPORT" collection, imports the armature and
    meshes, optionally splits optional parts into a child collection, and
    applies the embedded default pose.  Returns a status string.
    """
    global rootDir
    global xpsData

    print("------------------------------------------------------------")
    print("---------------EXECUTING XPS PYTHON IMPORTER----------------")
    print("------------------------------------------------------------")
    print("Importing file: ", xpsSettings.filename)

    rootDir, file = os.path.split(xpsSettings.filename)
    print('rootDir: {}'.format(rootDir))

    xpsData = loadXpsFile(xpsSettings.filename)
    if not xpsData:
        # unsupported extension or unreadable file
        return '{NONE}'

    # Create (or reuse) the top-level import collection
    fname, fext = os.path.splitext(file)
    xps_collection = bpy.data.collections.get("XPS IMPORT") if (bpy.data.collections.get("XPS IMPORT") is not None) else bpy.data.collections.new("XPS IMPORT")
    if (xps_collection.name not in bpy.context.scene.collection.children):
        bpy.context.scene.collection.children.link(xps_collection)

    # per-model collection plus a child collection for optional parts
    xps_model_collection = bpy.data.collections.new(fname)
    xps_model_optional_objects_collection = bpy.data.collections.new(f"{fname} | OPTIONAL")

    xps_collection.children.link(xps_model_collection)
    xps_model_collection.children.link(xps_model_optional_objects_collection)

    # imports the armature
    armature_object = Xnal_CreateArmatureObject()
    if armature_object is not None:
        linkToCollection(xps_model_collection, armature_object)
        XnaL_ImportModelBones(bpy.context, armature_object)
        armature_object.select_set(True)

    # imports all the meshes
    meshe_objects = importMeshesList(armature_object)

    if (xpsSettings.separate_optional_objects):
        for mesh_object in meshe_objects:
            object_name = mesh_object.name
            # strip a leading "<digits>_" render-group prefix from the name
            object_name_regions = re.split(r"[1234567890]+_", mesh_object.name, 1)

            if (len(object_name_regions) > 1):
                object_name = object_name_regions[1]

            # XPS marks optional meshes with a leading '+'/'-' or a '|' separator
            if (object_name[0] in ["+", "-"]) or ("|" in mesh_object.name):
                linkToCollection(xps_model_optional_objects_collection, mesh_object)
            else:
                linkToCollection(xps_model_collection, mesh_object)
            mesh_object.select_set(True)
        else:
            # NOTE(review): this is a for/else -- the loop has no break, so
            # this block always runs after the loop; it de-duplicates objects
            # linked to both collections.  Confirm the for/else was intended.
            for mesh_object in meshe_objects:
                if (mesh_object.name in xps_model_optional_objects_collection.objects) and (mesh_object.name in xps_model_collection.objects):
                    xps_model_collection.objects.unlink(mesh_object)
    else:
        for mesh_object in meshe_objects:
            linkToCollection(xps_model_collection, mesh_object)
            mesh_object.select_set(True)

    if armature_object:
        armature_object.pose.use_auto_ik = xpsSettings.autoIk
        hideUnusedBones([armature_object])
        boneTailMiddleObject(armature_object, xpsSettings.connectBones)

    # Import the default pose stored in the model header, if requested
    if (xpsSettings.importDefaultPose and armature_object):
        if (xpsData.header and xpsData.header.pose):
            import_xnalara_pose.setXpsPose(armature_object, xpsData.header.pose)
    return '{FINISHED}'
181
+
182
+
183
+
def setMinimumLenght(bone):
    """Guarantee the edit-bone has a usable, non-zero length."""
    # A zero-length edit bone would be discarded by Blender; nudge the tail first.
    if bone.length == 0:
        bone.tail = bone.head - Vector((0, .001, 0))
    if bone.length < MIN_BONE_LENGHT:
        bone.length = MIN_BONE_LENGHT
189
+
190
+
191
+
def boneTailMiddleObject(armature_ob, connectBones):
    """Run boneTailMiddle() on armature_ob's bones inside an edit-mode session."""
    bpy.context.view_layer.objects.active = armature_ob

    bpy.ops.object.mode_set(mode='EDIT', toggle=False)
    boneTailMiddle(armature_ob.data.edit_bones, connectBones)
    bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
198
+
199
+
200
+
def setBoneConnect(connectBones):
    """Toggle 'use_connect' on the active armature's bones, preserving the mode."""
    currMode = bpy.context.mode
    bpy.ops.object.mode_set(mode='EDIT', toggle=False)
    editBones = bpy.context.view_layer.objects.active.data.edit_bones
    connectEditBones(editBones, connectBones)
    # NOTE(review): bpy.context.mode can report values such as 'EDIT_ARMATURE'
    # that mode_set() does not accept -- confirm this is only called from
    # plain OBJECT/POSE modes.
    bpy.ops.object.mode_set(mode=currMode, toggle=False)
206
+
207
+
208
+
def connectEditBones(editBones, connectBones):
    """Set use_connect on every bone whose head coincides with its parent's tail."""
    for bone in editBones:
        if bone.parent and bone.head == bone.parent.tail:
            bone.use_connect = connectBones
213
+
214
+
215
+
def hideBonesByName(armature_objs):
    """Hide every bone whose name starts with 'unused'."""
    for armature in armature_objs:
        unused = [bone for bone in armature.data.bones
                  if bone.name.lower().startswith('unused')]
        for bone in unused:
            XnaL_ShowHideBones([bone], False)
221
+
222
+
223
+
def hideBonesByVertexGroup(armature_objs):
    """Hide bone chains that do not influence any mesh parented to the armature."""
    for armature in armature_objs:
        # meshes bound to this armature through an ARMATURE modifier
        deformed_meshes = [
            child for child in armature.children
            if child.type == 'MESH' and child.modifiers and any(
                mod and mod.type == 'ARMATURE' and mod.object == armature
                for mod in child.modifiers)
        ]

        # every vertex-group name across those meshes
        group_names = {vg.name for child in deformed_meshes if child.type == 'MESH'
                       for vg in child.vertex_groups}

        root_bones = [bone for bone in armature.data.bones if not bone.parent]
        for root in root_bones:
            recurBones(root, group_names, '')
242
+
243
+
244
+
def recurBones(bone, vertexgroups, name):
    """Depth-first walk hiding bones whose whole subtree deforms nothing.

    Returns True when this bone or any descendant appears in vertexgroups.
    """
    has_visible_descendant = False
    for child in bone.children:
        child_visible = recurBones(child, vertexgroups, '{} '.format(name))
        has_visible_descendant = has_visible_descendant or child_visible

    chain_visible = bone.name in vertexgroups or has_visible_descendant
    if not chain_visible:
        XnaL_ShowHideBones([bone], False)
    return chain_visible
254
+
255
+
256
+
def hideUnusedBones(armature_objs):
    """Hide bones that deform nothing, plus bones explicitly named 'unused*'."""
    hideBonesByVertexGroup(armature_objs)
    hideBonesByName(armature_objs)
259
+
260
+
261
+
def boneDictRename(filepath, armatureObj):
    """Rename the armature's bones using the rename half of a BoneDict file."""
    rename_map, _restore_map = read_ascii_xps.readBoneDict(filepath)
    renameBonesUsingDict(armatureObj, rename_map)
264
+
265
+
266
+
def boneDictRestore(filepath, armatureObj):
    """Rename the armature's bones back using the restore half of a BoneDict file."""
    _rename_map, restore_map = read_ascii_xps.readBoneDict(filepath)
    renameBonesUsingDict(armatureObj, restore_map)
269
+
270
+
271
+
def renameBonesUsingDict(armatureObj, boneDict):
    """Apply boneDict (old name -> new name), trying the Blender-style name first."""
    bones = armatureObj.data.bones
    for old_name, new_name in boneDict.items():
        # the armature may hold the Blender-converted form of the old name
        bone = bones.get(import_xnalara_pose.renameBoneToBlender(old_name))
        if bone is None:
            bone = bones.get(old_name)
        if bone is not None:
            bone.name = new_name
281
+
282
+
283
+
def XnaL_ImportModelBones(context: bpy.types.Context, armature_object: bpy.types.Object):
    """Create edit-bones on armature_object from the globally loaded xpsData."""
    xps_bones = xpsData.bones

    if (armature_object is not None) and (armature_object.data is not None) and (armature_object.type == "ARMATURE"):
        armature: bpy.types.Armature = armature_object.data

        context.view_layer.objects.active = armature_object
        bpy.ops.object.mode_set(mode='EDIT')

        # first pass: create every bone and place its head
        xps_bone: xps_types.XpsBone
        for xps_bone in xps_bones:
            editBone = armature.edit_bones.new(xps_bone.name)
            # register Blender's (possibly de-duplicated) name by XPS index
            XnaL_AddRegisterBoneName(editBone.name)

            transformedBone = coordTransform(xps_bone.co)
            editBone.head = Vector(transformedBone)
            # provisional tail; boneTailMiddle() refines it later
            editBone.tail = Vector(editBone.head) + Vector((0, 0, -.1))
            setMinimumLenght(editBone)

        # second pass: wire up the parent hierarchy by XPS bone id
        for xps_bone in xps_bones:
            editBone: bpy.types.EditBone = armature.edit_bones[xps_bone.id]
            editBone.parent = armature.edit_bones[xps_bone.parentId]

        context.view_layer.objects.active = armature_object
        bpy.ops.object.mode_set(mode='OBJECT')
    return armature_object
309
+
310
+
311
+
def boneTailMiddle(editBones, connectBones):
    """Move each bone tail to the middle point of its children."""
    # helper bones matching this pattern are ignored when averaging children
    twistboneRegex = r'\b(hip)?(twist|ctr|root|adj)\d*\b'
    for bone in editBones:
        if (bone.name.lower() == "root ground" or not bone.parent):
            # root bones get a fixed downward tail
            bone.tail = bone.head.xyz + Vector((0, -.5, 0))
        else:
            childBones = [childBone for childBone in bone.children
                          if not (re.search(twistboneRegex, childBone.name))]

            if childBones:
                # Set the tail to the mean of the children's heads
                bone.tail = Vector(map(sum, zip(*(childBone.head.xyz for childBone in childBones)))) / len(childBones)
            else:
                # no child: derive the tail from the parent instead
                if bone.parent is not None:
                    if bone.head.xyz != bone.parent.tail.xyz:
                        # tail follows the offset from the parent's tail
                        delta = bone.head.xyz - bone.parent.tail.xyz
                    else:
                        # same length/direction as the parent bone
                        delta = bone.parent.tail.xyz - bone.parent.head.xyz
                    bone.tail = bone.head.xyz + delta

    # Enforce a minimum bone length
    for bone in editBones:
        setMinimumLenght(bone)

    # Connect bones to their parent where heads meet tails
    connectEditBones(editBones, connectBones)
343
+
344
+
345
+
def makeUvs(mesh_da, faces, uvData, vertColors):
    """Create UV layers (and optional vertex colors) and fill them per face loop.

    faces must be triangles in the same order used by from_pydata, so loop
    indices are simply faceId * 3 + corner.
    """
    # Create one UV layer per XPS UV channel
    for i in range(len(uvData[0])):
        mesh_da.uv_layers.new(name="UV{}".format(str(i + 1)))
    if xpsSettings.vColors:
        mesh_da.vertex_colors.new()

    # Assign UV coordinates (and vertex colors) per face-corner loop
    for faceId, face in enumerate(faces):
        for vertId, faceVert in enumerate(face):
            loopdId = (faceId * 3) + vertId
            if xpsSettings.vColors:
                mesh_da.vertex_colors[0].data[loopdId].color = vertColors[faceVert]
            for layerIdx, uvLayer in enumerate(mesh_da.uv_layers):
                uvCoor = uvData[faceVert][layerIdx]
                uvLayer.data[loopdId].uv = Vector(uvCoor)
361
+
362
+
363
+
def createJoinedMeshes():
    """Join XPS mesh parts that share a base name into single XpsMesh objects.

    Part markers ("!..." / "<n>nPart!") are stripped from mesh names; meshes
    whose stripped names match are concatenated, with face indices rebased
    by the running vertex count.
    """
    meshPartRegex = re.compile(r'(!.*)*([\d]+nPart)*!')
    sortedMeshesList = sorted(xpsData.meshes, key=operator.attrgetter('name'))
    # unique set of joined (stripped) names, sorted for deterministic output
    joinedMeshesNames = list(
        {meshPartRegex.sub('', mesh.name, 0) for mesh in sortedMeshesList})
    joinedMeshesNames.sort()
    newMeshes = []
    for joinedMeshName in joinedMeshesNames:
        # for each joinedMeshName generate the list of meshes to join
        meshesToJoin = [mesh for mesh in sortedMeshesList if meshPartRegex.sub(
            '', mesh.name, 0) == joinedMeshName]

        totalVertexCount = 0
        vertexCount = 0
        meshCount = 0

        meshName = None
        textures = None
        vertex = None
        faces = None

        # new name for the unified mesh
        meshName = meshPartRegex.sub('', meshesToJoin[0].name, 0)
        # all parts share the same textures
        textures = meshesToJoin[0].textures
        # all parts share the UV layer count
        uvCount = meshesToJoin[0].uvCount
        vertex = []
        faces = []
        for mesh in meshesToJoin:
            vertexCount = 0
            meshCount = meshCount + 1

            # NOTE(review): the second operand can never be True (meshesToJoin
            # is drawn from sortedMeshesList), so this is effectively
            # `len(meshesToJoin) > 1` -- confirm intent.
            if len(meshesToJoin) > 1 or meshesToJoin[0] not in sortedMeshesList:
                # copy vertices, rebasing ids by the running total
                for vert in mesh.vertices:
                    vertexCount = vertexCount + 1
                    newVertice = xps_types.XpsVertex(
                        vert.id + totalVertexCount, vert.co, vert.norm, vert.vColor, vert.uv, vert.boneWeights)
                    vertex.append(newVertice)
                # copy faces, rebasing indices the same way
                for face in mesh.faces:
                    newFace = [face[0] + totalVertexCount, face[1]
                               + totalVertexCount, face[2] + totalVertexCount]
                    faces.append(newFace)
            else:
                # single part: reuse the original lists untouched
                vertex = mesh.vertices
                faces = mesh.faces
            totalVertexCount = totalVertexCount + vertexCount

        # Create the new unified mesh
        xpsMesh = xps_types.XpsMesh(meshName, textures, vertex, faces, uvCount)
        newMeshes.append(xpsMesh)
    return newMeshes
418
+
419
+
420
+
def importMeshesList(armature_ob):
    """Import every mesh from xpsData, optionally joining split parts first."""
    sources = createJoinedMeshes() if xpsSettings.joinMeshParts else xpsData.meshes
    imported = (importMesh(armature_ob, meshInfo) for meshInfo in sources)
    # importMesh returns None for degenerate meshes; drop those
    return [mesh for mesh in imported if mesh]
428
+
429
+
430
+
def generateVertexKey(vertex):
    """Build the dedup key for a vertex; including the id disables rip-joining."""
    parts = [str(vertex.co), str(vertex.norm)]
    if not xpsSettings.joinMeshRips:
        parts.insert(0, str(vertex.id))
    return ''.join(parts)
436
+
437
+
438
+
def getVertexId(vertex, mapVertexKeys, mergedVertList):
    """Return the merged index for vertex, registering it on first sight."""
    key = generateVertexKey(vertex)
    known_id = mapVertexKeys.get(key)
    if known_id is not None:
        # duplicate position/normal: flag the canonical vertex as merged
        mergedVertList[known_id].merged = True
        return known_id

    new_id = len(mergedVertList)
    mapVertexKeys[key] = new_id
    clone = copy.copy(vertex)
    clone.id = new_id
    mergedVertList.append(clone)
    return new_id
450
+
451
+
452
+
def makeVertexDict(vertexDict, mergedVertList, uvLayers, vertColor, vertices):
    """Populate the old-id -> merged-id table plus per-vertex UVs and colors."""
    mapVertexKeys = {}
    for vertex in vertices:
        uvLayers.append([uvTransform(uv) for uv in vertex.uv])
        vertColor.append([rangeByteToFloat(channel) for channel in vertex.vColor])
        # old ID to new (merged) ID
        vertexDict.append(getVertexId(vertex, mapVertexKeys, mergedVertList))
465
+
466
+
467
+
def importMesh(armature_object, meshInfo):
    """Build one Blender mesh object from an XpsMesh description.

    Deduplicates vertices, creates geometry/UVs/material, binds the mesh
    to the armature, and optionally applies custom split normals.
    Returns the new object, or None for meshes with fewer than 3 vertices.
    """
    useSeams = xpsSettings.markSeams
    # Create Mesh
    meshFullName = meshInfo.name
    print()
    print('---*** Importing Mesh {} ***---'.format(meshFullName))

    # Load UV Layers Count
    uvLayerCount = meshInfo.uvCount
    print('UV Layer Count: {}'.format(str(uvLayerCount)))

    # Load Textures Count
    textureCount = len(meshInfo.textures)
    print('Texture Count: {}'.format(str(textureCount)))

    mesh_object = None
    vertCount = len(meshInfo.vertices)
    if vertCount >= 3:
        # build old-id -> merged-id map plus transformed UVs/colors
        vertexDict = []
        mergedVertList = []
        uvLayers = []
        vertColors = []
        makeVertexDict(vertexDict, mergedVertList, uvLayers, vertColors, meshInfo.vertices)

        # inverse map: new (merged) ID back to the original IDs
        vertexOrig = [[] for x in range(len(mergedVertList))]
        for vertId, vert in enumerate(vertexDict):
            vertexOrig[vert].append(vertId)

        mergedVertices = {}
        seamEdgesDict = {}
        facesData = []
        for face in meshInfo.faces:
            v1Old = face[0]
            v2Old = face[1]
            v3Old = face[2]
            v1New = vertexDict[v1Old]
            v2New = vertexDict[v2Old]
            v3New = vertexDict[v3Old]
            oldFace = ((v1Old, v2Old, v3Old))
            facesData.append((v1New, v2New, v3New))

            # faces touching a merged vertex may contribute a UV seam
            if (useSeams):
                if (mergedVertList[v1New].merged
                        or mergedVertList[v2New].merged
                        or mergedVertList[v3New].merged):

                    findMergedEdges(seamEdgesDict, vertexDict, mergedVertList, mergedVertices, oldFace)

        # merge vertices of same coord and normal?
        mergeByNormal = True
        if mergeByNormal:
            vertices = mergedVertList
            facesList = facesData
        else:
            vertices = meshInfo.vertices
            facesList = meshInfo.faces

        # Create Mesh
        mesh_object = makeMesh(meshFullName)
        mesh_data: bpy.types.Mesh = mesh_object.data

        coords = []
        normals = []

        for vertex in vertices:
            unitnormal = Vector(vertex.norm).normalized()
            coords.append(coordTransform(vertex.co))
            normals.append(coordTransform(unitnormal))

        # Create faces (winding flipped to Blender's convention)
        faces = list(faceTransformList(facesList))
        mesh_data.from_pydata(coords, [], faces)
        mesh_data.polygons.foreach_set(
            "use_smooth", [True] * len(mesh_data.polygons))

        if xpsSettings.markSeams:
            markSeams(mesh_data, seamEdgesDict)

        # Make UV layers; UVs are indexed by the ORIGINAL face indices
        origFaces = faceTransformList(meshInfo.faces)
        makeUvs(mesh_data, origFaces, uvLayers, vertColors)

        # render flags come from the file header when present
        if (xpsData.header):
            flags = xpsData.header.flags
        else:
            flags = read_bin_xps.flagsDefault()

        # Make Material
        material_creator.makeMaterial(xpsSettings, rootDir, mesh_data, meshInfo, flags)

        if (armature_object is not None) and (mesh_object is not None):
            setArmatureModifier(armature_object, mesh_object)
            setParent(armature_object, mesh_object)

        makeVertexGroups(mesh_object, vertices)

        if (armature_object is not None) and (mesh_object is not None):
            XnaL_CreateBoneCollection(armature_object, mesh_object)

        # import custom normals
        b_import_vertex_normals = xpsSettings.importNormals

        if (b_import_vertex_normals):
            b_mesh_was_corrected = create_split_normals(mesh_object, normals)

            print("Geometry Corrected:", b_mesh_was_corrected)

    return mesh_object
584
+
585
+
586
+
def markSeams(mesh_da, seamEdgesDict):
    """Mark the edges listed in seamEdgesDict as UV seams on mesh_da.

    seamEdgesDict maps a vertex index to a list of neighbour vertex
    indices; each (min, max) pair identifies one mesh edge.
    """
    # Map edge key -> edge index once, to avoid an O(n) search per edge.
    edge_keys = {val: index for index, val in enumerate(mesh_da.edge_keys)}
    for vert1, neighbours in seamEdgesDict.items():
        for vert2 in neighbours:
            edgeIdx = None
            # edge keys are stored as (smaller, larger) vertex index
            if vert1 < vert2:
                edgeIdx = edge_keys[(vert1, vert2)]
            elif vert2 < vert1:
                edgeIdx = edge_keys[(vert2, vert1)]
            # BUGFIX: compare against None -- edge index 0 is falsy and was
            # silently skipped before.  Also renamed the loop variable that
            # shadowed the builtin `list`.
            if edgeIdx is not None:
                mesh_da.edges[edgeIdx].use_seam = True
599
+
600
+
601
+
def findMergedEdges(seamEdgesDict, vertexDict, mergedVertList, mergedVertices, oldFace):
    """Check each corner of oldFace for merged vertices and record seam edges."""
    for corner in oldFace:
        findMergedVert(seamEdgesDict, vertexDict, mergedVertList, mergedVertices, oldFace, corner)
605
+
606
+
607
+
def findMergedVert(seamEdgesDict, vertexDict, mergedVertList, mergedVertices, oldFace, mergedVert):
    """Record seam edges where mergedVert coincides with a vertex of an earlier face.

    mergedVertices accumulates, per merged vertex, the original faces seen
    so far, so later faces can be compared corner-by-corner against them.
    """
    v1Old = oldFace[0]
    v2Old = oldFace[1]
    v3Old = oldFace[2]
    vertX = vertexDict[mergedVert]
    if (mergedVertList[vertX].merged):
        # first time: start the list of faces touching this merged vertex
        if (mergedVertices.get(vertX) is None):
            mergedVertices[vertX] = []

        # compare against every face already registered on this vertex
        for facesList in mergedVertices[vertX]:
            i = 0
            matchV1 = False
            while not matchV1 and i < 3:
                # same merged vertex reached through a DIFFERENT original vertex
                if ((vertX == vertexDict[facesList[i]]) and mergedVert != facesList[i]):
                    # try the two other corners of the current face for seam edges
                    if (mergedVert != v1Old):
                        checkEdgePairForSeam(i, seamEdgesDict, vertexDict, vertX, v1Old, facesList)
                    if (mergedVert != v2Old):
                        checkEdgePairForSeam(i, seamEdgesDict, vertexDict, vertX, v2Old, facesList)
                    if (mergedVert != v3Old):
                        checkEdgePairForSeam(i, seamEdgesDict, vertexDict, vertX, v3Old, facesList)
                    matchV1 = True
                i = i + 1

        # register this face for comparisons with later faces
        mergedVertices[vertX].append((v1Old, v2Old, v3Old))
639
+
640
+
641
+
def checkEdgePairForSeam(i, seamEdgesDict, vertexDict, mergedVert, vert, facesList):
    """Test the two face corners other than i for an edge that needs a seam."""
    for corner in range(3):
        if corner != i:
            makeSeamEdgeDict(corner, seamEdgesDict, vertexDict, mergedVert, vert, facesList)
648
+
649
+
650
+
def makeSeamEdgeDict(i, seamEdgesDict, vertexDict, mergedVert, vert, facesList):
    """Append vert's merged id under mergedVert when both corners map to the same vertex."""
    if vertexDict[vert] != vertexDict[facesList[i]]:
        return
    seamEdgesDict.setdefault(mergedVert, []).append(vertexDict[vert])
655
+
656
+
657
+
def setArmatureModifier(armature_ob, mesh_ob):
    """Add an Armature modifier binding mesh_ob to armature_ob via vertex groups."""
    modifier = mesh_ob.modifiers.new(type="ARMATURE", name="Armature")
    modifier.use_vertex_groups = True
    modifier.object = armature_ob
661
+
662
+
663
+
def setParent(armature_ob, mesh_ob):
    """Make the armature the mesh object's parent."""
    mesh_ob.parent = armature_ob
665
+
666
+
667
+
def makeVertexGroups(mesh_ob, vertices):
    """Create vertex groups on mesh_ob and assign bone weights per vertex."""
    # NOTE: Blender caps vertex-group names at 63 characters
    armature = mesh_ob.find_armature()
    for vertex in vertices:
        assignVertexGroup(vertex, armature, mesh_ob)
674
+
675
+
676
+
def assignVertexGroup(vert, armature, mesh_ob):
    """Write each non-zero bone weight of vert into the matching vertex group."""
    for boneWeight in vert.boneWeights:
        if boneWeight.weight == 0:
            continue
        # map the XPS bone index back to the current Blender bone name
        boneName = XnaL_GetBoneNameByIndex(boneWeight.id)
        if not boneName:
            continue
        group = mesh_ob.vertex_groups.get(boneName)
        if not group:
            group = mesh_ob.vertex_groups.new(name=boneName)
        group.add([vert.id], boneWeight.weight, 'REPLACE')
689
+
690
+
691
+
if __name__ == "__main__":
    # Manual-test entry point: import a sample model with every option enabled.
    readfilename = r'C:\XPS Tutorial\Yaiba MOMIJIII\momi3.mesh.mesh'
    uvDisplX = 0
    uvDisplY = 0
    impDefPose = True
    joinMeshRips = True
    joinMeshParts = True
    vColors = True
    connectBones = True
    autoIk = True
    importNormals = True
    separate_optional_objects = True
    # BUGFIX: the constructor call below used to pass the module-level
    # `markSeams` *function* as the mark-seams setting; pass a boolean.
    markSeamsSetting = True

    xpsSettings = xps_types.XpsImportSettings(
        readfilename, uvDisplX, uvDisplY, impDefPose, joinMeshRips,
        markSeamsSetting, vColors,
        joinMeshParts, connectBones, autoIk, importNormals, separate_optional_objects)
    getInputFilename(xpsSettings)
+256
xnalara_io_Tools/import_xnalara_pose.py
+256
xnalara_io_Tools/import_xnalara_pose.py
···
1
+
from math import radians
2
+
import os
3
+
import re
4
+
5
+
from . import read_ascii_xps
6
+
from .timing import timing
7
+
import bpy
8
+
from mathutils import Euler, Matrix, Vector
9
+
10
+
11
+
# Token inserted in place of the left/right word while converting bone names.
PLACE_HOLDER = r'*side*'
# Blender-style side suffixes and their XPS-style word counterparts.
RIGHT_BLENDER_SUFFIX = r'.R'
LEFT_BLENDER_SUFFIX = r'.L'
RIGHT_XPS_SUFFIX = r'right'
LEFT_XPS_SUFFIX = r'left'
16
+
17
+
18
+
def changeBoneNameToBlender(boneName, xpsSuffix, blenderSuffix):
    """Swap an XPS side word for the Blender '.L'/'.R' suffix convention."""
    # substitute the side word with the placeholder token
    renamed = re.sub(xpsSuffix, PLACE_HOLDER, boneName, flags=re.I)
    # collapse whitespace runs left behind by the substitution
    renamed = re.sub(r'\s+', ' ', renamed, flags=re.I)
    renamed = renamed.strip()
    # append the Blender suffix only when a substitution actually happened
    if boneName != renamed:
        renamed = '{0}{1}'.format(renamed, blenderSuffix)

    return renamed.strip()
29
+
30
+
31
+
def renameBoneToBlender(oldName):
    """Convert an XPS bone name (e.g. 'left arm') to Blender style ('arm.L')."""
    newName = oldName
    if PLACE_HOLDER in oldName.lower():
        # already carries the placeholder token; leave untouched
        return newName
    for xps_suffix, blender_suffix in ((LEFT_XPS_SUFFIX, LEFT_BLENDER_SUFFIX),
                                       (RIGHT_XPS_SUFFIX, RIGHT_BLENDER_SUFFIX)):
        if re.search(xps_suffix, oldName, flags=re.I):
            newName = changeBoneNameToBlender(oldName, xps_suffix, blender_suffix)
    return newName
41
+
42
+
43
+
def renameBonesToBlender(armatures_obs):
    """Rename every bone of the given armatures to the Blender convention."""
    for armature_ob in armatures_obs:
        for armature_bone in armature_ob.data.bones:
            armature_bone.name = renameBoneToBlender(armature_bone.name)
48
+
49
+
50
+
def changeBoneNameToXps(oldName, blenderSuffix, xpsSuffix):
    """Swap the Blender '.L'/'.R' suffix back to the XPS side word."""
    # strip the trailing '.L'/'.R'
    stripped = re.sub('{0}{1}'.format(re.escape(blenderSuffix), '$'), '', oldName, flags=re.I)
    # collapse double spaces
    stripped = re.sub(r'\s+', ' ', stripped, flags=re.I)
    # put the side word where the placeholder token sits
    return re.sub(re.escape(PLACE_HOLDER), xpsSuffix, stripped, flags=re.I)
58
+
59
+
60
+
def renameBoneToXps(oldName):
    """Convert a Blender bone name ('arm.L') back to XPS style ('left arm')."""
    newName = oldName
    if PLACE_HOLDER in oldName.lower():
        for blender_suffix, xps_suffix in ((LEFT_BLENDER_SUFFIX, LEFT_XPS_SUFFIX),
                                           (RIGHT_BLENDER_SUFFIX, RIGHT_XPS_SUFFIX)):
            if re.search(re.escape(blender_suffix), oldName, re.I):
                newName = changeBoneNameToXps(oldName, blender_suffix, xps_suffix)
    return newName.strip()
70
+
71
+
72
+
def renameBonesToXps(armatures_obs):
    """Rename every bone of the given armatures back to the XPS convention."""
    for armature_ob in armatures_obs:
        for armature_bone in armature_ob.data.bones:
            armature_bone.name = renameBoneToXps(armature_bone.name)
76
+
77
+
78
+
def getInputPoseSequence(filename):
    """Import a numbered .pose sequence as animation keyframes.

    Every .pose file in filename's folder sharing its non-numeric name
    prefix is imported in sorted order, one per frame; the current frame
    is restored afterwards.
    """
    filepath, file = os.path.split(filename)
    basename, ext = os.path.splitext(file)
    # name prefix without the trailing frame number
    poseSuffix = re.sub(r'\d+$', '', basename)

    files = []
    for f in [file for file in os.listdir(filepath) if os.path.splitext(file)[1] == '.pose']:
        fName, fExt = os.path.splitext(f)
        fPoseSuffix = re.sub(r'\d+$', '', fName)
        if poseSuffix == fPoseSuffix:
            files.append(f)

    files.sort()

    # keyframe one pose per frame, advancing the frame counter each time
    initialFrame = bpy.context.scene.frame_current
    for poseFile in files:
        frame = bpy.context.scene.frame_current
        poseFilename = os.path.join(filepath, poseFile)
        importPoseAsKeyframe(poseFilename)
        bpy.context.scene.frame_current = frame + 1

    # restore the frame the user was on
    bpy.context.scene.frame_current = initialFrame
100
+
101
+
102
+
def importPoseAsKeyframe(filename):
    """Import a single .pose file at the current frame."""
    getInputFilename(filename)
104
+
105
+
106
+
def getInputFilename(filename):
    """Run the pose-import pipeline (setup, import, finalize) for filename."""
    blenderImportSetup()
    xpsImport(filename)
    blenderImportFinalize()
111
+
112
+
113
+
def blenderImportSetup():
    # Hook for pre-import scene preparation; nothing needed for poses.
    pass
115
+
116
+
117
+
def blenderImportFinalize():
    # Hook for post-import cleanup; nothing needed for poses.
    pass
119
+
120
+
121
+
def loadXpsFile(filename):
    """Parse an XPS .pose file and return the bone-data mapping."""
    return read_ascii_xps.readXpsPose(filename)
127
+
128
+
129
+
@timing
def xpsImport(filename):
    """Load the pose file into the module globals and apply it to the active armature."""
    global rootDir
    global xpsData

    print("------------------------------------------------------------")
    print("---------------EXECUTING XPS PYTHON IMPORTER----------------")
    print("------------------------------------------------------------")
    print("Importing Pose: ", filename)

    rootDir, file = os.path.split(filename)
    print('rootDir: {}'.format(rootDir))

    xpsData = loadXpsFile(filename)

    importPose()
145
+
146
+
147
+
def importPose():
    """Apply the globally loaded pose data to the active object."""
    boneCount = len(xpsData)
    print('Importing Pose', str(boneCount), 'bones')

    setXpsPose(bpy.context.active_object, xpsData)
153
+
154
+
155
+
def resetPose(armature):
    """Clear every pose-bone transform back to identity."""
    for pose_bone in armature.pose.bones:
        pose_bone.matrix_basis = Matrix()
158
+
159
+
160
+
def setXpsPose(armature, xpsData):
    """Apply an XPS pose dict to armature's pose bones and keyframe them."""
    currentMode = bpy.context.mode
    currentObj = bpy.context.active_object
    bpy.ops.object.mode_set(mode='OBJECT', toggle=False)

    context = bpy.context
    rigobj = armature
    context.view_layer.objects.active = rigobj
    rigobj.select_set(state=True)

    bpy.ops.object.mode_set(mode='POSE')
    bpy.ops.pose.select_all(action='DESELECT')
    for boneData in xpsData.items():
        xpsBoneData = boneData[1]
        boneName = xpsBoneData.boneName
        poseBone = rigobj.pose.bones.get(boneName)
        if poseBone is None:
            # fall back to the Blender-converted form of the name
            poseBone = rigobj.pose.bones.get(renameBoneToBlender(boneName))

        if poseBone:
            xpsPoseBone(poseBone, xpsBoneData)
            # only selected bones receive the keyframe inserted below
            poseBone.bone.select = True

    bpy.ops.anim.keyframe_insert(type='LocRotScale')
    bpy.ops.object.posemode_toggle()
    context.view_layer.objects.active = currentObj
    # NOTE(review): bpy.context.mode can report values (e.g. 'EDIT_ARMATURE')
    # that mode_set() rejects -- confirm callers start from OBJECT/POSE mode.
    bpy.ops.object.mode_set(mode=currentMode)
187
+
188
+
189
+
def xpsPoseBone(poseBone, xpsBoneData):
    """Apply rotation, translation and scale deltas to one pose bone."""
    xpsBoneRotate(poseBone, xpsBoneData.rotDelta)
    xpsBoneTranslate(poseBone, xpsBoneData.coordDelta)
    xpsBoneScale(poseBone, xpsBoneData.scale)
193
+
194
+
195
+
def xpsBoneRotToEuler(rotDelta):
    """Convert an XPS degree rotation vector into a 'YXZ'-order Euler in radians."""
    return Euler((radians(rotDelta.x), radians(rotDelta.y), radians(rotDelta.z)), 'YXZ')
200
+
201
+
202
+
def vectorTransform(vec):
    """Convert an XPS vector to Blender space: (x, y, z) -> (x, -z, y)."""
    return Vector((vec.x, -vec.z, vec.y))
209
+
210
+
211
+
def vectorTransformTranslate(vec):
    """Convert a translation delta from XPS to Blender space.

    The mapping is identical to vectorTransform(); this function existed as
    a byte-for-byte duplicate and now delegates to the shared helper so the
    axis conversion lives in one place.  Kept as a separate name so the
    translate call-sites stay self-describing.
    """
    return vectorTransform(vec)
218
+
219
+
220
+
def vectorTransformScale(vec):
    """Scale values need no axis flip; just repackage the components as a Vector."""
    return Vector((vec.x, vec.y, vec.z))
226
+
227
+
228
+
def xpsBoneRotate(poseBone, rotDelta):
    """Rotate poseBone by the XPS world-space delta, expressed in its local frame."""
    saved_rotation_mode = poseBone.rotation_mode
    poseBone.rotation_mode = 'QUATERNION'
    rotation = vectorTransform(rotDelta)
    eulerRot = xpsBoneRotToEuler(rotation)
    restRot = poseBone.bone.matrix_local.to_quaternion()  # LOCAL EditBone

    # conjugate by the rest orientation to move the delta into bone space
    worldRot = eulerRot.to_quaternion()
    poseBone.rotation_quaternion = restRot.inverted() @ worldRot @ restRot
    poseBone.rotation_mode = saved_rotation_mode
238
+
239
+
240
+
def xpsBoneTranslate(poseBone, coordsDelta):
    """Translate poseBone by the XPS delta, expressed in the bone's local frame."""
    # (removed a dead `translate = coordsDelta` that was immediately overwritten)
    translate = vectorTransformTranslate(coordsDelta)
    origRot = poseBone.bone.matrix_local.to_quaternion()  # LOCAL EditBone

    poseBone.location = origRot.inverted() @ translate
246
+
247
+
248
+
def xpsBoneScale(poseBone, scale):
    """Apply the XPS scale vector to the pose bone."""
    poseBone.scale = vectorTransformScale(scale)
251
+
252
+
253
+
if __name__ == "__main__":
    # Manual-test entry point: import a sample pose file.
    readPosefilename1 = r"G:\3DModeling\XNALara\XNALara_XPS\dataTest\Models\Queen's Blade\hide Kelta.pose"

    getInputFilename(readPosefilename1)
+659
xnalara_io_Tools/material_creator.py
+659
xnalara_io_Tools/material_creator.py
···
1
+
import os
2
+
import random
3
+
4
+
import bpy
5
+
from mathutils import Vector
6
+
7
+
from . import xps_const, xps_material
8
+
9
+
# Blender shader-node bl_idnames and color defaults used by the material builder.
ALPHA_MODE_CHANNEL = 'CHANNEL_PACKED'
# Nodes Layout
NODE_FRAME = 'NodeFrame'

# Nodes Shaders
BSDF_DIFFUSE_NODE = 'ShaderNodeBsdfDiffuse'
BSDF_EMISSION_NODE = 'ShaderNodeEmission'
BSDF_GLOSSY_NODE = 'ShaderNodeBsdfGlossy'
PRINCIPLED_SHADER_NODE = 'ShaderNodeBsdfPrincipled'
BSDF_TRANSPARENT_NODE = 'ShaderNodeBsdfTransparent'
BSDF_GLASS_NODE = 'ShaderNodeBsdfGlass'
SHADER_ADD_NODE = 'ShaderNodeAddShader'
SHADER_MIX_NODE = 'ShaderNodeMixShader'

# Nodes Color
RGB_MIX_NODE = 'ShaderNodeMixRGB'
INVERT_NODE = 'ShaderNodeInvert'

# Nodes Input
TEXTURE_IMAGE_NODE = 'ShaderNodeTexImage'
ENVIRONMENT_IMAGE_NODE = 'ShaderNodeTexEnvironment'
COORD_NODE = 'ShaderNodeTexCoord'

# Nodes Outputs
OUTPUT_NODE = 'ShaderNodeOutputMaterial'

# Nodes Vector
MAPPING_NODE = 'ShaderNodeMapping'
NORMAL_MAP_NODE = 'ShaderNodeNormalMap'

# Nodes Convert
SHADER_NODE_MATH = 'ShaderNodeMath'
RGB_TO_BW_NODE = 'ShaderNodeRGBToBW'
SHADER_NODE_SEPARATE_RGB = 'ShaderNodeSeparateRGB'
SHADER_NODE_COMBINE_RGB = 'ShaderNodeCombineRGB'

# Node Groups
NODE_GROUP = 'ShaderNodeGroup'
NODE_GROUP_INPUT = 'NodeGroupInput'
NODE_GROUP_OUTPUT = 'NodeGroupOutput'
SHADER_NODE_TREE = 'ShaderNodeTree'

# Node Custom Groups (names of node groups this add-on creates/uses)
INVERT_CHANNEL_NODE = 'Invert Channel'
MIX_NORMAL_NODE = 'Normal Mix'
NORMAL_MASK_NODE = 'Normal Mask'
XPS_SHADER_NODE = 'XPS Shader'

# Sockets
NODE_SOCKET_COLOR = 'NodeSocketColor'
NODE_SOCKET_FLOAT = 'NodeSocketFloat'
NODE_SOCKET_FLOAT_FACTOR = 'NodeSocketFloatFactor'
NODE_SOCKET_SHADER = 'NodeSocketShader'
NODE_SOCKET_VECTOR = 'NodeSocketVector'

# Colors (RGBA defaults for generated placeholder textures/sockets)
DIFFUSE_COLOR = (0.9, 0.9, 0.9, 1)
SPECULAR_COLOR = (0.707, 0.707, 0.707, 1)
LIGHTMAP_COLOR = (1, 1, 1, 1)
NORMAL_COLOR = (0.5, 0.5, 1, 1)
GREY_COLOR = (0.5, 0.5, 0.5, 1)
70
+
71
+
# TODO
72
+
73
+
74
+
def makeMaterialOutputNode(node_tree):
    """Add a Material Output node at the canonical layout position."""
    output_node = node_tree.nodes.new(OUTPUT_NODE)
    output_node.location = 600, 0
    return output_node
78
+
79
+
80
+
def makeImageNode(node_tree):
    """Add an Image Texture node at the canonical layout position."""
    image_node = node_tree.nodes.new(TEXTURE_IMAGE_NODE)
    image_node.location = -400, 0
    return image_node
84
+
85
+
86
+
def makeEnvironmentNode(node_tree):
    """Add an Environment Texture node to *node_tree* and return it."""
    env_node = node_tree.nodes.new(ENVIRONMENT_IMAGE_NODE)
    env_node.location = (-400, 0)
    return env_node
90
+
91
+
92
+
def makeTransparencyNode(node_tree):
    """Add a Transparent BSDF node to *node_tree* and return it."""
    transparent_node = node_tree.nodes.new(BSDF_TRANSPARENT_NODE)
    transparent_node.location = (-400, -200)
    return transparent_node
96
+
97
+
98
+
def makeShaderMixNode(node_tree):
    """Add a Mix Shader node to *node_tree* and return it."""
    mix_node = node_tree.nodes.new(SHADER_MIX_NODE)
    mix_node.location = (-400, -400)
    return mix_node
102
+
103
+
104
+
def randomColor():
    """Return a random RGB tuple, each channel uniform in [0, 1)."""
    return (random.random(), random.random(), random.random())
109
+
110
+
# TODO missing version check
111
+
112
+
113
+
def setNodeScale(node, value):
    """Apply a uniform XYZ scale of *value* to a mapping node.

    Blender 2.81 moved the Mapping node's scale from a node property to an
    input socket; handle both layouts transparently.
    """
    scale = (value, value, value)
    if 'Scale' in node.inputs:
        node.inputs['Scale'].default_value = scale
    else:
        node.scale = scale
119
+
120
+
121
+
def getNodeGroup(node_tree, group):
    """Instantiate the node group named *group* inside *node_tree*.

    The group must already exist in ``bpy.data.node_groups``.
    """
    group_node = node_tree.nodes.new(NODE_GROUP)
    group_node.node_tree = bpy.data.node_groups[group]
    return group_node
125
+
126
+
# TODO make platform independent
127
+
128
+
129
+
def makeImageFilepath(rootDir, textureFilename):
    """Join the model's root directory with a texture file name."""
    filepath = os.path.join(rootDir, textureFilename)
    return filepath
131
+
132
+
# TODO make platform independent
133
+
134
+
135
+
def loadImage(textureFilepath):
    """Load a texture image datablock, or a placeholder if the file is missing.

    Missing files produce a blank 1024x1024 RGBA image that still points at
    the expected path, so a re-save can relink it later.
    """
    textureFilename = os.path.basename(textureFilepath)

    if os.path.isfile(textureFilepath):
        print("Loading Texture: " + textureFilename)
        image = bpy.data.images.load(filepath=textureFilepath, check_existing=True)
    else:
        print("Warning. Texture not found " + textureFilename)
        image = bpy.data.images.new(
            name=textureFilename, width=1024, height=1024, alpha=True,
            float_buffer=False)
        image.source = 'FILE'
        image.filepath = textureFilepath

    image.alpha_mode = ALPHA_MODE_CHANNEL
    return image
152
+
153
+
154
+
def newTextureSlot(materialData):
    """Add a UV-mapped, full-alpha texture slot to *materialData*.

    NOTE(review): ``texture_slots`` is the pre-2.80 Blender Internal API;
    presumably kept for legacy export paths — confirm before removing.
    """
    slot = materialData.texture_slots.add()
    slot.texture_coords = "UV"
    slot.use_map_alpha = True
    slot.alpha_factor = 1.0
    return slot
161
+
162
+
163
+
def makeMaterial(xpsSettings, rootDir, mesh_da, meshInfo, flags):
    """Create a new node-based material for the mesh and build its node graph."""
    new_material = bpy.data.materials.new(meshInfo.name)
    mesh_da.materials.append(new_material)
    makeNodesMaterial(xpsSettings, new_material, rootDir, mesh_da, meshInfo, flags)
171
+
172
+
173
+
def makeNodesMaterial(xpsSettings, material: bpy.types.Material, rootDir, mesh_da, meshInfo, flags):
    """Rebuild *material*'s node tree as an XPS shader graph.

    The material (= mesh) name encodes the XPS render group and its
    parameters; each texture listed in ``meshInfo.textures`` is loaded and
    wired into the matching input of the custom 'XPS Shader' group node.

    :param xpsSettings: importer settings (not read here; interface parity)
    :param material: target material; its existing nodes are cleared
    :param rootDir: directory that holds the model's texture files
    :param mesh_da: mesh datablock (not read here; interface parity)
    :param meshInfo: parsed mesh description providing ``textures``
    :param flags: import flags (not read here; interface parity)
    """
    textureFilepaths = meshInfo.textures
    material.use_nodes = True
    node_tree = material.node_tree
    node_tree.nodes.clear()

    # Render group and tiling parameters are encoded in the name.
    meshFullName = material.name
    renderType = xps_material.makeRenderType(meshFullName)
    renderGroup = xps_material.RenderGroup(renderType)
    param1 = renderType.texRepeater1

    bUseAlpha = renderGroup.rgAlpha

    # Core nodes: material output, XPS shader group, shared texture coords.
    ouputNode = makeMaterialOutputNode(node_tree)
    xpsShadeNode = getNodeGroup(node_tree, XPS_SHADER_NODE)
    ouputNode.location = xpsShadeNode.location + Vector((700, 400))
    coordNode = node_tree.nodes.new(COORD_NODE)
    coordNode.location = xpsShadeNode.location + Vector((-2500, 400))

    if bUseAlpha:
        version = bpy.app.version[:2]
        if version in ((4, 0), (4, 1)):
            material.blend_method = "HASHED"
        elif version >= (4, 2):
            # BUGFIX: this used to test a closed list [(4,2),(4,3),(4,4)],
            # silently skipping alpha setup on Blender 4.5 and later.
            material.surface_render_method = "DITHERED"

    node_tree.links.new(xpsShadeNode.outputs['Shader'], ouputNode.inputs['Surface'])

    col_width = 200
    imagesPosX = -col_width * 6
    imagesPosY = 400

    # TODO make platform independent
    for texIndex, textureInfo in enumerate(textureFilepaths):
        textureBasename = os.path.basename(textureInfo.file)

        # Every texture gets a Mapping + Image Texture pair driven by the UVs.
        mappingCoordNode = node_tree.nodes.new(MAPPING_NODE)
        imageFilepath = makeImageFilepath(rootDir, textureBasename)
        imageNode = makeImageNode(node_tree)
        imageNode.image = loadImage(imageFilepath)
        node_tree.links.new(mappingCoordNode.outputs['Vector'], imageNode.inputs['Vector'])
        imageNode.location = xpsShadeNode.location + Vector((imagesPosX, imagesPosY * 0))
        mappingCoordNode.location = imageNode.location + Vector((-400, 0))
        node_tree.links.new(coordNode.outputs['UV'], mappingCoordNode.inputs['Vector'])

        # Textures beyond the render group's declared slots stay loaded but
        # unconnected (pre-existing behavior).
        if texIndex >= len(renderGroup.rgTexType):
            continue

        texType = xps_material.TextureType(renderGroup.rgTexType[texIndex])
        if texType == xps_material.TextureType.DIFFUSE:
            imageNode.label = 'Diffuse'
            node_tree.links.new(imageNode.outputs['Color'], xpsShadeNode.inputs['Diffuse'])
            imageNode.location = xpsShadeNode.location + Vector((imagesPosX, imagesPosY * 1))
            mappingCoordNode.location = imageNode.location + Vector((-400, 0))
            if bUseAlpha:
                node_tree.links.new(imageNode.outputs['Alpha'], xpsShadeNode.inputs['Alpha'])
        elif texType == xps_material.TextureType.LIGHT:
            imageNode.label = 'Light Map'
            imageNode.location = xpsShadeNode.location + Vector((imagesPosX, imagesPosY * 0))
            mappingCoordNode.location = imageNode.location + Vector((-400, 0))
            node_tree.links.new(imageNode.outputs['Color'], xpsShadeNode.inputs['Lightmap'])
        elif texType == xps_material.TextureType.BUMP:
            imageNode.label = 'Bump Map'
            imageNode.image.colorspace_settings.is_data = True  # non-color data
            node_tree.links.new(imageNode.outputs['Color'], xpsShadeNode.inputs['Bump Map'])
            imageNode.location = xpsShadeNode.location + Vector((imagesPosX, imagesPosY * -2))
            mappingCoordNode.location = imageNode.location + Vector((-400, 0))
        elif texType == xps_material.TextureType.SPECULAR:
            imageNode.label = 'Specular'
            imageNode.image.colorspace_settings.is_data = True
            node_tree.links.new(imageNode.outputs['Color'], xpsShadeNode.inputs['Specular'])
            imageNode.location = xpsShadeNode.location + Vector((imagesPosX, imagesPosY * -1))
            mappingCoordNode.location = imageNode.location + Vector((-400, 0))
        elif texType == xps_material.TextureType.ENVIRONMENT:
            # Environment maps need a dedicated node type: swap the plain
            # image node for an environment node reusing the loaded image.
            environmentNode = makeEnvironmentNode(node_tree)
            environmentNode.image = imageNode.image
            node_tree.nodes.remove(imageNode)
            imageNode = environmentNode
            # BUGFIX: the label used to be assigned to the removed node.
            imageNode.label = 'Reflection'
            imageNode.location = xpsShadeNode.location + Vector((imagesPosX, imagesPosY * 2))
            mappingCoordNode.location = imageNode.location + Vector((-400, 0))
            node_tree.links.new(coordNode.outputs['Reflection'], mappingCoordNode.inputs['Vector'])
            node_tree.links.new(mappingCoordNode.outputs['Vector'], environmentNode.inputs['Vector'])
            node_tree.links.new(imageNode.outputs['Color'], xpsShadeNode.inputs['Environment'])
        elif texType == xps_material.TextureType.MASK:
            imageNode.label = 'Bump Mask'
            imageNode.image.colorspace_settings.is_data = True
            imageNode.location = xpsShadeNode.location + Vector((imagesPosX, imagesPosY * -3))
            mappingCoordNode.location = imageNode.location + Vector((-400, 0))
            node_tree.links.new(imageNode.outputs['Color'], xpsShadeNode.inputs['Bump Mask'])
        elif texType == xps_material.TextureType.BUMP1:
            imageNode.label = 'Micro Bump 1'
            imageNode.image.colorspace_settings.is_data = True
            # Render groups 28/29 tile micro bump 1 with the second repeater.
            if renderGroup.renderGroupNum in (28, 29):
                texRepeater = renderType.texRepeater2
            else:
                texRepeater = renderType.texRepeater1
            setNodeScale(mappingCoordNode, texRepeater)
            node_tree.links.new(coordNode.outputs['UV'], mappingCoordNode.inputs['Vector'])
            node_tree.links.new(mappingCoordNode.outputs['Vector'], imageNode.inputs['Vector'])
            node_tree.links.new(imageNode.outputs['Color'], xpsShadeNode.inputs['MicroBump 1'])
            imageNode.location = xpsShadeNode.location + Vector((imagesPosX, imagesPosY * -4))
            mappingCoordNode.location = imageNode.location + Vector((-400, 0))
        elif texType == xps_material.TextureType.BUMP2:
            imageNode.label = 'Micro Bump 2'
            imageNode.image.colorspace_settings.is_data = True
            setNodeScale(mappingCoordNode, renderType.texRepeater2)
            node_tree.links.new(coordNode.outputs['UV'], mappingCoordNode.inputs['Vector'])
            node_tree.links.new(mappingCoordNode.outputs['Vector'], imageNode.inputs['Vector'])
            node_tree.links.new(imageNode.outputs['Color'], xpsShadeNode.inputs['MicroBump 2'])
            imageNode.location = xpsShadeNode.location + Vector((imagesPosX, imagesPosY * -5))
            mappingCoordNode.location = imageNode.location + Vector((-400, 0))
        elif texType == xps_material.TextureType.EMISSION:
            imageNode.label = 'Emission Map'
            imageNode.location = xpsShadeNode.location + Vector((imagesPosX, imagesPosY * 2))
            mappingCoordNode.location = imageNode.location + Vector((-400, 0))
            if renderGroup.renderGroupNum in (36, 37):
                setNodeScale(mappingCoordNode, param1)
            node_tree.links.new(imageNode.outputs['Color'], xpsShadeNode.inputs['Emission'])
        elif texType == xps_material.TextureType.EMISSION_MINI:
            imageNode.label = 'Mini Emission'
            imageNode.location = xpsShadeNode.location + Vector((imagesPosX, imagesPosY * -6))
            mappingCoordNode.location = imageNode.location + Vector((-400, 0))
            setNodeScale(mappingCoordNode, param1)
            node_tree.links.new(imageNode.outputs['Color'], xpsShadeNode.inputs['Emission'])
315
+
316
+
317
+
def mix_normal_group():
    """Create (or fetch) the 'Normal Mix' node group.

    Blends a main and a detail tangent-space normal color: R/G channels are
    summed and then re-centered with a SUBTRACT mix, B channels are
    multiplied. Returns the node group datablock.
    """
    # create a group — reuse the existing one if it was already built
    if MIX_NORMAL_NODE in bpy.data.node_groups:
        return bpy.data.node_groups[MIX_NORMAL_NODE]
    node_tree = bpy.data.node_groups.new(name=MIX_NORMAL_NODE, type=SHADER_NODE_TREE)
    node_tree.nodes.clear()

    # Split both input normals into channels, then rebuild R/G-only colors.
    mainNormalSeparateNode = node_tree.nodes.new(SHADER_NODE_SEPARATE_RGB)
    mainNormalSeparateNode.location = Vector((0, 0))
    detailNormalSeparateNode = node_tree.nodes.new(SHADER_NODE_SEPARATE_RGB)
    detailNormalSeparateNode.location = mainNormalSeparateNode.location + Vector((0, -200))
    mainNormalCombineNode = node_tree.nodes.new(SHADER_NODE_COMBINE_RGB)
    mainNormalCombineNode.location = mainNormalSeparateNode.location + Vector((200, 0))
    detailNormalCombineNode = node_tree.nodes.new(SHADER_NODE_COMBINE_RGB)
    detailNormalCombineNode.location = mainNormalSeparateNode.location + Vector((200, -200))

    # Blue channels are multiplied (second input is re-linked below).
    multiplyBlueNode = node_tree.nodes.new(SHADER_NODE_MATH)
    multiplyBlueNode.operation = 'MULTIPLY'
    multiplyBlueNode.inputs[1].default_value = 1
    multiplyBlueNode.location = mainNormalSeparateNode.location + Vector((200, -400))

    # ADD the two R/G colors at full factor.
    addRGBNode = node_tree.nodes.new(RGB_MIX_NODE)
    addRGBNode.blend_type = 'ADD'
    addRGBNode.inputs['Fac'].default_value = 1
    addRGBNode.location = mainNormalSeparateNode.location + Vector((400, 0))

    # Re-center the sum; Color2 stays at the node's default (not linked).
    subsRGBNode = node_tree.nodes.new(RGB_MIX_NODE)
    subsRGBNode.blend_type = 'SUBTRACT'
    subsRGBNode.inputs['Fac'].default_value = 1
    subsRGBNode.location = mainNormalSeparateNode.location + Vector((600, -100))

    # Final color = (re-centered R, re-centered G, multiplied B).
    separateRedBlueNode = node_tree.nodes.new(SHADER_NODE_SEPARATE_RGB)
    separateRedBlueNode.location = mainNormalSeparateNode.location + Vector((800, -100))
    combineFinalNode = node_tree.nodes.new(SHADER_NODE_COMBINE_RGB)
    combineFinalNode.location = mainNormalSeparateNode.location + Vector((1000, -200))

    # TODO accidental node group wipes
    # Input/Output
    group_inputs = node_tree.nodes.new(NODE_GROUP_INPUT)
    group_inputs.location = mainNormalSeparateNode.location + Vector((-200, -100))
    group_outputs = node_tree.nodes.new(NODE_GROUP_OUTPUT)
    group_outputs.location = mainNormalSeparateNode.location + Vector((1200, -100))
    node_tree.interface.clear()

    # Input Sockets (neutral normal color as default)
    main_normal_socket = node_tree.interface.new_socket("Main", in_out="INPUT", socket_type="NodeSocketColor")
    main_normal_socket.default_value = NORMAL_COLOR
    detail_normal_socket = node_tree.interface.new_socket("Detail", in_out="INPUT", socket_type="NodeSocketColor")
    detail_normal_socket.default_value = NORMAL_COLOR

    # Output Sockets
    output_value = node_tree.interface.new_socket("Color", in_out="OUTPUT", socket_type="NodeSocketColor")

    # Links Input
    links = node_tree.links
    links.new(group_inputs.outputs['Main'], mainNormalSeparateNode.inputs['Image'])
    links.new(group_inputs.outputs['Detail'], detailNormalSeparateNode.inputs['Image'])

    links.new(mainNormalSeparateNode.outputs['R'], mainNormalCombineNode.inputs['R'])
    links.new(mainNormalSeparateNode.outputs['G'], mainNormalCombineNode.inputs['G'])
    links.new(mainNormalSeparateNode.outputs['B'], multiplyBlueNode.inputs[0])
    links.new(detailNormalSeparateNode.outputs['R'], detailNormalCombineNode.inputs['R'])
    links.new(detailNormalSeparateNode.outputs['G'], detailNormalCombineNode.inputs['G'])
    links.new(detailNormalSeparateNode.outputs['B'], multiplyBlueNode.inputs[1])

    links.new(mainNormalCombineNode.outputs['Image'], addRGBNode.inputs[1])
    links.new(detailNormalCombineNode.outputs['Image'], addRGBNode.inputs[2])
    links.new(addRGBNode.outputs['Color'], subsRGBNode.inputs[1])

    links.new(subsRGBNode.outputs['Color'], separateRedBlueNode.inputs['Image'])

    links.new(separateRedBlueNode.outputs['R'], combineFinalNode.inputs['R'])
    links.new(separateRedBlueNode.outputs['G'], combineFinalNode.inputs['G'])
    links.new(multiplyBlueNode.outputs['Value'], combineFinalNode.inputs['B'])

    links.new(combineFinalNode.outputs['Image'], group_outputs.inputs['Color'])

    return node_tree
395
+
396
+
397
+
def invert_channel_group():
    """Create (or fetch) the 'Invert Channel' node group.

    Splits a color into R/G/B, routes each through an Invert node whose
    factor is exposed as a group input (0 = pass through, 1 = invert), and
    recombines. Used to flip tangent-space normal-map channels.
    """
    # create a group — reuse the existing one if it was already built
    if INVERT_CHANNEL_NODE in bpy.data.node_groups:
        return bpy.data.node_groups[INVERT_CHANNEL_NODE]
    node_tree = bpy.data.node_groups.new(name=INVERT_CHANNEL_NODE, type=SHADER_NODE_TREE)
    node_tree.nodes.clear()

    separateRgbNode = node_tree.nodes.new(SHADER_NODE_SEPARATE_RGB)
    separateRgbNode.location = Vector((0, 0))

    # Per-node factor defaults (G=1) are superseded by the linked group
    # inputs below once the graph is evaluated.
    invertRNode = node_tree.nodes.new(INVERT_NODE)
    invertRNode.inputs[0].default_value = 0
    invertRNode.location = separateRgbNode.location + Vector((200, 40))
    invertGNode = node_tree.nodes.new(INVERT_NODE)
    invertGNode.inputs[0].default_value = 1
    invertGNode.location = separateRgbNode.location + Vector((200, -60))
    invertBNode = node_tree.nodes.new(INVERT_NODE)
    invertBNode.inputs[0].default_value = 0
    invertBNode.location = separateRgbNode.location + Vector((200, -160))

    combineRgbNode = node_tree.nodes.new(SHADER_NODE_COMBINE_RGB)
    combineRgbNode.location = separateRgbNode.location + Vector((600, 0))

    # Input/Output
    group_inputs = node_tree.nodes.new(NODE_GROUP_INPUT)
    group_inputs.location = separateRgbNode.location + Vector((-200, -100))
    group_outputs = node_tree.nodes.new(NODE_GROUP_OUTPUT)
    group_outputs.location = combineRgbNode.location + Vector((200, 0))
    node_tree.interface.clear()

    # Input/Output Sockets: one color plus a 0..1 invert factor per channel
    input_color = node_tree.interface.new_socket("Color", in_out="INPUT", socket_type="NodeSocketColor")
    input_color.default_value = GREY_COLOR
    invert_r = node_tree.interface.new_socket("R", in_out="INPUT", socket_type="NodeSocketFloat")
    invert_r.subtype = "FACTOR"
    invert_r.default_value = 0
    invert_r.min_value = 0
    invert_r.max_value = 1
    invert_g = node_tree.interface.new_socket("G", in_out="INPUT", socket_type="NodeSocketFloat")
    invert_g.subtype = "FACTOR"
    invert_g.default_value = 0
    invert_g.min_value = 0
    invert_g.max_value = 1
    invert_b = node_tree.interface.new_socket("B", in_out="INPUT", socket_type="NodeSocketFloat")
    invert_b.subtype = "FACTOR"
    invert_b.default_value = 0
    invert_b.min_value = 0
    invert_b.max_value = 1

    output_value = node_tree.interface.new_socket("Color", in_out="OUTPUT", socket_type="NodeSocketColor")

    # Links Input
    links = node_tree.links
    links.new(group_inputs.outputs['Color'], separateRgbNode.inputs['Image'])
    links.new(group_inputs.outputs['R'], invertRNode.inputs['Fac'])
    links.new(group_inputs.outputs['G'], invertGNode.inputs['Fac'])
    links.new(group_inputs.outputs['B'], invertBNode.inputs['Fac'])
    links.new(separateRgbNode.outputs['R'], invertRNode.inputs['Color'])
    links.new(separateRgbNode.outputs['G'], invertGNode.inputs['Color'])
    links.new(separateRgbNode.outputs['B'], invertBNode.inputs['Color'])

    links.new(invertRNode.outputs['Color'], combineRgbNode.inputs['R'])
    links.new(invertGNode.outputs['Color'], combineRgbNode.inputs['G'])
    links.new(invertBNode.outputs['Color'], combineRgbNode.inputs['B'])

    links.new(combineRgbNode.outputs['Image'], group_outputs.inputs['Color'])

    return node_tree
465
+
466
+
467
+
def normal_mask_group():
    """Create (or fetch) the 'Normal Mask' node group.

    Uses the R and G channels of a mask texture to fade two micro-bump
    normals toward the neutral normal color, then blends them with the
    'Normal Mix' group (which must already exist in ``bpy.data.node_groups``).

    :returns: the node group datablock.
    """
    # create a group — reuse the existing one if it was already built
    if NORMAL_MASK_NODE in bpy.data.node_groups:
        return bpy.data.node_groups[NORMAL_MASK_NODE]
    node_tree = bpy.data.node_groups.new(name=NORMAL_MASK_NODE, type=SHADER_NODE_TREE)
    node_tree.nodes.clear()

    maskSeparateNode = node_tree.nodes.new(SHADER_NODE_SEPARATE_RGB)

    # Mask Red Channel: POWER (exponent 1 = identity) then MIX toward neutral
    maskRedPowerNode = node_tree.nodes.new(SHADER_NODE_MATH)
    maskRedPowerNode.operation = 'POWER'
    maskRedPowerNode.inputs[1].default_value = 1
    maskRedPowerNode.location = maskSeparateNode.location + Vector((200, 100))

    maskMixRedNode = node_tree.nodes.new(RGB_MIX_NODE)
    maskMixRedNode.blend_type = 'MIX'
    maskMixRedNode.inputs[1].default_value = (NORMAL_COLOR)
    maskMixRedNode.location = maskRedPowerNode.location + Vector((200, 100))

    # Mask Green Channel: same treatment for the second normal
    maskGreenPowerNode = node_tree.nodes.new(SHADER_NODE_MATH)
    maskGreenPowerNode.operation = 'POWER'
    maskGreenPowerNode.inputs[1].default_value = 1
    maskGreenPowerNode.location = maskSeparateNode.location + Vector((200, -100))

    maskMixGreenNode = node_tree.nodes.new(RGB_MIX_NODE)
    maskMixGreenNode.blend_type = 'MIX'
    maskMixGreenNode.inputs[1].default_value = (NORMAL_COLOR)
    maskMixGreenNode.location = maskGreenPowerNode.location + Vector((200, -100))

    # Mix Masked Normals via the shared 'Normal Mix' group
    normalMixNode = getNodeGroup(node_tree, MIX_NORMAL_NODE)
    normalMixNode.location = maskSeparateNode.location + Vector((600, 0))

    node_tree.links.new(maskSeparateNode.outputs['R'], maskRedPowerNode.inputs[0])
    node_tree.links.new(maskSeparateNode.outputs['G'], maskGreenPowerNode.inputs[0])
    node_tree.links.new(maskRedPowerNode.outputs['Value'], maskMixRedNode.inputs[0])
    node_tree.links.new(maskGreenPowerNode.outputs['Value'], maskMixGreenNode.inputs[0])
    node_tree.links.new(maskMixRedNode.outputs['Color'], normalMixNode.inputs['Main'])
    node_tree.links.new(maskMixGreenNode.outputs['Color'], normalMixNode.inputs['Detail'])

    # Input/Output
    group_inputs = node_tree.nodes.new(NODE_GROUP_INPUT)
    group_inputs.location = maskSeparateNode.location + Vector((-200, -100))
    group_outputs = node_tree.nodes.new(NODE_GROUP_OUTPUT)
    group_outputs.location = normalMixNode.location + Vector((200, 0))
    node_tree.interface.clear()

    # Input/Output Sockets
    mask_color = node_tree.interface.new_socket("Mask", in_out="INPUT", socket_type="NodeSocketColor")
    mask_color.default_value = LIGHTMAP_COLOR
    normalMain_color = node_tree.interface.new_socket("Normal1", in_out="INPUT", socket_type="NodeSocketColor")
    normalMain_color.default_value = NORMAL_COLOR
    normalDetail_color = node_tree.interface.new_socket("Normal2", in_out="INPUT", socket_type="NodeSocketColor")
    normalDetail_color.default_value = NORMAL_COLOR

    output_value = node_tree.interface.new_socket("Normal", in_out="OUTPUT", socket_type="NodeSocketColor")

    # Link Inputs/Output
    node_tree.links.new(group_inputs.outputs['Mask'], maskSeparateNode.inputs['Image'])
    node_tree.links.new(group_inputs.outputs['Normal1'], maskMixRedNode.inputs[2])
    node_tree.links.new(group_inputs.outputs['Normal2'], maskMixGreenNode.inputs[2])
    node_tree.links.new(normalMixNode.outputs['Color'], group_outputs.inputs['Normal'])

    # BUGFIX: previously only the early-exit path returned the group; the
    # creation path fell through and returned None, unlike every sibling
    # *_group() builder.
    return node_tree
531
+
532
+
533
+
def create_group_nodes():
    """Ensure every custom node group used by the XPS material exists.

    Order matters: 'Normal Mask' and 'XPS Shader' look up the groups
    created before them.
    """
    for build_group in (mix_normal_group, invert_channel_group,
                        normal_mask_group, xps_shader_group):
        build_group()
538
+
539
+
540
+
def xps_shader_group():
    """Create (or fetch) the 'XPS Shader' node group.

    Wraps a Principled BSDF plus an additive environment-emission pass
    behind inputs matching the XPS texture slots (Diffuse, Lightmap,
    Specular, Bump/MicroBump maps, Environment, Alpha).
    NOTE: the ``output_*`` local names below actually hold *input* sockets.
    Requires the 'Invert Channel', 'Normal Mask' and 'Normal Mix' groups to
    already exist in ``bpy.data.node_groups``.
    """
    # create a group — reuse the existing one if it was already built
    if XPS_SHADER_NODE in bpy.data.node_groups:
        return bpy.data.node_groups[XPS_SHADER_NODE]
    shader = bpy.data.node_groups.new(name=XPS_SHADER_NODE, type=SHADER_NODE_TREE)

    # Group inputs
    group_input = shader.nodes.new(NODE_GROUP_INPUT)
    group_input.location += Vector((-1200, 0))

    group_output = shader.nodes.new(NODE_GROUP_OUTPUT)
    group_output.location += Vector((600, 0))

    # Input sockets, one per XPS texture slot (defaults are neutral colors).
    output_diffuse = shader.interface.new_socket("Diffuse", in_out="INPUT", socket_type="NodeSocketColor")
    output_diffuse.default_value = (DIFFUSE_COLOR)
    output_lightmap = shader.interface.new_socket("Lightmap", in_out="INPUT", socket_type="NodeSocketColor")
    output_lightmap.default_value = (LIGHTMAP_COLOR)
    output_specular = shader.interface.new_socket("Specular", in_out="INPUT", socket_type="NodeSocketColor")
    output_specular.default_value = (SPECULAR_COLOR)
    output_emission = shader.interface.new_socket("Emission", in_out="INPUT", socket_type="NodeSocketColor")
    output_normal = shader.interface.new_socket("Bump Map", in_out="INPUT", socket_type="NodeSocketColor")
    output_normal.default_value = (NORMAL_COLOR)
    output_bump_mask = shader.interface.new_socket("Bump Mask", in_out="INPUT", socket_type="NodeSocketColor")
    output_microbump1 = shader.interface.new_socket("MicroBump 1", in_out="INPUT", socket_type="NodeSocketColor")
    output_microbump1.default_value = (NORMAL_COLOR)
    output_microbump2 = shader.interface.new_socket("MicroBump 2", in_out="INPUT", socket_type="NodeSocketColor")
    output_microbump2.default_value = (NORMAL_COLOR)
    output_environment = shader.interface.new_socket("Environment", in_out="INPUT", socket_type="NodeSocketColor")
    output_alpha = shader.interface.new_socket("Alpha", in_out="INPUT", socket_type="NodeSocketFloat")
    output_alpha.subtype = "FACTOR"
    output_alpha.min_value = 0
    output_alpha.max_value = 1
    output_alpha.default_value = 1

    # Group outputs
    shader.interface.new_socket("Shader", in_out="OUTPUT", socket_type="NodeSocketShader")

    principled = shader.nodes.new(PRINCIPLED_SHADER_NODE)

    # Diffuse and Lightmap: multiplied together into the base color
    mix_rgb = shader.nodes.new(RGB_MIX_NODE)
    mix_rgb.location += Vector((-800, 100))
    mix_rgb.inputs[0].default_value = 1
    mix_rgb.blend_type = 'MULTIPLY'

    shader.links.new(group_input.outputs['Diffuse'], mix_rgb.inputs[1])
    shader.links.new(group_input.outputs['Lightmap'], mix_rgb.inputs[2])
    shader.links.new(mix_rgb.outputs['Color'], principled.inputs['Base Color'])

    # Specular: roughness = 1 - luminance(specular)^2
    bw = shader.nodes.new(RGB_TO_BW_NODE)
    bw.location += Vector((-800, -100))
    pow = shader.nodes.new(SHADER_NODE_MATH)
    pow.location += Vector((-600, -100))
    pow.inputs[1].default_value = 2
    pow.operation = 'POWER'
    inv = shader.nodes.new(INVERT_NODE)
    inv.location += Vector((-400, -100))

    shader.links.new(group_input.outputs['Specular'], bw.inputs['Color'])
    shader.links.new(bw.outputs['Val'], pow.inputs[0])
    shader.links.new(pow.outputs['Value'], inv.inputs['Color'])
    shader.links.new(inv.outputs['Color'], principled.inputs['Roughness'])

    # Alpha & Emission ('Emission Color' is the Blender 4.x input name)
    shader.links.new(group_input.outputs['Alpha'], principled.inputs['Alpha'])
    shader.links.new(group_input.outputs['Emission'], principled.inputs['Emission Color'])

    # Normals: channel-invert each map, mask the micro bumps, mix with main
    normal_invert_channel = getNodeGroup(shader, INVERT_CHANNEL_NODE)
    normal_invert_channel.location += Vector((-800, -500))
    # normal_invert_channel.inputs['R'].default_value = flags[xps_const.TANGENT_SPACE_RED]
    # normal_invert_channel.inputs['G'].default_value = flags[xps_const.TANGENT_SPACE_GREEN]
    # normal_invert_channel.inputs['B'].default_value = flags[xps_const.TANGENT_SPACE_BLUE]
    shader.links.new(group_input.outputs['Bump Map'], normal_invert_channel.inputs['Color'])

    microbump1_invert_channel = getNodeGroup(shader, INVERT_CHANNEL_NODE)
    microbump1_invert_channel.location += Vector((-800, -700))
    # microbump1_invert_channel.inputs['R'].default_value = flags[xps_const.TANGENT_SPACE_RED]
    # microbump1_invert_channel.inputs['G'].default_value = flags[xps_const.TANGENT_SPACE_GREEN]
    # microbump1_invert_channel.inputs['B'].default_value = flags[xps_const.TANGENT_SPACE_BLUE]
    shader.links.new(group_input.outputs['MicroBump 1'], microbump1_invert_channel.inputs['Color'])

    microbump2_invert_channel = getNodeGroup(shader, INVERT_CHANNEL_NODE)
    microbump2_invert_channel.location += Vector((-800, -900))
    # microbump2_invert_channel.inputs['R'].default_value = flags[xps_const.TANGENT_SPACE_RED]
    # microbump2_invert_channel.inputs['G'].default_value = flags[xps_const.TANGENT_SPACE_GREEN]
    # microbump2_invert_channel.inputs['B'].default_value = flags[xps_const.TANGENT_SPACE_BLUE]
    shader.links.new(group_input.outputs['MicroBump 2'], microbump2_invert_channel.inputs['Color'])

    normal_mask = getNodeGroup(shader, NORMAL_MASK_NODE)
    normal_mask.location += Vector((-600, -600))
    shader.links.new(group_input.outputs['Bump Mask'], normal_mask.inputs['Mask'])

    normal_mix = getNodeGroup(shader, MIX_NORMAL_NODE)
    normal_mix.location += Vector((-400, -500))

    normal_map = shader.nodes.new(NORMAL_MAP_NODE)
    normal_map.location += Vector((-200, -500))

    shader.links.new(microbump1_invert_channel.outputs['Color'], normal_mask.inputs['Normal1'])
    shader.links.new(microbump2_invert_channel.outputs['Color'], normal_mask.inputs['Normal2'])

    shader.links.new(normal_mask.outputs['Normal'], normal_mix.inputs['Detail'])
    shader.links.new(normal_invert_channel.outputs['Color'], normal_mix.inputs['Main'])
    shader.links.new(normal_mix.outputs['Color'], normal_map.inputs['Color'])
    shader.links.new(normal_map.outputs['Normal'], principled.inputs['Normal'])

    # Emission: the Environment input drives an additive emission pass
    emission_shader = shader.nodes.new(BSDF_EMISSION_NODE)
    emission_shader.location += Vector((100, 200))
    shader_add = shader.nodes.new(SHADER_ADD_NODE)
    shader_add.location += Vector((300, 100))

    shader.links.new(group_input.outputs['Environment'], emission_shader.inputs['Color'])
    shader.links.new(emission_shader.outputs['Emission'], shader_add.inputs[0])
    shader.links.new(principled.outputs['BSDF'], shader_add.inputs[1])
    shader.links.new(shader_add.outputs['Shader'], group_output.inputs[0])

    return shader
+206
xnalara_io_Tools/mock_xps_data.py
+206
xnalara_io_Tools/mock_xps_data.py
···
1
+
from getpass import getuser
2
+
from socket import gethostname
3
+
4
+
from . import bin_ops
5
+
from . import xps_const
6
+
from . import xps_types
7
+
import bpy
8
+
9
+
10
+
def mockData():
    """Assemble a complete in-memory XpsData fixture (header, bones, meshes)."""
    header = buildHeader()
    skeleton = buildBones()
    geometry = buildMeshes()
    return xps_types.XpsData(header, skeleton, geometry)
17
+
18
+
19
+
def fillPoseString(poseBytes):
    """Pad the encoded pose with b'0' up to the next ROUND_MULTIPLE boundary."""
    unpadded_len = len(poseBytes)
    padded_len = bin_ops.roundToMultiple(unpadded_len, xps_const.ROUND_MULTIPLE)
    return poseBytes + b'0' * (padded_len - unpadded_len)
25
+
26
+
27
+
def getPoseStringLength(poseString):
    """Return the character count of *poseString*."""
    length = len(poseString)
    return length
29
+
30
+
31
+
def bonePoseCount(poseString):
    """Count pose-bone entries: one per newline in *poseString*.

    Equivalent to ``len(poseString.split('\\n')) - 1``.
    """
    return poseString.count('\n')
34
+
35
+
36
+
def buildHeader(poseString=''):
    """Build a mock XpsHeader, embedding *poseString* in the settings blob.

    The settings blob is laid out as: unknown block (hash/items/type), pose
    block (length, bone count, padded pose bytes), then a fixed-value block
    zero-padded to SETTINGS_LEN. User and host names are stored reversed.
    """
    invertUserName = getuser()[::-1]
    invertHostName = gethostname()[::-1]
    header = xps_types.XpsHeader()
    header.magic_number = xps_const.MAGIC_NUMBER
    header.version_mayor = xps_const.XPS_VERSION_MAYOR
    header.version_minor = xps_const.XPS_VERSION_MINOR
    header.xna_aral = xps_const.XNA_ARAL
    header.machine = invertHostName
    header.user = invertUserName
    header.files = f'{invertUserName}@{bpy.data.filepath}'
    # header.settings = bytes([0])*
    # (xps_const.SETTINGS_LEN * xps_const.ROUND_MULTIPLE)

    boneCount = bonePoseCount(poseString)
    poseBytes = poseString.encode(xps_const.ENCODING_WRITE)
    default_pose = fillPoseString(poseBytes)
    poseLengthUnround = getPoseStringLength(poseString)

    var_1 = bin_ops.writeUInt32(180)  # Hash
    var_2 = bin_ops.writeUInt32(3)  # Items

    var_3 = bin_ops.writeUInt32(1)  # Type
    var_4 = bin_ops.writeUInt32(poseLengthUnround)  # Pose Lenght Unround
    var_5 = bin_ops.writeUInt32(boneCount)  # Pose Bone Counts
    # POSE DATA
    var_6 = bin_ops.writeUInt32(2)  # Type
    var_7 = bin_ops.writeUInt32(4)  # Count
    var_8 = bin_ops.writeUInt32(4)  # Info
    var_9 = bin_ops.writeUInt32(2)  # Count N1
    var_10 = bin_ops.writeUInt32(1)  # Count N2
    var_11 = bin_ops.writeUInt32(3)  # Count N3
    var_12 = bin_ops.writeUInt32(0)  # Count N4
    var_13 = bin_ops.writeUInt32(4)  # Type
    var_14 = bin_ops.writeUInt32(3)  # Count
    var_15 = bin_ops.writeUInt32(5)  # Info
    var_16 = bin_ops.writeUInt32(4)
    var_17 = bin_ops.writeUInt32(0)
    var_18 = bin_ops.writeUInt32(256)

    # Fixed trailer values, concatenated in order.
    header_empty = b''
    header_empty += var_6
    header_empty += var_7
    header_empty += var_8
    header_empty += var_9
    header_empty += var_10
    header_empty += var_11
    header_empty += var_12
    header_empty += var_13
    header_empty += var_14
    header_empty += var_15
    header_empty += var_16
    header_empty += var_17
    header_empty += var_18

    header_unk = var_1 + var_2 + var_3
    header_pose = var_4 + var_5 + default_pose
    # Zero-pad the trailer to SETTINGS_LEN bytes (empty_count uint32 words).
    empty_count = ((xps_const.SETTINGS_LEN - len(header_empty)) // 4)
    header_empty += bin_ops.writeUInt32(0) * empty_count

    settings = header_unk + header_pose + header_empty
    # settingsLen is stored in 4-byte words, not bytes.
    header.settingsLen = len(settings) // 4
    header.settings = settings

    # logHeader(header)
    return header
102
+
103
+
104
+
def buildBones():
    """Return two mock XpsBone instances: a root bone and one child."""
    bone_specs = [
        (0, 'bone1', [0, 0, 0], -1),        # root (no parent)
        (1, 'bone2', [0.5, 0.5, 0.5], 0),   # child of bone1
    ]
    return [xps_types.XpsBone(bone_id, name, coords, parent)
            for bone_id, name, coords, parent in bone_specs]
121
+
122
+
123
+
def buildMeshes():
    """Build a single-triangle mock mesh for testing the XPS exporter.

    Returns a list containing one XpsMesh named 'Mesh1' with two
    textures (both on UV layer 0) and three vertices forming the
    face (0, 1, 2).
    """
    meshes = []
    meshName = 'Mesh1'
    uvLayerCount = 1

    # Textures — filenames are arbitrary mock values (kept as-is).
    textures = []
    textures.append(xps_types.XpsTexture(0, 'textutefile1.png', 0))
    textures.append(xps_types.XpsTexture(1, 'textutefile2.png', 0))

    def _zero_weights():
        # Every mock vertex carries four zero bone weights on bone 0.
        return (
            xps_types.BoneWeight(0, 0),
            xps_types.BoneWeight(0, 0),
            xps_types.BoneWeight(0, 0),
            xps_types.BoneWeight(0, 0))

    # Vertex data: (vertexId, coord, normal, uv). All share the same color.
    vertexColor = (255, 255, 255, 0)
    vertex_specs = [
        (0, (1, 0, 0), (0, 0, 1), (.2, .4)),
        (1, (0, 1, 0), (0, 1, 0), (.3, .5)),
        (2, (0, 0, 1), (1, 0, 0), (.3, .9)),
    ]

    # BUG FIX: the original code built the vertexId-0 XpsVertex but never
    # appended it to the vertex list, so the face (0, 1, 2) referenced a
    # missing vertex. All three vertices are now appended.
    vertex = []
    for vertexId, coord, normal, uv in vertex_specs:
        vertex.append(xps_types.XpsVertex(
            vertexId, coord, normal, vertexColor, [uv], _zero_weights()))

    faces = []
    faces.append((0, 1, 2))

    xpsMesh = xps_types.XpsMesh(
        meshName, textures, vertex, faces, uvLayerCount)
    meshes.append(xpsMesh)

    return meshes
201
+
202
+
203
+
if __name__ == "__main__":
    # Manual smoke run: build the mock model, bracketed by status markers.
    print('BUILD')
    mock_result = mockData()
    print('FINISH')
+1
xnalara_io_Tools/modules/ALXAddonUpdater/.github/FUNDING.yml
+1
xnalara_io_Tools/modules/ALXAddonUpdater/.github/FUNDING.yml
···
1
+
ko_fi: housearhal
+674
xnalara_io_Tools/modules/ALXAddonUpdater/LICENSE
+674
xnalara_io_Tools/modules/ALXAddonUpdater/LICENSE
···
1
+
GNU GENERAL PUBLIC LICENSE
2
+
Version 3, 29 June 2007
3
+
4
+
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
5
+
Everyone is permitted to copy and distribute verbatim copies
6
+
of this license document, but changing it is not allowed.
7
+
8
+
Preamble
9
+
10
+
The GNU General Public License is a free, copyleft license for
11
+
software and other kinds of works.
12
+
13
+
The licenses for most software and other practical works are designed
14
+
to take away your freedom to share and change the works. By contrast,
15
+
the GNU General Public License is intended to guarantee your freedom to
16
+
share and change all versions of a program--to make sure it remains free
17
+
software for all its users. We, the Free Software Foundation, use the
18
+
GNU General Public License for most of our software; it applies also to
19
+
any other work released this way by its authors. You can apply it to
20
+
your programs, too.
21
+
22
+
When we speak of free software, we are referring to freedom, not
23
+
price. Our General Public Licenses are designed to make sure that you
24
+
have the freedom to distribute copies of free software (and charge for
25
+
them if you wish), that you receive source code or can get it if you
26
+
want it, that you can change the software or use pieces of it in new
27
+
free programs, and that you know you can do these things.
28
+
29
+
To protect your rights, we need to prevent others from denying you
30
+
these rights or asking you to surrender the rights. Therefore, you have
31
+
certain responsibilities if you distribute copies of the software, or if
32
+
you modify it: responsibilities to respect the freedom of others.
33
+
34
+
For example, if you distribute copies of such a program, whether
35
+
gratis or for a fee, you must pass on to the recipients the same
36
+
freedoms that you received. You must make sure that they, too, receive
37
+
or can get the source code. And you must show them these terms so they
38
+
know their rights.
39
+
40
+
Developers that use the GNU GPL protect your rights with two steps:
41
+
(1) assert copyright on the software, and (2) offer you this License
42
+
giving you legal permission to copy, distribute and/or modify it.
43
+
44
+
For the developers' and authors' protection, the GPL clearly explains
45
+
that there is no warranty for this free software. For both users' and
46
+
authors' sake, the GPL requires that modified versions be marked as
47
+
changed, so that their problems will not be attributed erroneously to
48
+
authors of previous versions.
49
+
50
+
Some devices are designed to deny users access to install or run
51
+
modified versions of the software inside them, although the manufacturer
52
+
can do so. This is fundamentally incompatible with the aim of
53
+
protecting users' freedom to change the software. The systematic
54
+
pattern of such abuse occurs in the area of products for individuals to
55
+
use, which is precisely where it is most unacceptable. Therefore, we
56
+
have designed this version of the GPL to prohibit the practice for those
57
+
products. If such problems arise substantially in other domains, we
58
+
stand ready to extend this provision to those domains in future versions
59
+
of the GPL, as needed to protect the freedom of users.
60
+
61
+
Finally, every program is threatened constantly by software patents.
62
+
States should not allow patents to restrict development and use of
63
+
software on general-purpose computers, but in those that do, we wish to
64
+
avoid the special danger that patents applied to a free program could
65
+
make it effectively proprietary. To prevent this, the GPL assures that
66
+
patents cannot be used to render the program non-free.
67
+
68
+
The precise terms and conditions for copying, distribution and
69
+
modification follow.
70
+
71
+
TERMS AND CONDITIONS
72
+
73
+
0. Definitions.
74
+
75
+
"This License" refers to version 3 of the GNU General Public License.
76
+
77
+
"Copyright" also means copyright-like laws that apply to other kinds of
78
+
works, such as semiconductor masks.
79
+
80
+
"The Program" refers to any copyrightable work licensed under this
81
+
License. Each licensee is addressed as "you". "Licensees" and
82
+
"recipients" may be individuals or organizations.
83
+
84
+
To "modify" a work means to copy from or adapt all or part of the work
85
+
in a fashion requiring copyright permission, other than the making of an
86
+
exact copy. The resulting work is called a "modified version" of the
87
+
earlier work or a work "based on" the earlier work.
88
+
89
+
A "covered work" means either the unmodified Program or a work based
90
+
on the Program.
91
+
92
+
To "propagate" a work means to do anything with it that, without
93
+
permission, would make you directly or secondarily liable for
94
+
infringement under applicable copyright law, except executing it on a
95
+
computer or modifying a private copy. Propagation includes copying,
96
+
distribution (with or without modification), making available to the
97
+
public, and in some countries other activities as well.
98
+
99
+
To "convey" a work means any kind of propagation that enables other
100
+
parties to make or receive copies. Mere interaction with a user through
101
+
a computer network, with no transfer of a copy, is not conveying.
102
+
103
+
An interactive user interface displays "Appropriate Legal Notices"
104
+
to the extent that it includes a convenient and prominently visible
105
+
feature that (1) displays an appropriate copyright notice, and (2)
106
+
tells the user that there is no warranty for the work (except to the
107
+
extent that warranties are provided), that licensees may convey the
108
+
work under this License, and how to view a copy of this License. If
109
+
the interface presents a list of user commands or options, such as a
110
+
menu, a prominent item in the list meets this criterion.
111
+
112
+
1. Source Code.
113
+
114
+
The "source code" for a work means the preferred form of the work
115
+
for making modifications to it. "Object code" means any non-source
116
+
form of a work.
117
+
118
+
A "Standard Interface" means an interface that either is an official
119
+
standard defined by a recognized standards body, or, in the case of
120
+
interfaces specified for a particular programming language, one that
121
+
is widely used among developers working in that language.
122
+
123
+
The "System Libraries" of an executable work include anything, other
124
+
than the work as a whole, that (a) is included in the normal form of
125
+
packaging a Major Component, but which is not part of that Major
126
+
Component, and (b) serves only to enable use of the work with that
127
+
Major Component, or to implement a Standard Interface for which an
128
+
implementation is available to the public in source code form. A
129
+
"Major Component", in this context, means a major essential component
130
+
(kernel, window system, and so on) of the specific operating system
131
+
(if any) on which the executable work runs, or a compiler used to
132
+
produce the work, or an object code interpreter used to run it.
133
+
134
+
The "Corresponding Source" for a work in object code form means all
135
+
the source code needed to generate, install, and (for an executable
136
+
work) run the object code and to modify the work, including scripts to
137
+
control those activities. However, it does not include the work's
138
+
System Libraries, or general-purpose tools or generally available free
139
+
programs which are used unmodified in performing those activities but
140
+
which are not part of the work. For example, Corresponding Source
141
+
includes interface definition files associated with source files for
142
+
the work, and the source code for shared libraries and dynamically
143
+
linked subprograms that the work is specifically designed to require,
144
+
such as by intimate data communication or control flow between those
145
+
subprograms and other parts of the work.
146
+
147
+
The Corresponding Source need not include anything that users
148
+
can regenerate automatically from other parts of the Corresponding
149
+
Source.
150
+
151
+
The Corresponding Source for a work in source code form is that
152
+
same work.
153
+
154
+
2. Basic Permissions.
155
+
156
+
All rights granted under this License are granted for the term of
157
+
copyright on the Program, and are irrevocable provided the stated
158
+
conditions are met. This License explicitly affirms your unlimited
159
+
permission to run the unmodified Program. The output from running a
160
+
covered work is covered by this License only if the output, given its
161
+
content, constitutes a covered work. This License acknowledges your
162
+
rights of fair use or other equivalent, as provided by copyright law.
163
+
164
+
You may make, run and propagate covered works that you do not
165
+
convey, without conditions so long as your license otherwise remains
166
+
in force. You may convey covered works to others for the sole purpose
167
+
of having them make modifications exclusively for you, or provide you
168
+
with facilities for running those works, provided that you comply with
169
+
the terms of this License in conveying all material for which you do
170
+
not control copyright. Those thus making or running the covered works
171
+
for you must do so exclusively on your behalf, under your direction
172
+
and control, on terms that prohibit them from making any copies of
173
+
your copyrighted material outside their relationship with you.
174
+
175
+
Conveying under any other circumstances is permitted solely under
176
+
the conditions stated below. Sublicensing is not allowed; section 10
177
+
makes it unnecessary.
178
+
179
+
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
180
+
181
+
No covered work shall be deemed part of an effective technological
182
+
measure under any applicable law fulfilling obligations under article
183
+
11 of the WIPO copyright treaty adopted on 20 December 1996, or
184
+
similar laws prohibiting or restricting circumvention of such
185
+
measures.
186
+
187
+
When you convey a covered work, you waive any legal power to forbid
188
+
circumvention of technological measures to the extent such circumvention
189
+
is effected by exercising rights under this License with respect to
190
+
the covered work, and you disclaim any intention to limit operation or
191
+
modification of the work as a means of enforcing, against the work's
192
+
users, your or third parties' legal rights to forbid circumvention of
193
+
technological measures.
194
+
195
+
4. Conveying Verbatim Copies.
196
+
197
+
You may convey verbatim copies of the Program's source code as you
198
+
receive it, in any medium, provided that you conspicuously and
199
+
appropriately publish on each copy an appropriate copyright notice;
200
+
keep intact all notices stating that this License and any
201
+
non-permissive terms added in accord with section 7 apply to the code;
202
+
keep intact all notices of the absence of any warranty; and give all
203
+
recipients a copy of this License along with the Program.
204
+
205
+
You may charge any price or no price for each copy that you convey,
206
+
and you may offer support or warranty protection for a fee.
207
+
208
+
5. Conveying Modified Source Versions.
209
+
210
+
You may convey a work based on the Program, or the modifications to
211
+
produce it from the Program, in the form of source code under the
212
+
terms of section 4, provided that you also meet all of these conditions:
213
+
214
+
a) The work must carry prominent notices stating that you modified
215
+
it, and giving a relevant date.
216
+
217
+
b) The work must carry prominent notices stating that it is
218
+
released under this License and any conditions added under section
219
+
7. This requirement modifies the requirement in section 4 to
220
+
"keep intact all notices".
221
+
222
+
c) You must license the entire work, as a whole, under this
223
+
License to anyone who comes into possession of a copy. This
224
+
License will therefore apply, along with any applicable section 7
225
+
additional terms, to the whole of the work, and all its parts,
226
+
regardless of how they are packaged. This License gives no
227
+
permission to license the work in any other way, but it does not
228
+
invalidate such permission if you have separately received it.
229
+
230
+
d) If the work has interactive user interfaces, each must display
231
+
Appropriate Legal Notices; however, if the Program has interactive
232
+
interfaces that do not display Appropriate Legal Notices, your
233
+
work need not make them do so.
234
+
235
+
A compilation of a covered work with other separate and independent
236
+
works, which are not by their nature extensions of the covered work,
237
+
and which are not combined with it such as to form a larger program,
238
+
in or on a volume of a storage or distribution medium, is called an
239
+
"aggregate" if the compilation and its resulting copyright are not
240
+
used to limit the access or legal rights of the compilation's users
241
+
beyond what the individual works permit. Inclusion of a covered work
242
+
in an aggregate does not cause this License to apply to the other
243
+
parts of the aggregate.
244
+
245
+
6. Conveying Non-Source Forms.
246
+
247
+
You may convey a covered work in object code form under the terms
248
+
of sections 4 and 5, provided that you also convey the
249
+
machine-readable Corresponding Source under the terms of this License,
250
+
in one of these ways:
251
+
252
+
a) Convey the object code in, or embodied in, a physical product
253
+
(including a physical distribution medium), accompanied by the
254
+
Corresponding Source fixed on a durable physical medium
255
+
customarily used for software interchange.
256
+
257
+
b) Convey the object code in, or embodied in, a physical product
258
+
(including a physical distribution medium), accompanied by a
259
+
written offer, valid for at least three years and valid for as
260
+
long as you offer spare parts or customer support for that product
261
+
model, to give anyone who possesses the object code either (1) a
262
+
copy of the Corresponding Source for all the software in the
263
+
product that is covered by this License, on a durable physical
264
+
medium customarily used for software interchange, for a price no
265
+
more than your reasonable cost of physically performing this
266
+
conveying of source, or (2) access to copy the
267
+
Corresponding Source from a network server at no charge.
268
+
269
+
c) Convey individual copies of the object code with a copy of the
270
+
written offer to provide the Corresponding Source. This
271
+
alternative is allowed only occasionally and noncommercially, and
272
+
only if you received the object code with such an offer, in accord
273
+
with subsection 6b.
274
+
275
+
d) Convey the object code by offering access from a designated
276
+
place (gratis or for a charge), and offer equivalent access to the
277
+
Corresponding Source in the same way through the same place at no
278
+
further charge. You need not require recipients to copy the
279
+
Corresponding Source along with the object code. If the place to
280
+
copy the object code is a network server, the Corresponding Source
281
+
may be on a different server (operated by you or a third party)
282
+
that supports equivalent copying facilities, provided you maintain
283
+
clear directions next to the object code saying where to find the
284
+
Corresponding Source. Regardless of what server hosts the
285
+
Corresponding Source, you remain obligated to ensure that it is
286
+
available for as long as needed to satisfy these requirements.
287
+
288
+
e) Convey the object code using peer-to-peer transmission, provided
289
+
you inform other peers where the object code and Corresponding
290
+
Source of the work are being offered to the general public at no
291
+
charge under subsection 6d.
292
+
293
+
A separable portion of the object code, whose source code is excluded
294
+
from the Corresponding Source as a System Library, need not be
295
+
included in conveying the object code work.
296
+
297
+
A "User Product" is either (1) a "consumer product", which means any
298
+
tangible personal property which is normally used for personal, family,
299
+
or household purposes, or (2) anything designed or sold for incorporation
300
+
into a dwelling. In determining whether a product is a consumer product,
301
+
doubtful cases shall be resolved in favor of coverage. For a particular
302
+
product received by a particular user, "normally used" refers to a
303
+
typical or common use of that class of product, regardless of the status
304
+
of the particular user or of the way in which the particular user
305
+
actually uses, or expects or is expected to use, the product. A product
306
+
is a consumer product regardless of whether the product has substantial
307
+
commercial, industrial or non-consumer uses, unless such uses represent
308
+
the only significant mode of use of the product.
309
+
310
+
"Installation Information" for a User Product means any methods,
311
+
procedures, authorization keys, or other information required to install
312
+
and execute modified versions of a covered work in that User Product from
313
+
a modified version of its Corresponding Source. The information must
314
+
suffice to ensure that the continued functioning of the modified object
315
+
code is in no case prevented or interfered with solely because
316
+
modification has been made.
317
+
318
+
If you convey an object code work under this section in, or with, or
319
+
specifically for use in, a User Product, and the conveying occurs as
320
+
part of a transaction in which the right of possession and use of the
321
+
User Product is transferred to the recipient in perpetuity or for a
322
+
fixed term (regardless of how the transaction is characterized), the
323
+
Corresponding Source conveyed under this section must be accompanied
324
+
by the Installation Information. But this requirement does not apply
325
+
if neither you nor any third party retains the ability to install
326
+
modified object code on the User Product (for example, the work has
327
+
been installed in ROM).
328
+
329
+
The requirement to provide Installation Information does not include a
330
+
requirement to continue to provide support service, warranty, or updates
331
+
for a work that has been modified or installed by the recipient, or for
332
+
the User Product in which it has been modified or installed. Access to a
333
+
network may be denied when the modification itself materially and
334
+
adversely affects the operation of the network or violates the rules and
335
+
protocols for communication across the network.
336
+
337
+
Corresponding Source conveyed, and Installation Information provided,
338
+
in accord with this section must be in a format that is publicly
339
+
documented (and with an implementation available to the public in
340
+
source code form), and must require no special password or key for
341
+
unpacking, reading or copying.
342
+
343
+
7. Additional Terms.
344
+
345
+
"Additional permissions" are terms that supplement the terms of this
346
+
License by making exceptions from one or more of its conditions.
347
+
Additional permissions that are applicable to the entire Program shall
348
+
be treated as though they were included in this License, to the extent
349
+
that they are valid under applicable law. If additional permissions
350
+
apply only to part of the Program, that part may be used separately
351
+
under those permissions, but the entire Program remains governed by
352
+
this License without regard to the additional permissions.
353
+
354
+
When you convey a copy of a covered work, you may at your option
355
+
remove any additional permissions from that copy, or from any part of
356
+
it. (Additional permissions may be written to require their own
357
+
removal in certain cases when you modify the work.) You may place
358
+
additional permissions on material, added by you to a covered work,
359
+
for which you have or can give appropriate copyright permission.
360
+
361
+
Notwithstanding any other provision of this License, for material you
362
+
add to a covered work, you may (if authorized by the copyright holders of
363
+
that material) supplement the terms of this License with terms:
364
+
365
+
a) Disclaiming warranty or limiting liability differently from the
366
+
terms of sections 15 and 16 of this License; or
367
+
368
+
b) Requiring preservation of specified reasonable legal notices or
369
+
author attributions in that material or in the Appropriate Legal
370
+
Notices displayed by works containing it; or
371
+
372
+
c) Prohibiting misrepresentation of the origin of that material, or
373
+
requiring that modified versions of such material be marked in
374
+
reasonable ways as different from the original version; or
375
+
376
+
d) Limiting the use for publicity purposes of names of licensors or
377
+
authors of the material; or
378
+
379
+
e) Declining to grant rights under trademark law for use of some
380
+
trade names, trademarks, or service marks; or
381
+
382
+
f) Requiring indemnification of licensors and authors of that
383
+
material by anyone who conveys the material (or modified versions of
384
+
it) with contractual assumptions of liability to the recipient, for
385
+
any liability that these contractual assumptions directly impose on
386
+
those licensors and authors.
387
+
388
+
All other non-permissive additional terms are considered "further
389
+
restrictions" within the meaning of section 10. If the Program as you
390
+
received it, or any part of it, contains a notice stating that it is
391
+
governed by this License along with a term that is a further
392
+
restriction, you may remove that term. If a license document contains
393
+
a further restriction but permits relicensing or conveying under this
394
+
License, you may add to a covered work material governed by the terms
395
+
of that license document, provided that the further restriction does
396
+
not survive such relicensing or conveying.
397
+
398
+
If you add terms to a covered work in accord with this section, you
399
+
must place, in the relevant source files, a statement of the
400
+
additional terms that apply to those files, or a notice indicating
401
+
where to find the applicable terms.
402
+
403
+
Additional terms, permissive or non-permissive, may be stated in the
404
+
form of a separately written license, or stated as exceptions;
405
+
the above requirements apply either way.
406
+
407
+
8. Termination.
408
+
409
+
You may not propagate or modify a covered work except as expressly
410
+
provided under this License. Any attempt otherwise to propagate or
411
+
modify it is void, and will automatically terminate your rights under
412
+
this License (including any patent licenses granted under the third
413
+
paragraph of section 11).
414
+
415
+
However, if you cease all violation of this License, then your
416
+
license from a particular copyright holder is reinstated (a)
417
+
provisionally, unless and until the copyright holder explicitly and
418
+
finally terminates your license, and (b) permanently, if the copyright
419
+
holder fails to notify you of the violation by some reasonable means
420
+
prior to 60 days after the cessation.
421
+
422
+
Moreover, your license from a particular copyright holder is
423
+
reinstated permanently if the copyright holder notifies you of the
424
+
violation by some reasonable means, this is the first time you have
425
+
received notice of violation of this License (for any work) from that
426
+
copyright holder, and you cure the violation prior to 30 days after
427
+
your receipt of the notice.
428
+
429
+
Termination of your rights under this section does not terminate the
430
+
licenses of parties who have received copies or rights from you under
431
+
this License. If your rights have been terminated and not permanently
432
+
reinstated, you do not qualify to receive new licenses for the same
433
+
material under section 10.
434
+
435
+
9. Acceptance Not Required for Having Copies.
436
+
437
+
You are not required to accept this License in order to receive or
438
+
run a copy of the Program. Ancillary propagation of a covered work
439
+
occurring solely as a consequence of using peer-to-peer transmission
440
+
to receive a copy likewise does not require acceptance. However,
441
+
nothing other than this License grants you permission to propagate or
442
+
modify any covered work. These actions infringe copyright if you do
443
+
not accept this License. Therefore, by modifying or propagating a
444
+
covered work, you indicate your acceptance of this License to do so.
445
+
446
+
10. Automatic Licensing of Downstream Recipients.
447
+
448
+
Each time you convey a covered work, the recipient automatically
449
+
receives a license from the original licensors, to run, modify and
450
+
propagate that work, subject to this License. You are not responsible
451
+
for enforcing compliance by third parties with this License.
452
+
453
+
An "entity transaction" is a transaction transferring control of an
454
+
organization, or substantially all assets of one, or subdividing an
455
+
organization, or merging organizations. If propagation of a covered
456
+
work results from an entity transaction, each party to that
457
+
transaction who receives a copy of the work also receives whatever
458
+
licenses to the work the party's predecessor in interest had or could
459
+
give under the previous paragraph, plus a right to possession of the
460
+
Corresponding Source of the work from the predecessor in interest, if
461
+
the predecessor has it or can get it with reasonable efforts.
462
+
463
+
You may not impose any further restrictions on the exercise of the
464
+
rights granted or affirmed under this License. For example, you may
465
+
not impose a license fee, royalty, or other charge for exercise of
466
+
rights granted under this License, and you may not initiate litigation
467
+
(including a cross-claim or counterclaim in a lawsuit) alleging that
468
+
any patent claim is infringed by making, using, selling, offering for
469
+
sale, or importing the Program or any portion of it.
470
+
471
+
11. Patents.
472
+
473
+
A "contributor" is a copyright holder who authorizes use under this
474
+
License of the Program or a work on which the Program is based. The
475
+
work thus licensed is called the contributor's "contributor version".
476
+
477
+
A contributor's "essential patent claims" are all patent claims
478
+
owned or controlled by the contributor, whether already acquired or
479
+
hereafter acquired, that would be infringed by some manner, permitted
480
+
by this License, of making, using, or selling its contributor version,
481
+
but do not include claims that would be infringed only as a
482
+
consequence of further modification of the contributor version. For
483
+
purposes of this definition, "control" includes the right to grant
484
+
patent sublicenses in a manner consistent with the requirements of
485
+
this License.
486
+
487
+
Each contributor grants you a non-exclusive, worldwide, royalty-free
488
+
patent license under the contributor's essential patent claims, to
489
+
make, use, sell, offer for sale, import and otherwise run, modify and
490
+
propagate the contents of its contributor version.
491
+
492
+
In the following three paragraphs, a "patent license" is any express
493
+
agreement or commitment, however denominated, not to enforce a patent
494
+
(such as an express permission to practice a patent or covenant not to
495
+
sue for patent infringement). To "grant" such a patent license to a
496
+
party means to make such an agreement or commitment not to enforce a
497
+
patent against the party.
498
+
499
+
If you convey a covered work, knowingly relying on a patent license,
500
+
and the Corresponding Source of the work is not available for anyone
501
+
to copy, free of charge and under the terms of this License, through a
502
+
publicly available network server or other readily accessible means,
503
+
then you must either (1) cause the Corresponding Source to be so
504
+
available, or (2) arrange to deprive yourself of the benefit of the
505
+
patent license for this particular work, or (3) arrange, in a manner
506
+
consistent with the requirements of this License, to extend the patent
507
+
license to downstream recipients. "Knowingly relying" means you have
508
+
actual knowledge that, but for the patent license, your conveying the
509
+
covered work in a country, or your recipient's use of the covered work
510
+
in a country, would infringe one or more identifiable patents in that
511
+
country that you have reason to believe are valid.
512
+
513
+
If, pursuant to or in connection with a single transaction or
514
+
arrangement, you convey, or propagate by procuring conveyance of, a
515
+
covered work, and grant a patent license to some of the parties
516
+
receiving the covered work authorizing them to use, propagate, modify
517
+
or convey a specific copy of the covered work, then the patent license
518
+
you grant is automatically extended to all recipients of the covered
519
+
work and works based on it.
520
+
521
+
A patent license is "discriminatory" if it does not include within
522
+
the scope of its coverage, prohibits the exercise of, or is
523
+
conditioned on the non-exercise of one or more of the rights that are
524
+
specifically granted under this License. You may not convey a covered
525
+
work if you are a party to an arrangement with a third party that is
526
+
in the business of distributing software, under which you make payment
527
+
to the third party based on the extent of your activity of conveying
528
+
the work, and under which the third party grants, to any of the
529
+
parties who would receive the covered work from you, a discriminatory
530
+
patent license (a) in connection with copies of the covered work
531
+
conveyed by you (or copies made from those copies), or (b) primarily
532
+
for and in connection with specific products or compilations that
533
+
contain the covered work, unless you entered into that arrangement,
534
+
or that patent license was granted, prior to 28 March 2007.
535
+
536
+
Nothing in this License shall be construed as excluding or limiting
537
+
any implied license or other defenses to infringement that may
538
+
otherwise be available to you under applicable patent law.
539
+
540
+
12. No Surrender of Others' Freedom.
541
+
542
+
If conditions are imposed on you (whether by court order, agreement or
543
+
otherwise) that contradict the conditions of this License, they do not
544
+
excuse you from the conditions of this License. If you cannot convey a
545
+
covered work so as to satisfy simultaneously your obligations under this
546
+
License and any other pertinent obligations, then as a consequence you may
547
+
not convey it at all. For example, if you agree to terms that obligate you
548
+
to collect a royalty for further conveying from those to whom you convey
549
+
the Program, the only way you could satisfy both those terms and this
550
+
License would be to refrain entirely from conveying the Program.
551
+
552
+
13. Use with the GNU Affero General Public License.
553
+
554
+
Notwithstanding any other provision of this License, you have
555
+
permission to link or combine any covered work with a work licensed
556
+
under version 3 of the GNU Affero General Public License into a single
557
+
combined work, and to convey the resulting work. The terms of this
558
+
License will continue to apply to the part which is the covered work,
559
+
but the special requirements of the GNU Affero General Public License,
560
+
section 13, concerning interaction through a network will apply to the
561
+
combination as such.
562
+
563
+
14. Revised Versions of this License.
564
+
565
+
The Free Software Foundation may publish revised and/or new versions of
566
+
the GNU General Public License from time to time. Such new versions will
567
+
be similar in spirit to the present version, but may differ in detail to
568
+
address new problems or concerns.
569
+
570
+
Each version is given a distinguishing version number. If the
571
+
Program specifies that a certain numbered version of the GNU General
572
+
Public License "or any later version" applies to it, you have the
573
+
option of following the terms and conditions either of that numbered
574
+
version or of any later version published by the Free Software
575
+
Foundation. If the Program does not specify a version number of the
576
+
GNU General Public License, you may choose any version ever published
577
+
by the Free Software Foundation.
578
+
579
+
If the Program specifies that a proxy can decide which future
580
+
versions of the GNU General Public License can be used, that proxy's
581
+
public statement of acceptance of a version permanently authorizes you
582
+
to choose that version for the Program.
583
+
584
+
Later license versions may give you additional or different
585
+
permissions. However, no additional obligations are imposed on any
586
+
author or copyright holder as a result of your choosing to follow a
587
+
later version.
588
+
589
+
15. Disclaimer of Warranty.
590
+
591
+
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
592
+
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
593
+
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
594
+
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
595
+
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
596
+
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
597
+
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
598
+
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
599
+
600
+
16. Limitation of Liability.
601
+
602
+
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
603
+
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
604
+
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
605
+
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
606
+
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
607
+
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
608
+
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
609
+
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
610
+
SUCH DAMAGES.
611
+
612
+
17. Interpretation of Sections 15 and 16.
613
+
614
+
If the disclaimer of warranty and limitation of liability provided
615
+
above cannot be given local legal effect according to their terms,
616
+
reviewing courts shall apply local law that most closely approximates
617
+
an absolute waiver of all civil liability in connection with the
618
+
Program, unless a warranty or assumption of liability accompanies a
619
+
copy of the Program in return for a fee.
620
+
621
+
END OF TERMS AND CONDITIONS
622
+
623
+
How to Apply These Terms to Your New Programs
624
+
625
+
If you develop a new program, and you want it to be of the greatest
626
+
possible use to the public, the best way to achieve this is to make it
627
+
free software which everyone can redistribute and change under these terms.
628
+
629
+
To do so, attach the following notices to the program. It is safest
630
+
to attach them to the start of each source file to most effectively
631
+
state the exclusion of warranty; and each file should have at least
632
+
the "copyright" line and a pointer to where the full notice is found.
633
+
634
+
<one line to give the program's name and a brief idea of what it does.>
635
+
Copyright (C) <year> <name of author>
636
+
637
+
This program is free software: you can redistribute it and/or modify
638
+
it under the terms of the GNU General Public License as published by
639
+
the Free Software Foundation, either version 3 of the License, or
640
+
(at your option) any later version.
641
+
642
+
This program is distributed in the hope that it will be useful,
643
+
but WITHOUT ANY WARRANTY; without even the implied warranty of
644
+
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
645
+
GNU General Public License for more details.
646
+
647
+
You should have received a copy of the GNU General Public License
648
+
along with this program. If not, see <https://www.gnu.org/licenses/>.
649
+
650
+
Also add information on how to contact you by electronic and paper mail.
651
+
652
+
If the program does terminal interaction, make it output a short
653
+
notice like this when it starts in an interactive mode:
654
+
655
+
<program> Copyright (C) <year> <name of author>
656
+
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
657
+
This is free software, and you are welcome to redistribute it
658
+
under certain conditions; type `show c' for details.
659
+
660
+
The hypothetical commands `show w' and `show c' should show the appropriate
661
+
parts of the General Public License. Of course, your program's commands
662
+
might be different; for a GUI interface, you would use an "about box".
663
+
664
+
You should also get your employer (if you work as a programmer) or school,
665
+
if any, to sign a "copyright disclaimer" for the program, if necessary.
666
+
For more information on this, and how to apply and follow the GNU GPL, see
667
+
<https://www.gnu.org/licenses/>.
668
+
669
+
The GNU General Public License does not permit incorporating your program
670
+
into proprietary programs. If your program is a subroutine library, you
671
+
may consider it more useful to permit linking proprietary applications with
672
+
the library. If this is what you want to do, use the GNU Lesser General
673
+
Public License instead of this License. But first, please read
674
+
<https://www.gnu.org/licenses/why-not-lgpl.html>.
+4
xnalara_io_Tools/modules/ALXAddonUpdater/README.md
+4
xnalara_io_Tools/modules/ALXAddonUpdater/README.md
+32
xnalara_io_Tools/modules/ALXInfoSystem/ALXInfoSystem/ALX_InfoSystem.py
+32
xnalara_io_Tools/modules/ALXInfoSystem/ALXInfoSystem/ALX_InfoSystem.py
···
1
+
# import bpy
2
+
3
+
4
+
# class ALX_OT_Operator_Modal_InfoPopupAwaitCompletion(bpy.types.Operator):
5
+
# """"""
6
+
7
+
# bl_label = ""
8
+
# bl_idname = "alx.operator_modal_info_popup_await_completion"
9
+
10
+
# @classmethod
11
+
# def poll(self, context):
12
+
# return True
13
+
14
+
# def execute(self, context: bpy.types.Context):
15
+
# return {"FINISHED"}
16
+
17
+
# def modal(self, context: bpy.types.Context, event: bpy.types.Event):
18
+
# return {"RUNNING_MODAL"}
19
+
20
+
# def draw(self, context: bpy.types.Context):
21
+
# template_list
22
+
23
+
# def invoke(self, context: bpy.types.Context, event: bpy.types.Event):
24
+
# wm: bpy.types.WindowManager = context.window_manager
25
+
# return wm.invoke_popup(self, width=180)
26
+
27
+
28
+
# def register_info():
29
+
# bpy.types.WindowManager
30
+
31
+
32
+
# def unregister_info():
+1
xnalara_io_Tools/modules/ALXModuleManager/.github/FUNDING.yml
+1
xnalara_io_Tools/modules/ALXModuleManager/.github/FUNDING.yml
···
1
+
ko_fi: housearhal
+1
xnalara_io_Tools/modules/ALXModuleManager/.gitignore
+1
xnalara_io_Tools/modules/ALXModuleManager/.gitignore
···
1
+
__pycache__
+198
xnalara_io_Tools/modules/ALXModuleManager/ALXModuleManager/ALX_ModuleManager.py
+198
xnalara_io_Tools/modules/ALXModuleManager/ALXModuleManager/ALX_ModuleManager.py
···
1
+
import os
2
+
from contextlib import redirect_stdout
3
+
from inspect import getmembers, isclass
4
+
from os import sep as os_separator
5
+
from pathlib import Path
6
+
from typing import Any, Optional
7
+
8
+
import bpy
9
+
import bpy.utils.previews as previews
10
+
11
+
12
+
class Alx_Module_Manager():
    """Discover, import/reload, and (un)register all Blender classes found in
    an addon's folder tree, and optionally manage icon/preview resources.

    NOTE(review): this manager drives dynamic imports via ``exec``/``eval``
    against the addon's ``__init__`` globals — only ever feed it trusted,
    addon-local paths.
    """

    def __init__(self, path: str, globals: dict[str, Any], mute: Optional[bool] = True):
        """
        IN path: the addon ``__init__``'s ``__path__`` (a list; element 0 is used) \n
        IN globals: the ``globals()`` dict of the addon's ``__init__`` \n
        IN mute: when True, suppress registration errors and stdout chatter \n
        """
        self.__mute = mute

        # BUG FIX: these used to be mutable CLASS attributes mutated in place,
        # so blacklists and caches were silently shared (and accumulated)
        # across every manager instance. They are now per-instance state.
        self.__folder_blacklist: set[str] = {"__pycache__"}
        self.__file_blacklist: set[str] = {"__init__"}

        self.__module_folders: set[Path] = set()
        self.__module_files: dict[str, Path] = dict()
        self.__module_classes: set = set()

        # Lazily created bpy.utils.previews collection (see developer_load_resources).
        self.__resources = None

        # __path__ is a list of package paths; only the first entry is used.
        self.__module_path = path[0]
        self.__init_globals = globals

    def developer_register_modules(self):
        """Walk the addon tree, import/reload every .py module, then register
        every class found in them with Blender."""
        self.__module_folders = self.__gather_addon_folders(self.__module_path, self.__folder_blacklist)
        self.__module_files = self.__gather_addon_files(self.__module_folders, self.__file_blacklist)
        self.__execute_locals_update(self.__module_path, self.__module_files)
        self.__module_classes = self.__gather_classes_from_files(self.__module_files)
        self.__register_addon_classes(self.__module_classes)

    def developer_unregister_modules(self):
        """Unregister every class registered by developer_register_modules and
        release any loaded preview resources."""
        self.__unregister_addon_classes(self.__module_classes)

    def developer_blacklist_folder(self, folders: set[str]):
        """Add folder names to skip during module discovery.

        BUG FIX: the original called ``set.add(*folders)``, which takes exactly
        one argument and raised TypeError for any set of size != 1; ``update``
        accepts any iterable.
        """
        self.__folder_blacklist.update(folders)

    def developer_blacklist_file(self, files: set[str]):
        """Add file stems (no extension) to skip during module discovery.

        BUG FIX: same ``set.add(*files)`` -> ``set.update(files)`` repair as
        developer_blacklist_folder.
        """
        self.__file_blacklist.update(files)

    def developer_load_resources(self, icons_definitions: list[dict]):
        """Load icon/preview resources and write an ``icons.py`` lookup module.

        Each entry of *icons_definitions* is a dict with keys:
        name : str [MUST BE UNIQUE] \n
        path : str [MUST BE RELATIVE TO THE FOLDER CONTAINING THE ADDON'S INIT FILE] \n
        resource_type : str ['IMAGE', 'MOVIE', 'BLEND', 'FONT'] \n
        """
        if (self.__resources is None):
            self.__resources = previews.new()

        name_id_pairs = {}
        for entry in icons_definitions:

            if ({"name", "path", "resource_type"}.issubset(set(entry.keys()))):
                # Path joining handles the separator for us (cross-platform),
                # replacing the original manual os.sep string surgery.
                path_object = Path(self.__module_path) / entry["path"]
                if (path_object.exists()) and (path_object.is_file()):
                    self.__resources.load(
                        entry["name"],
                        str(path_object),
                        entry["resource_type"],
                        True
                    )

                    name_id_pairs.update({entry["name"]: self.__resources[entry["name"]].icon_id})

        # BUG FIX: the original used a hard-coded "\\" separator, which on
        # POSIX creates a file literally named "<addon>\icons.py".
        icons_path_object = Path(self.__module_path) / "icons.py"

        icons_path_object.parent.mkdir(exist_ok=True, parents=True)
        with icons_path_object.open('w') as icon_file:
            text = "icons_dictionary={\n"

            for string in [*[f"\"{entry_name}\" : {entry_id},\n" for entry_name, entry_id in name_id_pairs.items()], "\n}"]:
                text += string

            icon_file.write(text)

    def __gather_addon_folders(self, path: str, folder_blacklist: set[str] = frozenset()):
        """
        IN path: __path__[0] from __init__ \n
        IN folder_blacklist: set[str] \n

        RETURN addon_folders: set[Path] \n
        """

        path_object: Path = Path(path)
        addon_folders: set[Path] = set()

        if (path_object.exists()) and (path_object.is_dir()):
            # Breadth-first walk: the queue is extended while being iterated,
            # which is safe for a list and visits every non-blacklisted folder.
            path_iter_queue: list[Path] = [path_object]

            for folder_path in path_iter_queue:
                if (folder_path.is_dir()) and (folder_path.exists()) and (folder_path not in addon_folders) and (folder_path.name not in folder_blacklist):
                    addon_folders.add(folder_path)

                    for subfolder_path in folder_path.iterdir():
                        if (subfolder_path.is_dir()) and (subfolder_path.exists()) and (subfolder_path not in addon_folders) and (subfolder_path.name not in folder_blacklist):
                            path_iter_queue.append(subfolder_path)
                            addon_folders.add(subfolder_path)

        return addon_folders

    def __gather_addon_files(self, folder_paths: set[Path], file_blacklist: set[str] = frozenset()):
        """
        IN folder_paths: set[Path] \n
        IN file_blacklist: set[str] \n

        RETURN addon_files: dict[str, Path] mapping module stem -> parent folder \n
        """

        addon_files: dict[str, Path] = dict()

        for folder_path in folder_paths:
            for file in folder_path.iterdir():
                if (file.is_file()) and (file.name not in file_blacklist) and (file.suffix == ".py"):
                    # Strip the ".py" suffix: the key is the importable module stem.
                    addon_files.update({file.stem: folder_path})

        return addon_files

    def __gather_classes_from_files(self, addon_files: dict[str, Path] = None):
        """Collect every class object defined in the already-imported modules.

        NOTE(review): ``file_name != __file__`` compares a module stem against
        a full file path, so it is effectively always True; kept as-is to
        preserve the original behavior.
        """
        addon_classes: set = set()

        for file_name in addon_files.keys():
            if (file_name != __file__) and (file_name not in self.__file_blacklist):
                # eval() resolves the module object by name inside the addon's
                # __init__ globals (populated by __execute_locals_update).
                for addon_class in getmembers(eval(file_name, self.__init_globals), isclass):
                    addon_classes.add(addon_class[1])

        return addon_classes

    def __execute_locals_update(self, path: str, addon_files: dict[str, Path]):
        """Import every discovered module into the addon's __init__ globals,
        or importlib.reload() it when it is already present (addon re-enable)."""
        for file_name in addon_files.keys():
            # Skip this manager module itself and any blacklisted file.
            if (file_name != __name__.split(".")[-1]) and (file_name not in self.__file_blacklist):
                try:
                    if ("importlib" not in self.__init_globals):
                        exec("import importlib", self.__init_globals)

                    if (file_name not in self.__init_globals):
                        # Build a package-relative dotted path for the import line.
                        relative_path = str(addon_files.get(file_name).relative_to(path)).replace(os_separator, ".")

                        import_line = f"from . {relative_path if relative_path != '.' else ''} import {file_name}"
                        exec(import_line, self.__init_globals)
                    else:
                        reload_line = f"{file_name} = importlib.reload({file_name})"
                        exec(reload_line, self.__init_globals)
                except Exception as error:
                    if (self.__mute == False):
                        print(f"[{file_name}] {error}")

    def __register_single_class(self, addon_class):
        """Register one class with Blender; WorkSpaceTool subclasses require
        bpy.utils.register_tool, everything else register_class."""
        if ("WorkSpaceTool" in [base.__name__ for base in addon_class.__bases__]):
            bpy.utils.register_tool(addon_class,
                                    after=eval(addon_class.after, self.__init_globals),
                                    separator=addon_class.separator,
                                    group=addon_class.group)
        else:
            bpy.utils.register_class(addon_class)

    def __register_addon_classes(self, addon_classes: list[object]):
        """Register all classes, optionally silencing Blender's stdout chatter.

        The original duplicated the register branch in both the muted and the
        unmuted path; it is now factored into __register_single_class.
        """
        for addon_class in addon_classes:
            try:
                if (self.__mute):
                    with open(os.devnull, 'w') as print_discard_bin:
                        with redirect_stdout(print_discard_bin):
                            self.__register_single_class(addon_class)
                else:
                    self.__register_single_class(addon_class)

            except Exception as error:
                if (self.__mute == False):
                    print(error)

    def __unregister_addon_classes(self, addon_classes: list[object]):
        """Unregister all classes and release the preview collection, if any."""
        for addon_class in addon_classes:
            try:
                if ("WorkSpaceTool" in [base.__name__ for base in addon_class.__bases__]):
                    bpy.utils.unregister_tool(addon_class)
                else:
                    bpy.utils.unregister_class(addon_class)

            except Exception as error:
                if (self.__mute == False):
                    print(error)

        if (self.__resources is not None):
            previews.remove(self.__resources)
+674
xnalara_io_Tools/modules/ALXModuleManager/LICENSE
+674
xnalara_io_Tools/modules/ALXModuleManager/LICENSE
···
1
+
GNU GENERAL PUBLIC LICENSE
2
+
Version 3, 29 June 2007
3
+
4
+
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
5
+
Everyone is permitted to copy and distribute verbatim copies
6
+
of this license document, but changing it is not allowed.
7
+
8
+
Preamble
9
+
10
+
The GNU General Public License is a free, copyleft license for
11
+
software and other kinds of works.
12
+
13
+
The licenses for most software and other practical works are designed
14
+
to take away your freedom to share and change the works. By contrast,
15
+
the GNU General Public License is intended to guarantee your freedom to
16
+
share and change all versions of a program--to make sure it remains free
17
+
software for all its users. We, the Free Software Foundation, use the
18
+
GNU General Public License for most of our software; it applies also to
19
+
any other work released this way by its authors. You can apply it to
20
+
your programs, too.
21
+
22
+
When we speak of free software, we are referring to freedom, not
23
+
price. Our General Public Licenses are designed to make sure that you
24
+
have the freedom to distribute copies of free software (and charge for
25
+
them if you wish), that you receive source code or can get it if you
26
+
want it, that you can change the software or use pieces of it in new
27
+
free programs, and that you know you can do these things.
28
+
29
+
To protect your rights, we need to prevent others from denying you
30
+
these rights or asking you to surrender the rights. Therefore, you have
31
+
certain responsibilities if you distribute copies of the software, or if
32
+
you modify it: responsibilities to respect the freedom of others.
33
+
34
+
For example, if you distribute copies of such a program, whether
35
+
gratis or for a fee, you must pass on to the recipients the same
36
+
freedoms that you received. You must make sure that they, too, receive
37
+
or can get the source code. And you must show them these terms so they
38
+
know their rights.
39
+
40
+
Developers that use the GNU GPL protect your rights with two steps:
41
+
(1) assert copyright on the software, and (2) offer you this License
42
+
giving you legal permission to copy, distribute and/or modify it.
43
+
44
+
For the developers' and authors' protection, the GPL clearly explains
45
+
that there is no warranty for this free software. For both users' and
46
+
authors' sake, the GPL requires that modified versions be marked as
47
+
changed, so that their problems will not be attributed erroneously to
48
+
authors of previous versions.
49
+
50
+
Some devices are designed to deny users access to install or run
51
+
modified versions of the software inside them, although the manufacturer
52
+
can do so. This is fundamentally incompatible with the aim of
53
+
protecting users' freedom to change the software. The systematic
54
+
pattern of such abuse occurs in the area of products for individuals to
55
+
use, which is precisely where it is most unacceptable. Therefore, we
56
+
have designed this version of the GPL to prohibit the practice for those
57
+
products. If such problems arise substantially in other domains, we
58
+
stand ready to extend this provision to those domains in future versions
59
+
of the GPL, as needed to protect the freedom of users.
60
+
61
+
Finally, every program is threatened constantly by software patents.
62
+
States should not allow patents to restrict development and use of
63
+
software on general-purpose computers, but in those that do, we wish to
64
+
avoid the special danger that patents applied to a free program could
65
+
make it effectively proprietary. To prevent this, the GPL assures that
66
+
patents cannot be used to render the program non-free.
67
+
68
+
The precise terms and conditions for copying, distribution and
69
+
modification follow.
70
+
71
+
TERMS AND CONDITIONS
72
+
73
+
0. Definitions.
74
+
75
+
"This License" refers to version 3 of the GNU General Public License.
76
+
77
+
"Copyright" also means copyright-like laws that apply to other kinds of
78
+
works, such as semiconductor masks.
79
+
80
+
"The Program" refers to any copyrightable work licensed under this
81
+
License. Each licensee is addressed as "you". "Licensees" and
82
+
"recipients" may be individuals or organizations.
83
+
84
+
To "modify" a work means to copy from or adapt all or part of the work
85
+
in a fashion requiring copyright permission, other than the making of an
86
+
exact copy. The resulting work is called a "modified version" of the
87
+
earlier work or a work "based on" the earlier work.
88
+
89
+
A "covered work" means either the unmodified Program or a work based
90
+
on the Program.
91
+
92
+
To "propagate" a work means to do anything with it that, without
93
+
permission, would make you directly or secondarily liable for
94
+
infringement under applicable copyright law, except executing it on a
95
+
computer or modifying a private copy. Propagation includes copying,
96
+
distribution (with or without modification), making available to the
97
+
public, and in some countries other activities as well.
98
+
99
+
To "convey" a work means any kind of propagation that enables other
100
+
parties to make or receive copies. Mere interaction with a user through
101
+
a computer network, with no transfer of a copy, is not conveying.
102
+
103
+
An interactive user interface displays "Appropriate Legal Notices"
104
+
to the extent that it includes a convenient and prominently visible
105
+
feature that (1) displays an appropriate copyright notice, and (2)
106
+
tells the user that there is no warranty for the work (except to the
107
+
extent that warranties are provided), that licensees may convey the
108
+
work under this License, and how to view a copy of this License. If
109
+
the interface presents a list of user commands or options, such as a
110
+
menu, a prominent item in the list meets this criterion.
111
+
112
+
1. Source Code.
113
+
114
+
The "source code" for a work means the preferred form of the work
115
+
for making modifications to it. "Object code" means any non-source
116
+
form of a work.
117
+
118
+
A "Standard Interface" means an interface that either is an official
119
+
standard defined by a recognized standards body, or, in the case of
120
+
interfaces specified for a particular programming language, one that
121
+
is widely used among developers working in that language.
122
+
123
+
The "System Libraries" of an executable work include anything, other
124
+
than the work as a whole, that (a) is included in the normal form of
125
+
packaging a Major Component, but which is not part of that Major
126
+
Component, and (b) serves only to enable use of the work with that
127
+
Major Component, or to implement a Standard Interface for which an
128
+
implementation is available to the public in source code form. A
129
+
"Major Component", in this context, means a major essential component
130
+
(kernel, window system, and so on) of the specific operating system
131
+
(if any) on which the executable work runs, or a compiler used to
132
+
produce the work, or an object code interpreter used to run it.
133
+
134
+
The "Corresponding Source" for a work in object code form means all
135
+
the source code needed to generate, install, and (for an executable
136
+
work) run the object code and to modify the work, including scripts to
137
+
control those activities. However, it does not include the work's
138
+
System Libraries, or general-purpose tools or generally available free
139
+
programs which are used unmodified in performing those activities but
140
+
which are not part of the work. For example, Corresponding Source
141
+
includes interface definition files associated with source files for
142
+
the work, and the source code for shared libraries and dynamically
143
+
linked subprograms that the work is specifically designed to require,
144
+
such as by intimate data communication or control flow between those
145
+
subprograms and other parts of the work.
146
+
147
+
The Corresponding Source need not include anything that users
148
+
can regenerate automatically from other parts of the Corresponding
149
+
Source.
150
+
151
+
The Corresponding Source for a work in source code form is that
152
+
same work.
153
+
154
+
2. Basic Permissions.
155
+
156
+
All rights granted under this License are granted for the term of
157
+
copyright on the Program, and are irrevocable provided the stated
158
+
conditions are met. This License explicitly affirms your unlimited
159
+
permission to run the unmodified Program. The output from running a
160
+
covered work is covered by this License only if the output, given its
161
+
content, constitutes a covered work. This License acknowledges your
162
+
rights of fair use or other equivalent, as provided by copyright law.
163
+
164
+
You may make, run and propagate covered works that you do not
165
+
convey, without conditions so long as your license otherwise remains
166
+
in force. You may convey covered works to others for the sole purpose
167
+
of having them make modifications exclusively for you, or provide you
168
+
with facilities for running those works, provided that you comply with
169
+
the terms of this License in conveying all material for which you do
170
+
not control copyright. Those thus making or running the covered works
171
+
for you must do so exclusively on your behalf, under your direction
172
+
and control, on terms that prohibit them from making any copies of
173
+
your copyrighted material outside their relationship with you.
174
+
175
+
Conveying under any other circumstances is permitted solely under
176
+
the conditions stated below. Sublicensing is not allowed; section 10
177
+
makes it unnecessary.
178
+
179
+
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
180
+
181
+
No covered work shall be deemed part of an effective technological
182
+
measure under any applicable law fulfilling obligations under article
183
+
11 of the WIPO copyright treaty adopted on 20 December 1996, or
184
+
similar laws prohibiting or restricting circumvention of such
185
+
measures.
186
+
187
+
When you convey a covered work, you waive any legal power to forbid
188
+
circumvention of technological measures to the extent such circumvention
189
+
is effected by exercising rights under this License with respect to
190
+
the covered work, and you disclaim any intention to limit operation or
191
+
modification of the work as a means of enforcing, against the work's
192
+
users, your or third parties' legal rights to forbid circumvention of
193
+
technological measures.
194
+
195
+
4. Conveying Verbatim Copies.
196
+
197
+
You may convey verbatim copies of the Program's source code as you
198
+
receive it, in any medium, provided that you conspicuously and
199
+
appropriately publish on each copy an appropriate copyright notice;
200
+
keep intact all notices stating that this License and any
201
+
non-permissive terms added in accord with section 7 apply to the code;
202
+
keep intact all notices of the absence of any warranty; and give all
203
+
recipients a copy of this License along with the Program.
204
+
205
+
You may charge any price or no price for each copy that you convey,
206
+
and you may offer support or warranty protection for a fee.
207
+
208
+
5. Conveying Modified Source Versions.
209
+
210
+
You may convey a work based on the Program, or the modifications to
211
+
produce it from the Program, in the form of source code under the
212
+
terms of section 4, provided that you also meet all of these conditions:
213
+
214
+
a) The work must carry prominent notices stating that you modified
215
+
it, and giving a relevant date.
216
+
217
+
b) The work must carry prominent notices stating that it is
218
+
released under this License and any conditions added under section
219
+
7. This requirement modifies the requirement in section 4 to
220
+
"keep intact all notices".
221
+
222
+
c) You must license the entire work, as a whole, under this
223
+
License to anyone who comes into possession of a copy. This
224
+
License will therefore apply, along with any applicable section 7
225
+
additional terms, to the whole of the work, and all its parts,
226
+
regardless of how they are packaged. This License gives no
227
+
permission to license the work in any other way, but it does not
228
+
invalidate such permission if you have separately received it.
229
+
230
+
d) If the work has interactive user interfaces, each must display
231
+
Appropriate Legal Notices; however, if the Program has interactive
232
+
interfaces that do not display Appropriate Legal Notices, your
233
+
work need not make them do so.
234
+
235
+
A compilation of a covered work with other separate and independent
236
+
works, which are not by their nature extensions of the covered work,
237
+
and which are not combined with it such as to form a larger program,
238
+
in or on a volume of a storage or distribution medium, is called an
239
+
"aggregate" if the compilation and its resulting copyright are not
240
+
used to limit the access or legal rights of the compilation's users
241
+
beyond what the individual works permit. Inclusion of a covered work
242
+
in an aggregate does not cause this License to apply to the other
243
+
parts of the aggregate.
244
+
245
+
6. Conveying Non-Source Forms.
246
+
247
+
You may convey a covered work in object code form under the terms
248
+
of sections 4 and 5, provided that you also convey the
249
+
machine-readable Corresponding Source under the terms of this License,
250
+
in one of these ways:
251
+
252
+
a) Convey the object code in, or embodied in, a physical product
253
+
(including a physical distribution medium), accompanied by the
254
+
Corresponding Source fixed on a durable physical medium
255
+
customarily used for software interchange.
256
+
257
+
b) Convey the object code in, or embodied in, a physical product
258
+
(including a physical distribution medium), accompanied by a
259
+
written offer, valid for at least three years and valid for as
260
+
long as you offer spare parts or customer support for that product
261
+
model, to give anyone who possesses the object code either (1) a
262
+
copy of the Corresponding Source for all the software in the
263
+
product that is covered by this License, on a durable physical
264
+
medium customarily used for software interchange, for a price no
265
+
more than your reasonable cost of physically performing this
266
+
conveying of source, or (2) access to copy the
267
+
Corresponding Source from a network server at no charge.
268
+
269
+
c) Convey individual copies of the object code with a copy of the
270
+
written offer to provide the Corresponding Source. This
271
+
alternative is allowed only occasionally and noncommercially, and
272
+
only if you received the object code with such an offer, in accord
273
+
with subsection 6b.
274
+
275
+
d) Convey the object code by offering access from a designated
276
+
place (gratis or for a charge), and offer equivalent access to the
277
+
Corresponding Source in the same way through the same place at no
278
+
further charge. You need not require recipients to copy the
279
+
Corresponding Source along with the object code. If the place to
280
+
copy the object code is a network server, the Corresponding Source
281
+
may be on a different server (operated by you or a third party)
282
+
that supports equivalent copying facilities, provided you maintain
283
+
clear directions next to the object code saying where to find the
284
+
Corresponding Source. Regardless of what server hosts the
285
+
Corresponding Source, you remain obligated to ensure that it is
286
+
available for as long as needed to satisfy these requirements.
287
+
288
+
e) Convey the object code using peer-to-peer transmission, provided
289
+
you inform other peers where the object code and Corresponding
290
+
Source of the work are being offered to the general public at no
291
+
charge under subsection 6d.
292
+
293
+
A separable portion of the object code, whose source code is excluded
294
+
from the Corresponding Source as a System Library, need not be
295
+
included in conveying the object code work.
296
+
297
+
A "User Product" is either (1) a "consumer product", which means any
298
+
tangible personal property which is normally used for personal, family,
299
+
or household purposes, or (2) anything designed or sold for incorporation
300
+
into a dwelling. In determining whether a product is a consumer product,
301
+
doubtful cases shall be resolved in favor of coverage. For a particular
302
+
product received by a particular user, "normally used" refers to a
303
+
typical or common use of that class of product, regardless of the status
304
+
of the particular user or of the way in which the particular user
305
+
actually uses, or expects or is expected to use, the product. A product
306
+
is a consumer product regardless of whether the product has substantial
307
+
commercial, industrial or non-consumer uses, unless such uses represent
308
+
the only significant mode of use of the product.
309
+
310
+
"Installation Information" for a User Product means any methods,
311
+
procedures, authorization keys, or other information required to install
312
+
and execute modified versions of a covered work in that User Product from
313
+
a modified version of its Corresponding Source. The information must
314
+
suffice to ensure that the continued functioning of the modified object
315
+
code is in no case prevented or interfered with solely because
316
+
modification has been made.
317
+
318
+
If you convey an object code work under this section in, or with, or
319
+
specifically for use in, a User Product, and the conveying occurs as
320
+
part of a transaction in which the right of possession and use of the
321
+
User Product is transferred to the recipient in perpetuity or for a
322
+
fixed term (regardless of how the transaction is characterized), the
323
+
Corresponding Source conveyed under this section must be accompanied
324
+
by the Installation Information. But this requirement does not apply
325
+
if neither you nor any third party retains the ability to install
326
+
modified object code on the User Product (for example, the work has
327
+
been installed in ROM).
328
+
329
+
The requirement to provide Installation Information does not include a
330
+
requirement to continue to provide support service, warranty, or updates
331
+
for a work that has been modified or installed by the recipient, or for
332
+
the User Product in which it has been modified or installed. Access to a
333
+
network may be denied when the modification itself materially and
334
+
adversely affects the operation of the network or violates the rules and
335
+
protocols for communication across the network.
336
+
337
+
Corresponding Source conveyed, and Installation Information provided,
338
+
in accord with this section must be in a format that is publicly
339
+
documented (and with an implementation available to the public in
340
+
source code form), and must require no special password or key for
341
+
unpacking, reading or copying.
342
+
343
+
7. Additional Terms.
344
+
345
+
"Additional permissions" are terms that supplement the terms of this
346
+
License by making exceptions from one or more of its conditions.
347
+
Additional permissions that are applicable to the entire Program shall
348
+
be treated as though they were included in this License, to the extent
349
+
that they are valid under applicable law. If additional permissions
350
+
apply only to part of the Program, that part may be used separately
351
+
under those permissions, but the entire Program remains governed by
352
+
this License without regard to the additional permissions.
353
+
354
+
When you convey a copy of a covered work, you may at your option
355
+
remove any additional permissions from that copy, or from any part of
356
+
it. (Additional permissions may be written to require their own
357
+
removal in certain cases when you modify the work.) You may place
358
+
additional permissions on material, added by you to a covered work,
359
+
for which you have or can give appropriate copyright permission.
360
+
361
+
Notwithstanding any other provision of this License, for material you
362
+
add to a covered work, you may (if authorized by the copyright holders of
363
+
that material) supplement the terms of this License with terms:
364
+
365
+
a) Disclaiming warranty or limiting liability differently from the
366
+
terms of sections 15 and 16 of this License; or
367
+
368
+
b) Requiring preservation of specified reasonable legal notices or
369
+
author attributions in that material or in the Appropriate Legal
370
+
Notices displayed by works containing it; or
371
+
372
+
c) Prohibiting misrepresentation of the origin of that material, or
373
+
requiring that modified versions of such material be marked in
374
+
reasonable ways as different from the original version; or
375
+
376
+
d) Limiting the use for publicity purposes of names of licensors or
377
+
authors of the material; or
378
+
379
+
e) Declining to grant rights under trademark law for use of some
380
+
trade names, trademarks, or service marks; or
381
+
382
+
f) Requiring indemnification of licensors and authors of that
383
+
material by anyone who conveys the material (or modified versions of
384
+
it) with contractual assumptions of liability to the recipient, for
385
+
any liability that these contractual assumptions directly impose on
386
+
those licensors and authors.
387
+
388
+
All other non-permissive additional terms are considered "further
389
+
restrictions" within the meaning of section 10. If the Program as you
390
+
received it, or any part of it, contains a notice stating that it is
391
+
governed by this License along with a term that is a further
392
+
restriction, you may remove that term. If a license document contains
393
+
a further restriction but permits relicensing or conveying under this
394
+
License, you may add to a covered work material governed by the terms
395
+
of that license document, provided that the further restriction does
396
+
not survive such relicensing or conveying.
397
+
398
+
If you add terms to a covered work in accord with this section, you
399
+
must place, in the relevant source files, a statement of the
400
+
additional terms that apply to those files, or a notice indicating
401
+
where to find the applicable terms.
402
+
403
+
Additional terms, permissive or non-permissive, may be stated in the
404
+
form of a separately written license, or stated as exceptions;
405
+
the above requirements apply either way.
406
+
407
+
8. Termination.
408
+
409
+
You may not propagate or modify a covered work except as expressly
410
+
provided under this License. Any attempt otherwise to propagate or
411
+
modify it is void, and will automatically terminate your rights under
412
+
this License (including any patent licenses granted under the third
413
+
paragraph of section 11).
414
+
415
+
However, if you cease all violation of this License, then your
416
+
license from a particular copyright holder is reinstated (a)
417
+
provisionally, unless and until the copyright holder explicitly and
418
+
finally terminates your license, and (b) permanently, if the copyright
419
+
holder fails to notify you of the violation by some reasonable means
420
+
prior to 60 days after the cessation.
421
+
422
+
Moreover, your license from a particular copyright holder is
423
+
reinstated permanently if the copyright holder notifies you of the
424
+
violation by some reasonable means, this is the first time you have
425
+
received notice of violation of this License (for any work) from that
426
+
copyright holder, and you cure the violation prior to 30 days after
427
+
your receipt of the notice.
428
+
429
+
Termination of your rights under this section does not terminate the
430
+
licenses of parties who have received copies or rights from you under
431
+
this License. If your rights have been terminated and not permanently
432
+
reinstated, you do not qualify to receive new licenses for the same
433
+
material under section 10.
434
+
435
+
9. Acceptance Not Required for Having Copies.
436
+
437
+
You are not required to accept this License in order to receive or
438
+
run a copy of the Program. Ancillary propagation of a covered work
439
+
occurring solely as a consequence of using peer-to-peer transmission
440
+
to receive a copy likewise does not require acceptance. However,
441
+
nothing other than this License grants you permission to propagate or
442
+
modify any covered work. These actions infringe copyright if you do
443
+
not accept this License. Therefore, by modifying or propagating a
444
+
covered work, you indicate your acceptance of this License to do so.
445
+
446
+
10. Automatic Licensing of Downstream Recipients.
447
+
448
+
Each time you convey a covered work, the recipient automatically
449
+
receives a license from the original licensors, to run, modify and
450
+
propagate that work, subject to this License. You are not responsible
451
+
for enforcing compliance by third parties with this License.
452
+
453
+
An "entity transaction" is a transaction transferring control of an
454
+
organization, or substantially all assets of one, or subdividing an
455
+
organization, or merging organizations. If propagation of a covered
456
+
work results from an entity transaction, each party to that
457
+
transaction who receives a copy of the work also receives whatever
458
+
licenses to the work the party's predecessor in interest had or could
459
+
give under the previous paragraph, plus a right to possession of the
460
+
Corresponding Source of the work from the predecessor in interest, if
461
+
the predecessor has it or can get it with reasonable efforts.
462
+
463
+
You may not impose any further restrictions on the exercise of the
464
+
rights granted or affirmed under this License. For example, you may
465
+
not impose a license fee, royalty, or other charge for exercise of
466
+
rights granted under this License, and you may not initiate litigation
467
+
(including a cross-claim or counterclaim in a lawsuit) alleging that
468
+
any patent claim is infringed by making, using, selling, offering for
469
+
sale, or importing the Program or any portion of it.
470
+
471
+
11. Patents.
472
+
473
+
A "contributor" is a copyright holder who authorizes use under this
474
+
License of the Program or a work on which the Program is based. The
475
+
work thus licensed is called the contributor's "contributor version".
476
+
477
+
A contributor's "essential patent claims" are all patent claims
478
+
owned or controlled by the contributor, whether already acquired or
479
+
hereafter acquired, that would be infringed by some manner, permitted
480
+
by this License, of making, using, or selling its contributor version,
481
+
but do not include claims that would be infringed only as a
482
+
consequence of further modification of the contributor version. For
483
+
purposes of this definition, "control" includes the right to grant
484
+
patent sublicenses in a manner consistent with the requirements of
485
+
this License.
486
+
487
+
Each contributor grants you a non-exclusive, worldwide, royalty-free
488
+
patent license under the contributor's essential patent claims, to
489
+
make, use, sell, offer for sale, import and otherwise run, modify and
490
+
propagate the contents of its contributor version.
491
+
492
+
In the following three paragraphs, a "patent license" is any express
493
+
agreement or commitment, however denominated, not to enforce a patent
494
+
(such as an express permission to practice a patent or covenant not to
495
+
sue for patent infringement). To "grant" such a patent license to a
496
+
party means to make such an agreement or commitment not to enforce a
497
+
patent against the party.
498
+
499
+
If you convey a covered work, knowingly relying on a patent license,
500
+
and the Corresponding Source of the work is not available for anyone
501
+
to copy, free of charge and under the terms of this License, through a
502
+
publicly available network server or other readily accessible means,
503
+
then you must either (1) cause the Corresponding Source to be so
504
+
available, or (2) arrange to deprive yourself of the benefit of the
505
+
patent license for this particular work, or (3) arrange, in a manner
506
+
consistent with the requirements of this License, to extend the patent
507
+
license to downstream recipients. "Knowingly relying" means you have
508
+
actual knowledge that, but for the patent license, your conveying the
509
+
covered work in a country, or your recipient's use of the covered work
510
+
in a country, would infringe one or more identifiable patents in that
511
+
country that you have reason to believe are valid.
512
+
513
+
If, pursuant to or in connection with a single transaction or
514
+
arrangement, you convey, or propagate by procuring conveyance of, a
515
+
covered work, and grant a patent license to some of the parties
516
+
receiving the covered work authorizing them to use, propagate, modify
517
+
or convey a specific copy of the covered work, then the patent license
518
+
you grant is automatically extended to all recipients of the covered
519
+
work and works based on it.
520
+
521
+
A patent license is "discriminatory" if it does not include within
522
+
the scope of its coverage, prohibits the exercise of, or is
523
+
conditioned on the non-exercise of one or more of the rights that are
524
+
specifically granted under this License. You may not convey a covered
525
+
work if you are a party to an arrangement with a third party that is
526
+
in the business of distributing software, under which you make payment
527
+
to the third party based on the extent of your activity of conveying
528
+
the work, and under which the third party grants, to any of the
529
+
parties who would receive the covered work from you, a discriminatory
530
+
patent license (a) in connection with copies of the covered work
531
+
conveyed by you (or copies made from those copies), or (b) primarily
532
+
for and in connection with specific products or compilations that
533
+
contain the covered work, unless you entered into that arrangement,
534
+
or that patent license was granted, prior to 28 March 2007.
535
+
536
+
Nothing in this License shall be construed as excluding or limiting
537
+
any implied license or other defenses to infringement that may
538
+
otherwise be available to you under applicable patent law.
539
+
540
+
12. No Surrender of Others' Freedom.
541
+
542
+
If conditions are imposed on you (whether by court order, agreement or
543
+
otherwise) that contradict the conditions of this License, they do not
544
+
excuse you from the conditions of this License. If you cannot convey a
545
+
covered work so as to satisfy simultaneously your obligations under this
546
+
License and any other pertinent obligations, then as a consequence you may
547
+
not convey it at all. For example, if you agree to terms that obligate you
548
+
to collect a royalty for further conveying from those to whom you convey
549
+
the Program, the only way you could satisfy both those terms and this
550
+
License would be to refrain entirely from conveying the Program.
551
+
552
+
13. Use with the GNU Affero General Public License.
553
+
554
+
Notwithstanding any other provision of this License, you have
555
+
permission to link or combine any covered work with a work licensed
556
+
under version 3 of the GNU Affero General Public License into a single
557
+
combined work, and to convey the resulting work. The terms of this
558
+
License will continue to apply to the part which is the covered work,
559
+
but the special requirements of the GNU Affero General Public License,
560
+
section 13, concerning interaction through a network will apply to the
561
+
combination as such.
562
+
563
+
14. Revised Versions of this License.
564
+
565
+
The Free Software Foundation may publish revised and/or new versions of
566
+
the GNU General Public License from time to time. Such new versions will
567
+
be similar in spirit to the present version, but may differ in detail to
568
+
address new problems or concerns.
569
+
570
+
Each version is given a distinguishing version number. If the
571
+
Program specifies that a certain numbered version of the GNU General
572
+
Public License "or any later version" applies to it, you have the
573
+
option of following the terms and conditions either of that numbered
574
+
version or of any later version published by the Free Software
575
+
Foundation. If the Program does not specify a version number of the
576
+
GNU General Public License, you may choose any version ever published
577
+
by the Free Software Foundation.
578
+
579
+
If the Program specifies that a proxy can decide which future
580
+
versions of the GNU General Public License can be used, that proxy's
581
+
public statement of acceptance of a version permanently authorizes you
582
+
to choose that version for the Program.
583
+
584
+
Later license versions may give you additional or different
585
+
permissions. However, no additional obligations are imposed on any
586
+
author or copyright holder as a result of your choosing to follow a
587
+
later version.
588
+
589
+
15. Disclaimer of Warranty.
590
+
591
+
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
592
+
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
593
+
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
594
+
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
595
+
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
596
+
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
597
+
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
598
+
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
599
+
600
+
16. Limitation of Liability.
601
+
602
+
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
603
+
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
604
+
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
605
+
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
606
+
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
607
+
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
608
+
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
609
+
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
610
+
SUCH DAMAGES.
611
+
612
+
17. Interpretation of Sections 15 and 16.
613
+
614
+
If the disclaimer of warranty and limitation of liability provided
615
+
above cannot be given local legal effect according to their terms,
616
+
reviewing courts shall apply local law that most closely approximates
617
+
an absolute waiver of all civil liability in connection with the
618
+
Program, unless a warranty or assumption of liability accompanies a
619
+
copy of the Program in return for a fee.
620
+
621
+
END OF TERMS AND CONDITIONS
622
+
623
+
How to Apply These Terms to Your New Programs
624
+
625
+
If you develop a new program, and you want it to be of the greatest
626
+
possible use to the public, the best way to achieve this is to make it
627
+
free software which everyone can redistribute and change under these terms.
628
+
629
+
To do so, attach the following notices to the program. It is safest
630
+
to attach them to the start of each source file to most effectively
631
+
state the exclusion of warranty; and each file should have at least
632
+
the "copyright" line and a pointer to where the full notice is found.
633
+
634
+
<one line to give the program's name and a brief idea of what it does.>
635
+
Copyright (C) <year> <name of author>
636
+
637
+
This program is free software: you can redistribute it and/or modify
638
+
it under the terms of the GNU General Public License as published by
639
+
the Free Software Foundation, either version 3 of the License, or
640
+
(at your option) any later version.
641
+
642
+
This program is distributed in the hope that it will be useful,
643
+
but WITHOUT ANY WARRANTY; without even the implied warranty of
644
+
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
645
+
GNU General Public License for more details.
646
+
647
+
You should have received a copy of the GNU General Public License
648
+
along with this program. If not, see <https://www.gnu.org/licenses/>.
649
+
650
+
Also add information on how to contact you by electronic and paper mail.
651
+
652
+
If the program does terminal interaction, make it output a short
653
+
notice like this when it starts in an interactive mode:
654
+
655
+
<program> Copyright (C) <year> <name of author>
656
+
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
657
+
This is free software, and you are welcome to redistribute it
658
+
under certain conditions; type `show c' for details.
659
+
660
+
The hypothetical commands `show w' and `show c' should show the appropriate
661
+
parts of the General Public License. Of course, your program's commands
662
+
might be different; for a GUI interface, you would use an "about box".
663
+
664
+
You should also get your employer (if you work as a programmer) or school,
665
+
if any, to sign a "copyright disclaimer" for the program, if necessary.
666
+
For more information on this, and how to apply and follow the GNU GPL, see
667
+
<https://www.gnu.org/licenses/>.
668
+
669
+
The GNU General Public License does not permit incorporating your program
670
+
into proprietary programs. If your program is a subroutine library, you
671
+
may consider it more useful to permit linking proprietary applications with
672
+
the library. If this is what you want to do, use the GNU Lesser General
673
+
Public License instead of this License. But first, please read
674
+
<https://www.gnu.org/licenses/why-not-lgpl.html>.
+3
xnalara_io_Tools/modules/ALXModuleManager/README.md
+3
xnalara_io_Tools/modules/ALXModuleManager/README.md
+441
xnalara_io_Tools/node_shader_utils.py
+441
xnalara_io_Tools/node_shader_utils.py
···
1
+
import bpy
2
+
from bpy_extras import node_shader_utils
3
+
from mathutils import Vector
4
+
5
+
6
+
class XPSShaderWrapper(node_shader_utils.ShaderWrapper):
    """
    Hard coded shader setup, based in XPS Shader.
    Should cover most common cases on import, and gives a basic nodal shaders support for export.

    Wraps a material whose surface is driven by an 'XPS Shader' node group,
    exposing its texture inputs (diffuse, lightmap, specular, bump maps, ...)
    as lazily-created texture wrappers.
    """
    NODES_LIST = (
        "node_out",
        "node_principled_bsdf",

        "_node_normalmap",
        "_node_texcoords",
    )

    __slots__ = (
        "is_readonly",
        "material",
        *NODES_LIST,
    )

    # Full node list also includes the slots declared by the base wrapper.
    NODES_LIST = node_shader_utils.ShaderWrapper.NODES_LIST + NODES_LIST

    def __init__(self, material, is_readonly=True, use_nodes=True):
        super(XPSShaderWrapper, self).__init__(material, is_readonly, use_nodes)

    def update(self):
        """Scan the material's node tree for a (XPS Shader group -> Material Output)
        pair, creating the missing nodes when the wrapper is writable."""
        super(XPSShaderWrapper, self).update()

        if not self.use_nodes:
            return

        tree = self.material.node_tree

        nodes = tree.nodes
        links = tree.links

        # --------------------------------------------------------------------
        # Main output and shader.
        node_out = None
        node_principled = None
        for n in nodes:
            if n.bl_idname == 'ShaderNodeOutputMaterial' and n.inputs[0].is_linked:
                # Material output found; follow its Surface link back to the shader.
                node_out = n
                node_principled = n.inputs[0].links[0].from_node
            elif n.bl_idname == 'ShaderNodeGroup' and n.node_tree.name == 'XPS Shader' and n.outputs[0].is_linked:
                # XPS shader group found; follow its output forward to a material output.
                node_principled = n
                for lnk in n.outputs[0].links:
                    node_out = lnk.to_node
                    if node_out.bl_idname == 'ShaderNodeOutputMaterial':
                        break
            if (
                node_out is not None and node_principled is not None
                and node_out.bl_idname == 'ShaderNodeOutputMaterial'
                and node_principled.bl_idname == 'ShaderNodeGroup'
                and node_principled.node_tree.name == 'XPS Shader'
            ):
                break
            node_out = node_principled = None  # Could not find a valid pair, let's try again

        if node_out is not None:
            self._grid_to_location(0, 0, ref_node=node_out)
        elif not self.is_readonly:
            node_out = nodes.new(type='ShaderNodeOutputMaterial')
            node_out.label = "Material Out"
            node_out.target = 'ALL'
            self._grid_to_location(1, 1, dst_node=node_out)
        self.node_out = node_out

        if node_principled is not None:
            self._grid_to_location(0, 0, ref_node=node_principled)
        elif not self.is_readonly:
            # FIX: `nodes.new()` expects a registered node type's bl_idname;
            # 'XPS Shader' is a node *group* name, so the original
            # `nodes.new(type='XPS Shader')` always raised RuntimeError.
            # Create a group node and assign the group datablock instead.
            group = bpy.data.node_groups.get('XPS Shader')
            if group is None:
                raise RuntimeError("'XPS Shader' node group not found in bpy.data.node_groups")
            node_principled = nodes.new(type='ShaderNodeGroup')
            node_principled.node_tree = group
            node_principled.label = "Principled BSDF"
            self._grid_to_location(0, 1, dst_node=node_principled)
            # Link shader output to material surface.
            # NOTE(review): assumes the group exposes a "BSDF" output socket — confirm
            # against the actual 'XPS Shader' group definition.
            links.new(node_principled.outputs["BSDF"], self.node_out.inputs["Surface"])
        self.node_principled_bsdf = node_principled

        # --------------------------------------------------------------------
        # Normal Map, lazy initialization...
        self._node_normalmap = ...

        # --------------------------------------------------------------------
        # Tex Coords, lazy initialization...
        self._node_texcoords = ...

    # --------------------------------------------------------------------
    # Get Image wrapper.

    def node_texture_get(self, inputName):
        """Return a ShaderImageTextureWrapper for the named input of the XPS
        shader group, or None when nodes are unused / the group is missing."""
        if not self.use_nodes or self.node_principled_bsdf is None:
            return None
        return node_shader_utils.ShaderImageTextureWrapper(
            self, self.node_principled_bsdf,
            self.node_principled_bsdf.inputs[inputName],
            grid_row_diff=1,
        )

    # --------------------------------------------------------------------
    # Get Environment wrapper.

    def node_environment_get(self, inputName):
        """Return a ShaderEnvironmentTextureWrapper for the named input of the
        XPS shader group, or None when nodes are unused / the group is missing."""
        if not self.use_nodes or self.node_principled_bsdf is None:
            return None
        return ShaderEnvironmentTextureWrapper(
            self, self.node_principled_bsdf,
            self.node_principled_bsdf.inputs[inputName],
            grid_row_diff=1,
        )

    # --------------------------------------------------------------------
    # Diffuse Texture.

    def diffuse_texture_get(self):
        return self.node_texture_get("Diffuse")

    diffuse_texture = property(diffuse_texture_get)

    # --------------------------------------------------------------------
    # Light Map.

    def lightmap_texture_get(self):
        return self.node_texture_get("Lightmap")

    lightmap_texture = property(lightmap_texture_get)

    # --------------------------------------------------------------------
    # Specular.

    def specular_texture_get(self):
        return self.node_texture_get("Specular")

    specular_texture = property(specular_texture_get)

    # --------------------------------------------------------------------
    # Emission texture.

    def emission_texture_get(self):
        return self.node_texture_get("Emission")

    emission_texture = property(emission_texture_get)

    # --------------------------------------------------------------------
    # Normal map.

    def normalmap_texture_get(self):
        return self.node_texture_get("Bump Map")

    normalmap_texture = property(normalmap_texture_get)

    # --------------------------------------------------------------------
    # Normal Mask.

    def normal_mask_texture_get(self):
        return self.node_texture_get("Bump Mask")

    normal_mask_texture = property(normal_mask_texture_get)

    # --------------------------------------------------------------------
    # Micro Bump 1.

    def microbump1_texture_get(self):
        return self.node_texture_get("MicroBump 1")

    microbump1_texture = property(microbump1_texture_get)

    # --------------------------------------------------------------------
    # Micro Bump 2.

    def microbump2_texture_get(self):
        return self.node_texture_get("MicroBump 2")

    microbump2_texture = property(microbump2_texture_get)

    # --------------------------------------------------------------------
    # Environment

    def environment_texture_get(self):
        return self.node_environment_get("Environment")

    environment_texture = property(environment_texture_get)
189
+
190
+
191
+
class ShaderEnvironmentTextureWrapper():
192
+
"""
193
+
Generic 'environment texture'-like wrapper, handling image node
194
+
"""
195
+
196
+
# Note: this class assumes we are using nodes, otherwise it should never be used...
197
+
198
+
NODES_LIST = (
199
+
"node_dst",
200
+
"socket_dst",
201
+
202
+
"_node_image",
203
+
"_node_mapping",
204
+
)
205
+
206
+
__slots__ = (
207
+
"owner_shader",
208
+
"is_readonly",
209
+
"grid_row_diff",
210
+
"use_alpha",
211
+
"colorspace_is_data",
212
+
"colorspace_name",
213
+
*NODES_LIST,
214
+
)
215
+
216
+
def __new__(cls, owner_shader: node_shader_utils.ShaderWrapper, node_dst, socket_dst, *_args, **_kwargs):
217
+
instance = owner_shader._textures.get((node_dst, socket_dst), None)
218
+
if instance is not None:
219
+
return instance
220
+
instance = super(ShaderEnvironmentTextureWrapper, cls).__new__(cls)
221
+
owner_shader._textures[(node_dst, socket_dst)] = instance
222
+
return instance
223
+
224
+
def __init__(self, owner_shader: node_shader_utils.ShaderWrapper, node_dst, socket_dst, grid_row_diff=0,
225
+
use_alpha=False, colorspace_is_data=..., colorspace_name=...):
226
+
self.owner_shader = owner_shader
227
+
self.is_readonly = owner_shader.is_readonly
228
+
self.node_dst = node_dst
229
+
self.socket_dst = socket_dst
230
+
self.grid_row_diff = grid_row_diff
231
+
self.use_alpha = use_alpha
232
+
self.colorspace_is_data = colorspace_is_data
233
+
self.colorspace_name = colorspace_name
234
+
235
+
self._node_image = ...
236
+
self._node_mapping = ...
237
+
238
+
# tree = node_dst.id_data
239
+
# nodes = tree.nodes
240
+
# links = tree.links
241
+
242
+
if socket_dst.is_linked:
243
+
from_node = socket_dst.links[0].from_node
244
+
if from_node.bl_idname == 'ShaderNodeTexEnvironment':
245
+
self._node_image = from_node
246
+
247
+
if self.node_image is not None:
248
+
socket_dst = self.node_image.inputs["Vector"]
249
+
if socket_dst.is_linked:
250
+
from_node = socket_dst.links[0].from_node
251
+
if from_node.bl_idname == 'ShaderNodeMapping':
252
+
self._node_mapping = from_node
253
+
254
+
def copy_from(self, tex):
255
+
# Avoid generating any node in source texture.
256
+
is_readonly_back = tex.is_readonly
257
+
tex.is_readonly = True
258
+
259
+
if tex.node_image is not None:
260
+
self.image = tex.image
261
+
self.projection = tex.projection
262
+
self.texcoords = tex.texcoords
263
+
self.copy_mapping_from(tex)
264
+
265
+
tex.is_readonly = is_readonly_back
266
+
267
+
    def copy_mapping_from(self, tex):
        """Copy translation/rotation/scale mapping from wrapper *tex*.

        If *tex* has no mapping node, this wrapper's mapping node (if any) is
        removed, re-wiring any texcoords link directly into the image node.
        """
        # Avoid generating any node in source texture.
        is_readonly_back = tex.is_readonly
        tex.is_readonly = True

        if tex.node_mapping is None:  # Used to actually remove mapping node.
            if self.has_mapping_node():
                # We assume node_image can never be None in that case...
                # Find potential existing link into image's Vector input.
                socket_dst = socket_src = None
                if self.node_mapping.inputs["Vector"].is_linked:
                    socket_dst = self.node_image.inputs["Vector"]
                    socket_src = self.node_mapping.inputs["Vector"].links[0].from_socket

                # Drop our mapping node and clear the cached reference.
                tree = self.owner_shader.material.node_tree
                tree.nodes.remove(self.node_mapping)
                self._node_mapping = None

                # If previously existing, re-link texcoords -> image
                if socket_src is not None:
                    tree.links.new(socket_src, socket_dst)
        elif self.node_mapping is not None:
            # Both sides have mapping nodes: copy the transform values over.
            self.translation = tex.translation
            self.rotation = tex.rotation
            self.scale = tex.scale

        tex.is_readonly = is_readonly_back
294
+
295
+
# --------------------------------------------------------------------
296
+
# Image.
297
+
298
+
def node_image_get(self):
    """Lazily locate or create the ShaderNodeTexImage feeding socket_dst.

    ``...`` (Ellipsis) marks "not searched yet"; None marks "searched, absent".
    When absent and not read-only, a new image node is created and linked.
    """
    if self._node_image is ...:
        # Running only once, trying to find a valid image node.
        if self.socket_dst.is_linked:
            node_image = self.socket_dst.links[0].from_node
            if node_image.bl_idname == 'ShaderNodeTexImage':
                self._node_image = node_image
                self.owner_shader._grid_to_location(0, 0, ref_node=node_image)
        if self._node_image is ...:
            self._node_image = None
    if self._node_image is None and not self.is_readonly:
        tree = self.owner_shader.material.node_tree

        node_image = tree.nodes.new(type='ShaderNodeTexImage')
        self.owner_shader._grid_to_location(-1, 0 + self.grid_row_diff, dst_node=node_image, ref_node=self.node_dst)

        # Wire either the Alpha or Color output depending on this wrapper's role.
        tree.links.new(node_image.outputs["Alpha" if self.use_alpha else "Color"], self.socket_dst)

        self._node_image = node_image
    return self._node_image
318
+
319
+
node_image = property(node_image_get)
320
+
321
+
def image_get(self):
    """Return the image bound to the texture node, or None when no node exists."""
    node = self.node_image
    if node is None:
        return None
    return node.image
323
+
324
+
@node_shader_utils._set_check
def image_set(self, image):
    """Assign *image* to the texture node, coercing its colorspace.

    If the image is shared (users >= 1) and its colorspace differs from the
    wrapper's expectation, a private copy is made before mutating it.
    """
    if self.colorspace_is_data is not ...:
        if image.colorspace_settings.is_data != self.colorspace_is_data and image.users >= 1:
            image = image.copy()
        image.colorspace_settings.is_data = self.colorspace_is_data
    if self.colorspace_name is not ...:
        # Fixed: this branch previously re-compared ``is_data`` (copy/paste from
        # the block above), so a shared image whose colorspace *name* differed
        # was renamed in place instead of being copied first.
        if image.colorspace_settings.name != self.colorspace_name and image.users >= 1:
            image = image.copy()
        image.colorspace_settings.name = self.colorspace_name
    self.node_image.image = image
335
+
336
+
image = property(image_get, image_set)
337
+
338
+
def projection_get(self):
    """Return the image node's projection; 'EQUIRECTANGULAR' when no node exists."""
    return self.node_image.projection if self.node_image is not None else 'EQUIRECTANGULAR'
340
+
341
+
@node_shader_utils._set_check
def projection_set(self, projection):
    # node_image lazily creates the image node when the wrapper is writable.
    self.node_image.projection = projection
344
+
345
+
projection = property(projection_get, projection_set)
346
+
347
+
def texcoords_get(self):
    """Return the name of the texcoords output feeding this texture ('UV' default)."""
    if self.node_image is not None:
        # Look at whichever node is upstream: the mapping node when present,
        # otherwise the image node itself.
        socket = (self.node_mapping if self.has_mapping_node() else self.node_image).inputs["Vector"]
        if socket.is_linked:
            return socket.links[0].from_socket.name
    return 'UV'
353
+
354
+
@node_shader_utils._set_check
def texcoords_set(self, texcoords):
    """Link the named texcoords output into this texture's Vector input."""
    # Image texture node already defaults to UVs, no extra node needed.
    # ONLY in case we do not have any texcoords mapping!!!
    if texcoords == 'UV' and not self.has_mapping_node():
        return
    tree = self.node_image.id_data
    links = tree.links
    node_dst = self.node_mapping if self.has_mapping_node() else self.node_image
    socket_src = self.owner_shader.node_texcoords.outputs[texcoords]
    links.new(socket_src, node_dst.inputs["Vector"])
365
+
366
+
texcoords = property(texcoords_get, texcoords_set)
367
+
368
+
# --------------------------------------------------------------------
369
+
# Mapping.
370
+
371
+
def has_mapping_node(self):
    """True when a mapping node has been resolved (neither unsearched ``...`` nor absent)."""
    node = self._node_mapping
    return node is not None and node is not ...
373
+
374
+
def node_mapping_get(self):
    """Lazily locate or create the ShaderNodeMapping upstream of the image node.

    ``...`` marks "not searched yet"; None marks "searched, absent". When
    writable, a mapping node is created and spliced between texcoords and image.
    """
    if self._node_mapping is ...:
        # Running only once, trying to find a valid mapping node.
        if self.node_image is None:
            return None
        if self.node_image.inputs["Vector"].is_linked:
            node_mapping = self.node_image.inputs["Vector"].links[0].from_node
            if node_mapping.bl_idname == 'ShaderNodeMapping':
                self._node_mapping = node_mapping
                self.owner_shader._grid_to_location(0, 0 + self.grid_row_diff, ref_node=node_mapping)
        if self._node_mapping is ...:
            self._node_mapping = None
    if self._node_mapping is None and not self.is_readonly:
        # Find potential existing link into image's Vector input.
        socket_dst = self.node_image.inputs["Vector"]
        # If not already existing, we need to create texcoords -> mapping link (from UV).
        socket_src = (
            socket_dst.links[0].from_socket if socket_dst.is_linked
            else self.owner_shader.node_texcoords.outputs['UV']
        )

        tree = self.owner_shader.material.node_tree
        node_mapping = tree.nodes.new(type='ShaderNodeMapping')
        node_mapping.vector_type = 'TEXTURE'
        self.owner_shader._grid_to_location(-1, 0, dst_node=node_mapping, ref_node=self.node_image)

        # Link mapping -> image node.
        tree.links.new(node_mapping.outputs["Vector"], socket_dst)
        # Link texcoords -> mapping.
        tree.links.new(socket_src, node_mapping.inputs["Vector"])

        self._node_mapping = node_mapping
    return self._node_mapping
407
+
408
+
node_mapping = property(node_mapping_get)
409
+
410
+
def translation_get(self):
    """Return the mapping node's Location, or a zero vector when no node exists."""
    if self.node_mapping is None:
        return Vector((0.0, 0.0, 0.0))
    return self.node_mapping.inputs['Location'].default_value
414
+
415
+
@node_shader_utils._set_check
def translation_set(self, translation):
    # node_mapping lazily creates the mapping node when the wrapper is writable.
    self.node_mapping.inputs['Location'].default_value = translation
418
+
419
+
translation = property(translation_get, translation_set)
420
+
421
+
def rotation_get(self):
    """Return the mapping node's Rotation, or a zero vector when no node exists."""
    if self.node_mapping is None:
        return Vector((0.0, 0.0, 0.0))
    return self.node_mapping.inputs['Rotation'].default_value
425
+
426
+
@node_shader_utils._set_check
def rotation_set(self, rotation):
    # node_mapping lazily creates the mapping node when the wrapper is writable.
    self.node_mapping.inputs['Rotation'].default_value = rotation
429
+
430
+
rotation = property(rotation_get, rotation_set)
431
+
432
+
def scale_get(self):
    """Return the mapping node's Scale, or a unit vector when no node exists."""
    if self.node_mapping is None:
        return Vector((1.0, 1.0, 1.0))
    return self.node_mapping.inputs['Scale'].default_value
436
+
437
+
@node_shader_utils._set_check
def scale_set(self, scale):
    # node_mapping lazily creates the mapping node when the wrapper is writable.
    self.node_mapping.inputs['Scale'].default_value = scale
440
+
441
+
scale = property(scale_get, scale_set)
+250
xnalara_io_Tools/read_ascii_xps.py
+250
xnalara_io_Tools/read_ascii_xps.py
···
1
+
import io
2
+
import ntpath
3
+
4
+
from mathutils import Vector
5
+
6
+
from . import ascii_ops, xps_const, xps_types
7
+
8
+
9
+
def readUvVert(file):
    """Read one UV coordinate pair from the next line of *file*."""
    tokens = ascii_ops.splitValues(ascii_ops.readline(file))
    return [ascii_ops.getFloat(tokens[0]), ascii_ops.getFloat(tokens[1])]
16
+
17
+
18
+
def readXYZ(file):
    """Read an X/Y/Z float triplet from the next line of *file*."""
    tokens = ascii_ops.splitValues(ascii_ops.readline(file))
    return [ascii_ops.getFloat(tokens[i]) for i in range(3)]
27
+
28
+
29
+
def fillArray(array, minLen, value):
    """Return a new list: *array* padded with *value* up to *minLen* entries.

    A list already at or beyond *minLen* is returned as an unpadded copy.
    """
    deficit = minLen - len(array)
    if deficit <= 0:
        return array + []
    return array + [value] * deficit
33
+
34
+
35
+
def read4Float(file):
    """Read four floats from the next line, padding missing entries with 0."""
    tokens = fillArray(ascii_ops.splitValues(ascii_ops.readline(file)), 4, 0)
    return [ascii_ops.getFloat(tokens[i]) for i in range(4)]
45
+
46
+
47
+
def readBoneWeight(file):
    """Read a line of bone weights as floats, padded to at least four entries."""
    tokens = fillArray(ascii_ops.splitValues(ascii_ops.readline(file)), 4, 0)
    return list(map(ascii_ops.getFloat, tokens))
53
+
54
+
55
+
def readBoneId(file):
    """Read a line of bone indices as ints, padded to at least four entries."""
    tokens = fillArray(ascii_ops.splitValues(ascii_ops.readline(file)), 4, 0)
    return list(map(ascii_ops.getInt, tokens))
61
+
62
+
63
+
def read4Int(file):
    """Read an RGBA vertex colour as four ints, padding missing entries with 0."""
    tokens = fillArray(ascii_ops.splitValues(ascii_ops.readline(file)), 4, 0)
    return [ascii_ops.getInt(tokens[i]) for i in range(4)]
73
+
74
+
75
+
def readTriIdxs(file):
    """Read one triangle's three vertex indices from the next line."""
    tokens = ascii_ops.splitValues(ascii_ops.readline(file))
    return [ascii_ops.getInt(tokens[i]) for i in range(3)]
83
+
84
+
85
+
def readBones(file):
    """Read the bone table: a count, then per bone a name, parent index and head position."""
    bones = []
    # Bone Count
    boneCount = ascii_ops.readInt(file)
    for boneId in range(boneCount):
        boneName = ascii_ops.readString(file)
        parentId = ascii_ops.readInt(file)
        coords = readXYZ(file)

        xpsBone = xps_types.XpsBone(boneId, boneName, coords, parentId)
        bones.append(xpsBone)
    return bones
97
+
98
+
99
+
def readMeshes(file, hasBones):
    """Read all meshes from an ascii .mesh stream.

    Per mesh: name, uv-layer count, textures, vertices (position, normal,
    colour, uvs, optional bone weights) and triangle faces. *hasBones*
    selects whether per-vertex weight lines are present.
    """
    meshes = []
    meshCount = ascii_ops.readInt(file)

    for meshId in range(meshCount):
        # Name
        meshName = ascii_ops.readString(file)
        if not meshName:
            # Fallback name for nameless meshes.
            meshName = 'xxx'
        # print('Mesh Name:', meshName)
        # uv Count
        uvLayerCount = ascii_ops.readInt(file)
        # Textures
        textures = []
        textureCount = ascii_ops.readInt(file)
        for texId in range(textureCount):
            # Only the basename is kept; paths in the file are Windows-style.
            textureFile = ntpath.basename(ascii_ops.readString(file))
            # print('Texture file', textureFile)
            uvLayerId = ascii_ops.readInt(file)

            xpsTexture = xps_types.XpsTexture(texId, textureFile, uvLayerId)
            textures.append(xpsTexture)

        # Vertices
        vertex = []
        vertexCount = ascii_ops.readInt(file)
        for vertexId in range(vertexCount):
            coord = readXYZ(file)
            normal = readXYZ(file)
            vertexColor = read4Int(file)

            uvs = []
            for uvLayerId in range(uvLayerCount):
                uvVert = readUvVert(file)
                uvs.append(uvVert)
            # Tangent data is not read for the ascii format.
            # tangent = read4float(file)

            boneWeights = []
            if hasBones:
                # With zero bones there are no weight lines to read.
                boneIdx = readBoneId(file)
                boneWeight = readBoneWeight(file)

                for idx in range(len(boneIdx)):
                    boneWeights.append(
                        xps_types.BoneWeight(boneIdx[idx], boneWeight[idx]))
            xpsVertex = xps_types.XpsVertex(
                vertexId, coord, normal, vertexColor, uvs, boneWeights)
            vertex.append(xpsVertex)

        # Faces
        faces = []
        triCount = ascii_ops.readInt(file)
        for i in range(triCount):
            triIdxs = readTriIdxs(file)
            faces.append(triIdxs)
        xpsMesh = xps_types.XpsMesh(
            meshName, textures, vertex, faces, uvLayerCount)
        meshes.append(xpsMesh)
    return meshes
161
+
162
+
163
+
def readPoseFile(file):
    """Return the remaining contents of *file* as a single string."""
    contents = file.read()
    return contents
165
+
166
+
167
+
def poseData(string):
    """Parse pose-file text into {boneName: XpsBonePose}.

    Each non-empty line is ``name: rx ry rz tx ty tz sx sy sz``; missing
    trailing values are padded with 1 (so omitted scales default to 1 —
    note this also pads omitted rotation/translation with 1, matching the
    original behaviour).
    """
    poseData = {}
    poseList = string.split('\n')
    for bonePose in poseList:
        if bonePose:
            pose = bonePose.split(':')

            boneName = pose[0]
            dataList = fillArray(pose[1].split(), 9, 1)
            rotDelta = Vector((
                ascii_ops.getFloat(dataList[0]),
                ascii_ops.getFloat(dataList[1]),
                ascii_ops.getFloat(dataList[2])))
            coordDelta = Vector((
                ascii_ops.getFloat(dataList[3]),
                ascii_ops.getFloat(dataList[4]),
                ascii_ops.getFloat(dataList[5])))
            scale = Vector((
                ascii_ops.getFloat(dataList[6]),
                ascii_ops.getFloat(dataList[7]),
                ascii_ops.getFloat(dataList[8])))

            bonePose = xps_types.XpsBonePose(
                boneName, coordDelta, rotDelta, scale)
            poseData[boneName] = bonePose
    return poseData
193
+
194
+
195
+
def boneDictData(string):
    """Parse bone-dictionary text ("old;new" per line) into two mappings.

    Returns (rename, restore): rename maps old -> new names, restore maps
    new -> old. Lines without exactly one ';' separator are ignored.
    """
    renameMap = {}
    restoreMap = {}
    for entry in string.split('\n'):
        if not entry:
            continue
        parts = entry.split(';')
        if len(parts) == 2:
            sourceName, targetName = parts
            renameMap[sourceName] = targetName
            restoreMap[targetName] = sourceName
    return renameMap, restoreMap
207
+
208
+
209
+
def readIoStream(filename):
    """Load *filename* fully into memory and return it as a rewound StringIO."""
    with open(filename, "r", encoding=xps_const.ENCODING_READ) as a_file:
        ioStream = io.StringIO(a_file.read())
    return ioStream
213
+
214
+
215
+
def readXpsModel(filename):
    """Read an ascii XPS model file and return its XpsData (bones + meshes)."""
    ioStream = readIoStream(filename)
    # print('Reading Header')
    # The ascii format has no header block.
    # xpsHeader = readHeader(ioStream)
    print('Reading Bones')
    bones = readBones(ioStream)
    # Weight data only exists in the stream when at least one bone is present.
    hasBones = bool(bones)
    print('Reading Meshes')
    meshes = readMeshes(ioStream, hasBones)
    xpsModelData = xps_types.XpsData(bones=bones, meshes=meshes)
    return xpsModelData
226
+
227
+
228
+
def readXpsPose(filename):
    """Read a .pose file and return its {boneName: XpsBonePose} mapping."""
    ioStream = readIoStream(filename)
    # print('Import Pose')
    poseString = readPoseFile(ioStream)
    bonesPose = poseData(poseString)
    return bonesPose
234
+
235
+
236
+
def readBoneDict(filename):
    """Read a bone-dictionary file; returns (rename, restore) name mappings."""
    ioStream = readIoStream(filename)
    boneDictString = readPoseFile(ioStream)
    boneDictRename, boneDictRestore = boneDictData(boneDictString)
    return boneDictRename, boneDictRestore
241
+
242
+
243
+
if __name__ == "__main__":
    # Standalone smoke test: parse a local model and pose file.
    readModelfilename = r'G:\3DModeling\XNALara\XNALara_XPS\data\TESTING2\Tekken\Tekken - Lili Bride\generic_item.mesh.ascii'
    readPosefilename = r'G:\3DModeling\XNALara\XNALara_XPS\data\TESTING2\Tekken\Tekken - Lili Bride\Lili 1.pose'

    print('----READ START----')
    xpsData = readXpsModel(readModelfilename)
    # Fixed: the pose result previously overwrote ``xpsData``, discarding the
    # model that had just been read.
    xpsPose = readXpsPose(readPosefilename)
    print('----READ END----')
+405
xnalara_io_Tools/read_bin_xps.py
+405
xnalara_io_Tools/read_bin_xps.py
···
1
+
import io
2
+
import ntpath
3
+
4
+
from . import bin_ops
5
+
from . import read_ascii_xps
6
+
from . import xps_const
7
+
from . import xps_types
8
+
9
+
10
+
def flagName(flag):
    """Translate a numeric header flag id to its symbolic name; unknown ids pass through."""
    names = {
        0: xps_const.BACK_FACE_CULLING,
        1: xps_const.ALWAYS_FORCE_CULLING,
        2: xps_const.MODEL_CAST_SHADOWS,
        3: xps_const.TANGENT_SPACE_RED,
        4: xps_const.TANGENT_SPACE_GREEN,
        5: xps_const.TANGENT_SPACE_BLUE,
        6: xps_const.GLOSS,
        7: xps_const.HAS_BONE_DIRECTIONS,
    }
    return names.get(flag, flag)
22
+
23
+
24
+
def flagsDefault():
    """Return a fresh dict of header flags preset to their XPS defaults."""
    defaults = {
        xps_const.BACK_FACE_CULLING: False,
        xps_const.ALWAYS_FORCE_CULLING: False,
        xps_const.MODEL_CAST_SHADOWS: True,
        xps_const.TANGENT_SPACE_RED: 0,    # Straight X channel
        xps_const.TANGENT_SPACE_GREEN: 1,  # Invert Y channel
        xps_const.TANGENT_SPACE_BLUE: 0,   # Straight Z channel
        xps_const.GLOSS: 10,
        xps_const.HAS_BONE_DIRECTIONS: False,
    }
    return defaults
36
+
37
+
38
+
def flagValue(flag, value):
    """Normalize a raw header flag value according to its flag id.

    Flags 0/1/2/6/7 (culling, shadows, gloss, bone directions) become bools;
    flags 3/4/5 (tangent-space channel swizzles) are reduced modulo 2; any
    other flag's value passes through unchanged.
    """
    if flag in (0, 1, 2, 6, 7):
        return bool(value)
    if flag in (3, 4, 5):
        return value % 2
    return value
55
+
56
+
57
+
def intToCoords(flag):
    """Map a tangent-space swizzle flag (0/1) to its sign character; 'Uk' for unknown."""
    if flag == 0:
        return '+'
    if flag == 1:
        return '-'
    return 'Uk'
63
+
64
+
65
+
def printNormalMapSwizzel(tangentSpaceRed, tangentSpaceGreen, tangentSpaceBlue):
    """Log the normal-map channel swizzle implied by the three tangent-space flags."""
    # Default XPS NormalMapTangentSpace == 0 1 0 == X+ Y- Z+
    signs = (intToCoords(tangentSpaceRed),
             intToCoords(tangentSpaceGreen),
             intToCoords(tangentSpaceBlue))
    print('Tangent Space Normal Map Swizzel Coordinates:')
    print('X{} Y{} Z{}'.format(*signs))
    print('')
70
+
71
+
72
+
def readFilesString(file):
    """Read a length-prefixed string (XPS variable-length encoding).

    The length is one byte; when it is >= xps_const.LIMIT a second byte
    extends it: length = (b1 % LIMIT) + b2 * LIMIT.
    """
    lengthByte2 = 0

    lengthByte1 = bin_ops.readByte(file)

    if (lengthByte1 >= xps_const.LIMIT):
        lengthByte2 = bin_ops.readByte(file)
    length = (lengthByte1 % xps_const.LIMIT) + (lengthByte2 * xps_const.LIMIT)

    string = bin_ops.readString(file, length)
    return string
83
+
84
+
85
+
def readVertexColor(file):
    """Read an RGBA vertex colour as four unsigned bytes, in order."""
    return [bin_ops.readByte(file) for _ in range(4)]
92
+
93
+
94
+
def readUvVert(file):
    """Read a UV coordinate pair as two 32-bit floats."""
    return [bin_ops.readSingle(file) for _ in range(2)]
99
+
100
+
101
+
def readXYZ(file):
    """Read an X/Y/Z position as three 32-bit floats."""
    return [bin_ops.readSingle(file) for _ in range(3)]
107
+
108
+
109
+
def read4Float(file):
    """Read four consecutive 32-bit floats (e.g. a tangent)."""
    return [bin_ops.readSingle(file) for _ in range(4)]
116
+
117
+
118
+
def read4Int16(file):
    """Read four consecutive 16-bit integers."""
    return [bin_ops.readInt16(file) for _ in range(4)]
125
+
126
+
127
+
def readTriIdxs(file):
    """Read one triangle's three vertex indices as uint32 values."""
    return [bin_ops.readUInt32(file) for _ in range(3)]
133
+
134
+
135
+
def readHeader(file):
    """Parse the binary XPS header into an XpsHeader.

    Reads magic number, version, author strings and the settings block. Old
    (tangent-era) files carry an opaque settings blob; newer files store a
    list of typed option chunks (0 = padding, 1 = default pose, 2 = flags,
    other = skip to end of settings).
    """
    xpsHeader = xps_types.XpsHeader()
    flags = flagsDefault()

    # MagicNumber
    magic_number = bin_ops.readUInt32(file)
    # XPS Version
    version_mayor = bin_ops.readUInt16(file)
    version_minor = bin_ops.readUInt16(file)
    # XNAaral Name
    xna_aral = readFilesString(file)
    # Settings Length (counted in uint32 words -- TODO confirm)
    settingsLen = bin_ops.readUInt32(file)
    # MachineName
    machineName = readFilesString(file)
    # UserName
    userName = readFilesString(file)
    # File-->File
    filesString = readFilesString(file)
    xpsPoseData = None

    # print('*'*80)
    hasTangent = bin_ops.hasTangentVersion(version_mayor, version_minor)
    if (hasTangent):
        # OLD format: the settings blob is read and discarded.
        settingsStream = io.BytesIO(file.read(settingsLen * 4))
    else:
        # NEW format: typed option chunks. valuesRead tracks consumed bytes.
        valuesRead = 0
        hash = bin_ops.readUInt32(file)
        valuesRead += 1 * 4
        items = bin_ops.readUInt32(file)
        valuesRead += 1 * 4
        # print('hash', hash)
        # print('items', items)
        for i in range(items):
            # print('valuesRead', valuesRead)
            optType = bin_ops.readUInt32(file)
            valuesRead += 1 * 4
            optcount = bin_ops.readUInt32(file)
            valuesRead += 1 * 4
            optInfo = bin_ops.readUInt32(file)
            valuesRead += 1 * 4

            if (optType == 0):
                # Padding / unused values.
                readNone(file, optcount)
                valuesRead += optcount * 2
            elif (optType == 1):
                # Embedded default pose; optcount is its unrounded byte length.
                xpsPoseData = readDefaultPose(file, optcount, optInfo)
                readCount = bin_ops.roundToMultiple(optcount, xps_const.ROUND_MULTIPLE)
                valuesRead += readCount
            elif (optType == 2):
                # Render flags (culling, shadows, swizzles, ...).
                flags = readFlags(file, optcount)
                valuesRead += optcount * 2 * 4
            else:
                # Unknown chunk: skip the rest of the settings block.
                loopStart = valuesRead // 4
                loopFinish = settingsLen
                # print (loopStart, loopFinish)
                for j in range(loopStart, loopFinish):
                    # print('waste',j - loopStart)
                    waste = bin_ops.readUInt32(file)

    xpsHeader.magic_number = magic_number
    xpsHeader.version_mayor = version_mayor
    xpsHeader.version_minor = version_minor
    xpsHeader.xna_aral = xna_aral
    xpsHeader.settingsLen = settingsLen
    xpsHeader.machine = machineName
    xpsHeader.user = userName
    xpsHeader.files = filesString
    xpsHeader.pose = xpsPoseData
    xpsHeader.flags = flags
    return xpsHeader
218
+
219
+
220
+
def findHeader(file):
    """Return the parsed header when *file* starts with the XPS magic number, else None.

    The stream is rewound before parsing, so headerless files are left at
    position 0 for the caller.
    """
    header = None

    # Check for MAGIC_NUMBER
    number = bin_ops.readUInt32(file)
    file.seek(0)

    if (number == xps_const.MAGIC_NUMBER):
        print('Header Found')
        header = readHeader(file)

    # logHeader(header)
    return header
233
+
234
+
235
+
def readNone(file, optcount):
    """Consume and discard *optcount* uint32 values from the stream."""
    for _ in range(optcount):
        bin_ops.readUInt32(file)
238
+
239
+
240
+
def readFlags(file, optcount):
    """Read *optcount* (flag-id, value) uint32 pairs into a normalized dict."""
    flags = {}
    for i in range(optcount):
        flag = bin_ops.readUInt32(file)
        value = bin_ops.readUInt32(file)
        flags[flagName(flag)] = flagValue(flag, value)
    # Flags 3/4/5 are the tangent-space swizzles; this raises KeyError when a
    # file omits them -- presumably all flag chunks carry 3-5, TODO confirm.
    printNormalMapSwizzel(flags[flagName(3)], flags[flagName(4)], flags[flagName(5)])
    return flags
248
+
249
+
250
+
def logHeader(xpsHeader):
    """Print every field of *xpsHeader* for debugging."""
    print("MAGIX:", xpsHeader.magic_number)
    print('VER MAYOR:', xpsHeader.version_mayor)
    print('VER MINOR:', xpsHeader.version_minor)
    print('NAME:', xpsHeader.xna_aral)
    print('SETTINGS LEN:', xpsHeader.settingsLen)
    print('MACHINE:', xpsHeader.machine)
    print('USR:', xpsHeader.user)
    print('FILES:', xpsHeader.files)
    # NOTE(review): readHeader never assigns ``settings``; presumably
    # XpsHeader defines it elsewhere -- verify against xps_types.
    print('SETTING:', xpsHeader.settings)
    print('DEFAULT POSE:', xpsHeader.pose)
261
+
262
+
263
+
def readBones(file, header):
    """Read the binary bone table: count, then per bone a name, parent index and head position.

    *header* is accepted for signature parity but not consulted here.
    """
    bones = []
    # Bone Count
    boneCount = bin_ops.readUInt32(file)

    for boneId in range(boneCount):
        boneName = readFilesString(file)
        parentId = bin_ops.readInt16(file)
        coords = readXYZ(file)

        xpsBone = xps_types.XpsBone(boneId, boneName, coords, parentId)
        bones.append(xpsBone)
    return bones
276
+
277
+
278
+
def readMeshes(file, xpsHeader, hasBones):
    """Read all meshes from a binary .mesh stream.

    The header's version decides whether per-uv tangents are present and
    whether each vertex stores a variable weight count (vs a fixed four).
    """
    meshes = []
    meshCount = bin_ops.readUInt32(file)

    hasHeader = bool(xpsHeader)

    # Headerless files are treated as version 0.
    verMayor = xpsHeader.version_mayor if hasHeader else 0
    verMinor = xpsHeader.version_minor if hasHeader else 0

    hasTangent = bin_ops.hasTangentVersion(verMayor, verMinor, hasHeader)
    hasVariableWeights = bin_ops.hasVariableWeights(verMayor, verMinor, hasHeader)

    for meshId in range(meshCount):
        # Name
        meshName = readFilesString(file)
        if not meshName:
            meshName = 'unnamed'
        # print('Mesh Name:', meshName)
        # uv Count
        uvLayerCount = bin_ops.readUInt32(file)
        # Textures
        textures = []
        textureCount = bin_ops.readUInt32(file)
        for texId in range(textureCount):
            # Only the basename is kept; stored paths are Windows-style.
            textureFile = ntpath.basename(readFilesString(file))
            # print('Texture file', textureFile)
            uvLayerId = bin_ops.readUInt32(file)

            xpsTexture = xps_types.XpsTexture(texId, textureFile, uvLayerId)
            textures.append(xpsTexture)

        # Vertices
        vertex = []
        vertexCount = bin_ops.readUInt32(file)

        for vertexId in range(vertexCount):
            coord = readXYZ(file)
            normal = readXYZ(file)
            vertexColor = readVertexColor(file)

            uvs = []
            for uvLayerId in range(uvLayerCount):
                uvVert = readUvVert(file)
                uvs.append(uvVert)
                if hasTangent:
                    # Old-format tangents are read per uv layer and discarded.
                    tangent = read4Float(file)

            boneWeights = []
            if hasBones:
                # With zero bones there are no weights to read.

                boneIdx = []
                boneWeight = []
                if hasVariableWeights:
                    weightsCount = bin_ops.readInt16(file)
                else:
                    weightsCount = 4

                # Indices first, then the matching weights.
                for x in range(weightsCount):
                    boneIdx.append(bin_ops.readInt16(file))
                for x in range(weightsCount):
                    boneWeight.append(bin_ops.readSingle(file))

                for idx in range(len(boneIdx)):
                    boneWeights.append(
                        xps_types.BoneWeight(boneIdx[idx], boneWeight[idx]))
            xpsVertex = xps_types.XpsVertex(
                vertexId, coord, normal, vertexColor, uvs, boneWeights)
            vertex.append(xpsVertex)

        # Faces
        faces = []
        triCount = bin_ops.readUInt32(file)
        for i in range(triCount):
            triIdxs = readTriIdxs(file)
            faces.append(triIdxs)
        xpsMesh = xps_types.XpsMesh(
            meshName, textures, vertex, faces, uvLayerCount)
        meshes.append(xpsMesh)
    return meshes
358
+
359
+
360
+
def readIoStream(filename):
    """Load *filename* fully into memory and return it as a rewound BytesIO."""
    with open(filename, "rb") as source:
        buffered = io.BytesIO(source.read())
    return buffered
364
+
365
+
366
+
def readXpsModel(filename):
    """Read a binary XPS model file and return its XpsData (header, bones, meshes)."""
    print('File:', filename)

    ioStream = readIoStream(filename)
    print('Reading Header')
    # None when the file has no magic-number header.
    xpsHeader = findHeader(ioStream)
    print('Reading Bones')
    bones = readBones(ioStream, xpsHeader)
    # Weight data only exists in the stream when at least one bone is present.
    hasBones = bool(bones)
    print('Read', len(bones), 'Bones')
    print('Reading Meshes')
    meshes = readMeshes(ioStream, xpsHeader, hasBones)
    print('Read', len(meshes), 'Meshes')

    xpsData = xps_types.XpsData(xpsHeader, bones, meshes)
    return xpsData
382
+
383
+
384
+
def readDefaultPose(file, poseLenghtUnround, poseBones):
    """Read the header-embedded default pose.

    *poseLenghtUnround* is the unpadded pose byte length, *poseBones* the
    number of pose lines. The pose block is padded to ROUND_MULTIPLE, so the
    padding bytes are consumed and discarded after the lines are read.
    """
    # print('Import Pose')
    poseBytes = b''
    if poseLenghtUnround:
        for i in range(0, poseBones):
            poseBytes += file.readline()

        poseLenght = bin_ops.roundToMultiple(
            poseLenghtUnround, xps_const.ROUND_MULTIPLE)
        emptyBytes = poseLenght - poseLenghtUnround
        file.read(emptyBytes)
    poseString = bin_ops.decodeBytes(poseBytes)
    bonesPose = read_ascii_xps.poseData(poseString)
    return bonesPose
398
+
399
+
400
+
if __name__ == "__main__":
    # Standalone smoke test: parse a local binary .mesh file.
    readfilename = r'G:\3DModeling\XNALara\XNALara_XPS\Young Samus\Generic_Item.mesh'

    print('----READ START----')
    xpsData = readXpsModel(readfilename)
    print('----READ END----')
+34
xnalara_io_Tools/timing.py
+34
xnalara_io_Tools/timing.py
···
1
+
import time
2
+
3
+
import io
4
+
import cProfile
5
+
import pstats
6
+
7
+
8
+
def profile(fnc):
    """Create decorator function that uses cProfile to profile a function.

    The wrapped function's return value is passed through unchanged; the
    profile stats (sorted by cumulative time) are printed after each call.
    """
    # Local import keeps the module's top-level import block unchanged.
    from functools import wraps

    @wraps(fnc)  # Fixed: preserve the wrapped function's name/docstring.
    def inner(*args, **kwargs):
        pr = cProfile.Profile()
        pr.enable()
        retval = fnc(*args, **kwargs)
        pr.disable()
        s = io.StringIO()
        sortby = 'cumulative'
        ps = pstats.Stats(pr, stream=s).sort_stats(sortby)
        ps.print_stats()
        print(s.getvalue())
        return retval

    return inner
24
+
25
+
26
+
def timing(f):
    """Decorator: print how long each call to *f* takes, in milliseconds."""
    def wrap(*args, **kwargs):
        # Fixed: keyword arguments are now forwarded (the original dropped
        # them, raising TypeError for keyword calls).
        time1 = time.time()
        ret = f(*args, **kwargs)
        time2 = time.time()
        print('%s function took %0.3f ms' % (f.__name__,
                                             (time2 - time1) * 1000.0))
        return ret
    return wrap
+5
xnalara_io_Tools/utilities/color_utilities.py
+5
xnalara_io_Tools/utilities/color_utilities.py
+20
xnalara_io_Tools/utilities/mesh_utilities.py
+20
xnalara_io_Tools/utilities/mesh_utilities.py
···
1
+
import bpy
2
+
3
+
4
+
def create_split_normals(mesh_object: bpy.types.Object, normals: list[tuple[float, float, float]]):
    """Apply per-vertex custom split normals to *mesh_object*'s mesh data.

    Returns True when ``Mesh.validate`` had to correct the mesh.
    """
    mesh_data: bpy.types.Mesh = mesh_object.data

    # Blender 3.6 / 4.0 still require the legacy split-normals API
    # (create_normals_split / use_auto_smooth); newer versions do not.
    uses_legacy_api = bpy.app.version[:2] in [(3, 6), (4, 0)]

    if uses_legacy_api:
        mesh_data.create_normals_split()

    b_mesh_was_corrected = mesh_data.validate(clean_customdata=False)
    mesh_data.update(calc_edges=True)
    mesh_data.normals_split_custom_set_from_vertices(normals)

    if uses_legacy_api:
        mesh_data.use_auto_smooth = True

    return b_mesh_was_corrected
+172
xnalara_io_Tools/write_ascii_xps.py
+172
xnalara_io_Tools/write_ascii_xps.py
···
1
+
import io
2
+
import operator
3
+
4
+
from . import read_ascii_xps
5
+
from . import xps_const
6
+
from mathutils import Vector
7
+
8
+
9
+
def writeBones(xpsSettings, bones):
    """Serialize *bones* to ascii mesh format; returns a rewound StringIO.

    *xpsSettings* is accepted for signature parity with the other writers
    but is not consulted here.
    """
    buffer = io.StringIO()
    if bones:
        buffer.write('{:d} # bones\n'.format(len(bones)))

        for bone in bones:
            # A missing parent is encoded as index -1.
            parentIndex = -1 if bone.parentId is None else bone.parentId
            buffer.write('{}\n'.format(bone.name))
            buffer.write('{:d} # parent index\n'.format(parentIndex))
            buffer.write('{:.7G} {:.7G} {:.7G}\n'.format(*bone.co))
    buffer.seek(0)
    return buffer
25
+
26
+
27
+
def writeMeshes(xpsSettings, meshes):
    """Serialize *meshes* (sorted by name) to ascii mesh format; returns a rewound StringIO."""
    meshesString = io.StringIO()
    meshesString.write('{:d} # meshes\n'.format(len(meshes)))
    sortedMeshes = sorted(meshes, key=operator.attrgetter('name'))

    for mesh in sortedMeshes:
        # Name
        meshesString.write(mesh.name + '\n')
        # uv Count
        meshesString.write('{:d} # uv layers\n'.format(mesh.uvCount))
        # Textures
        meshesString.write('{:d} # textures\n'.format(len(mesh.textures)))
        for texture in mesh.textures:
            meshesString.write(texture.file + '\n')
            meshesString.write(
                '{:d} # uv layer index\n'.format(texture.uvLayer))

        # Vertices
        meshesString.write('{:d} # vertices\n'.format(len(mesh.vertices)))
        for vertex in mesh.vertices:
            meshesString.write(
                '{:.7G} {:.7G} {:.7G} # Coords\n'.format(*vertex.co))
            meshesString.write('{:.7G} {:.7G} {:.7G}\n'.format(*vertex.norm))
            meshesString.write('{:d} {:d} {:d} {:d}\n'.format(*vertex.vColor))

            for uv in vertex.uv:
                meshesString.write('{:.7G} {:.7G}\n'.format(*uv))
            # Tangent data is not written for the ascii format.
            # meshesString.write(write4float(xxx))

            # Build per-vertex format strings sized to this vertex's weight count.
            length = len(vertex.boneWeights)
            idFormatString = ' '.join(['{:d}', ] * length)
            weightFormatString = ' '.join(['{:.7G}', ] * length)

            # Sort first the biggest weights
            boneWeights = sorted(
                vertex.boneWeights,
                key=lambda bw: bw.weight,
                reverse=True)

            meshesString.write(
                (idFormatString + '\n').format(*[bw.id for bw in boneWeights]))
            meshesString.write(
                (weightFormatString + '\n').format(*[bw.weight for bw in boneWeights]))

        # Faces
        meshesString.write('{:d} # faces\n'.format(len(mesh.faces)))
        for face in mesh.faces:
            meshesString.write('{:d} {:d} {:d}\n'.format(*face))

    meshesString.seek(0)
    return meshesString
80
+
81
+
82
+
def writePose(xpsData):
    """Serialize a {boneName: XpsBonePose} mapping to pose-file text; returns a rewound StringIO.

    Bones are written sorted by name, each as
    ``name: rx ry rz tx ty tz sx sy sz`` with values rounded by the
    round* helpers.
    """
    poseString = io.StringIO()
    sortedPose = sorted(xpsData.items(), key=operator.itemgetter(0))

    for boneData in sortedPose:
        xpsBoneData = boneData[1]
        boneName = xpsBoneData.boneName
        rotDelta = roundRot(xpsBoneData.rotDelta)
        coordDelta = roundTrans(xpsBoneData.coordDelta)
        scale = roundScale(xpsBoneData.scale)

        x1 = '{}: '.format(boneName)
        x2 = '{:G} {:G} {:G} '.format(*rotDelta)
        x3 = '{:G} {:G} {:G} '.format(*coordDelta)
        x4 = '{:G} {:G} {:G} '.format(*scale)

        poseString.write(x1)
        poseString.write(x2)
        poseString.write(x3)
        poseString.write(x4)
        poseString.write('\n')

    poseString.seek(0)
    return poseString
106
+
107
+
108
+
def writeXpsPose(filename, xpsData):
    """Write the pose mapping *xpsData* to *filename* as ascii pose text."""
    ioStream = io.StringIO()
    print('Export Pose')
    ioStream.write(writePose(xpsData).read())
    ioStream.seek(0)
    writeIoStream(filename, ioStream)
114
+
115
+
116
+
def roundRot(vector):
    """Round a rotation delta to 1 decimal; ``+ 0`` normalizes -0.0 to 0.0."""
    rounded = (round(vector.x, 1) + 0,
               round(vector.y, 1) + 0,
               round(vector.z, 1) + 0)
    return Vector(rounded)
121
+
122
+
123
+
def roundTrans(vector):
124
+
x = round(vector.x, 4) + 0
125
+
y = round(vector.y, 4) + 0
126
+
z = round(vector.z, 4) + 0
127
+
return Vector((x, y, z))
128
+
129
+
130
+
def roundScale(vector):
131
+
x = round(vector.x, 3) + 0
132
+
y = round(vector.y, 3) + 0
133
+
z = round(vector.z, 3) + 0
134
+
return Vector((x, y, z))
135
+
136
+
137
+
def writeIoStream(filename, ioStream):
    """Write the (already rewound) text stream *ioStream* to *filename*."""
    with open(filename, "w", encoding=xps_const.ENCODING_WRITE) as a_file:
        a_file.write(ioStream.read())
140
+
141
+
142
+
def writeBoneDict(filename, boneDictList):
    """Write the bone-dictionary string *boneDictList* to *filename*."""
    # StringIO(initial) is already positioned at 0, ready to be read out.
    writeIoStream(filename, io.StringIO(boneDictList))
147
+
148
+
149
+
def writeXpsModel(xpsSettings, filename, xpsData):
    """Serialize bones then meshes of *xpsData* into one ascii .mesh file."""
    out = io.StringIO()
    print('Writing Bones')
    out.write(writeBones(xpsSettings, xpsData.bones).read())
    print('Writing Meshes')
    out.write(writeMeshes(xpsSettings, xpsData.meshes).read())
    out.seek(0)
    writeIoStream(filename, out)
157
+
158
+
159
+
if __name__ == "__main__":
    # Developer smoke test: round-trips an ascii model file.
    # Paths are absolute developer-machine paths; adjust before running.
    readfilename = r'G:\3DModeling\XNALara\XNALara_XPS\data\TESTING\Alice Returns - Mods\Alice 001 Fetish Cat\generic_item2.mesh.ascii'
    writefilename = r'G:\3DModeling\XNALara\XNALara_XPS\data\TESTING\Alice Returns - Mods\Alice 001 Fetish Cat\generic_item3.mesh.ascii'

    # Simulate XPS Data
    # from . import mock_xps_data
    # xpsData = mock_xps_data.mockData()

    # import XPS File
    xpsData = read_ascii_xps.readXpsModel(readfilename)

    print('----WRITE START----')
    # NOTE(review): writeXpsModel() is defined as (xpsSettings, filename,
    # xpsData); this call passes only two arguments and would raise a
    # TypeError — confirm the intended xpsSettings before running.
    writeXpsModel(writefilename, xpsData)
    print('----WRITE END----')
+245
xnalara_io_Tools/write_bin_xps.py
+245
xnalara_io_Tools/write_bin_xps.py
···
1
+
import io
2
+
import operator
3
+
4
+
from . import bin_ops
5
+
from . import read_bin_xps
6
+
from . import xps_const
7
+
8
+
9
+
def writeFilesString(string):
    """Encode *string* with the XPS variable-length (1-2 byte) length prefix.

    The first length byte holds length % LIMIT, with LIMIT added as a flag
    when the total length reaches LIMIT; in that case a second byte holds
    length // LIMIT. The encoded string bytes follow.
    """
    encoded = bin_ops.writeString(string)
    quotient, remainder = divmod(len(encoded), xps_const.LIMIT)

    result = bytearray()
    # First length byte (remainder, plus the continuation flag).
    first_byte = remainder
    if len(encoded) >= xps_const.LIMIT:
        first_byte += xps_const.LIMIT
    result.append(first_byte)
    if quotient:
        # Second length byte (number of full LIMIT chunks).
        result.append(quotient)
    result.extend(encoded)
    return result
30
+
31
+
32
+
def writeVertexColor(co):
    """Pack an RGBA color (4 components) into 4 single bytes."""
    packed = bytearray()
    for channel_index in range(4):
        packed.extend(bin_ops.writeByte(co[channel_index]))
    return packed
43
+
44
+
45
+
def writeUvVert(co):
    """Pack a 2D UV coordinate into two 32-bit floats (U then V)."""
    packed = bytearray()
    packed.extend(bin_ops.writeSingle(co[0]))  # U
    packed.extend(bin_ops.writeSingle(co[1]))  # V
    return packed
52
+
53
+
54
+
def writeXYZ(co):
    """Pack a 3-component vector (position/normal) into three 32-bit floats."""
    packed = bytearray()
    for axis_index in range(3):
        packed.extend(bin_ops.writeSingle(co[axis_index]))
    return packed
63
+
64
+
65
+
def write4Float(co):
    """Pack a 4-component vector into four 32-bit floats (x, y, z, w)."""
    packed = bytearray()
    for component_index in range(4):
        packed.extend(bin_ops.writeSingle(co[component_index]))
    return packed
76
+
77
+
78
+
def write4UInt16(co):
    """Pack four integer values as 16-bit ints (used for bone indices)."""
    # Note: delegates to bin_ops.writeInt16, matching the original encoder.
    packed = bytearray()
    for value_index in range(4):
        packed.extend(bin_ops.writeInt16(co[value_index]))
    return packed
89
+
90
+
91
+
def writeTriIdxs(co):
    """Pack one triangle's three vertex indices as 32-bit unsigned ints."""
    packed = bytearray()
    for corner_index in range(3):
        packed.extend(bin_ops.writeUInt32(co[corner_index]))
    return packed
100
+
101
+
102
+
def logHeader(xpsHeader):
    """Print every field of an XPS binary header, for debugging."""
    fields = (
        ("MAGIX:", xpsHeader.magic_number),
        ('VER MAYOR:', xpsHeader.version_mayor),
        ('VER MINOR:', xpsHeader.version_minor),
        ('NAME:', xpsHeader.xna_aral),
        ('SETTINGS LEN:', xpsHeader.settingsLen),
        ('MACHINE:', xpsHeader.machine),
        ('USR:', xpsHeader.user),
        ('FILES:', xpsHeader.files),
        ('SETTING:', xpsHeader.settings),
        ('DEFAULT POSE:', xpsHeader.pose),
    )
    for label, value in fields:
        print(label, value)
113
+
114
+
115
+
def writeHeader(xpsSettings, header):
    """Serialize the optional XPS binary header.

    Returns an empty bytearray when *header* is falsy (headerless format).
    """
    data = bytearray()
    if not header:
        return data

    data.extend(bin_ops.writeUInt32(header.magic_number))   # MagicNumber
    data.extend(bin_ops.writeUInt16(header.version_mayor))  # XPS model version
    data.extend(bin_ops.writeUInt16(header.version_minor))
    data.extend(writeFilesString(header.xna_aral))          # XNAaraL name
    data.extend(bin_ops.writeUInt32(header.settingsLen))    # settings len (uint32*4)
    data.extend(writeFilesString(header.machine))           # machine name
    data.extend(writeFilesString(header.user))              # user name
    data.extend(writeFilesString(header.files))             # source files
    data.extend(header.settings)                            # raw settings blob
    return data
137
+
138
+
139
+
def writeBones(xpsSettings, bones):
    """Serialize the bone table; empty bytearray when there are no bones."""
    data = bytearray()
    if not bones:
        return data

    data.extend(bin_ops.writeUInt32(len(bones)))
    for bone in bones:
        # A root bone (no parent) is stored as parent index -1.
        parent_index = -1 if bone.parentId is None else bone.parentId
        data.extend(writeFilesString(bone.name))
        data.extend(bin_ops.writeInt16(parent_index))
        data.extend(writeXYZ(bone.co))
    return data
154
+
155
+
156
+
def writeMeshes(xpsSettings, meshes):
    """Serialize all meshes (sorted by name) into the XPS binary layout.

    Per mesh: name, uv-layer count, textures, vertices, faces. The vertex
    weight layout depends on the target version: newer formats store a
    variable-length id/weight list, older ones exactly four pairs.
    """
    meshesArray = bytearray(bin_ops.writeUInt32(len(meshes)))
    sortedMeshes = sorted(meshes, key=operator.attrgetter('name'))

    verMayor = xpsSettings.versionMayor
    verMinor = xpsSettings.versionMinor
    hasHeader = bin_ops.hasHeader(xpsSettings.format)

    hasTangent = bin_ops.hasTangentVersion(verMayor, verMinor, hasHeader)
    hasVariableWeights = bin_ops.hasVariableWeights(verMayor, verMinor, hasHeader)

    for mesh in sortedMeshes:
        # Name
        meshesArray.extend(writeFilesString(mesh.name))
        # UV layer count
        meshesArray.extend(bin_ops.writeUInt32(mesh.uvCount))
        # Textures
        meshesArray.extend(bin_ops.writeUInt32(len(mesh.textures)))
        for texture in mesh.textures:
            meshesArray.extend(writeFilesString(texture.file))
            meshesArray.extend(bin_ops.writeUInt32(texture.uvLayer))

        # Vertices
        meshesArray.extend(bin_ops.writeUInt32(len(mesh.vertices)))
        for vertex in mesh.vertices:
            meshesArray.extend(writeXYZ(vertex.co))
            meshesArray.extend(writeXYZ(vertex.norm))
            meshesArray.extend(writeVertexColor(vertex.vColor))

            for uv in vertex.uv:
                meshesArray.extend(writeUvVert(uv))
                if hasTangent:
                    # Placeholder tangent; assumed recomputed by XPS on
                    # load — TODO confirm against the format reference.
                    meshesArray.extend(write4Float([1, 0, 0, 0]))

            # Sort the biggest weights first.
            boneWeights = sorted(
                vertex.boneWeights,
                key=lambda bw: bw.weight,
                reverse=True)

            if hasVariableWeights:
                # Variable layout: count, then all ids, then all weights.
                # (Was a pair of side-effect list comprehensions; plain
                # loops express the intent without building throwaway lists.)
                meshesArray.extend(bin_ops.writeUInt16(len(boneWeights)))
                for bw in boneWeights:
                    meshesArray.extend(bin_ops.writeUInt16(bw.id))
                for bw in boneWeights:
                    meshesArray.extend(bin_ops.writeSingle(bw.weight))
            else:
                # Fixed layout: exactly four id/weight pairs.
                meshesArray.extend(write4UInt16([bw.id for bw in boneWeights]))
                meshesArray.extend(write4Float([bw.weight for bw in boneWeights]))

        # Faces
        meshesArray.extend(bin_ops.writeUInt32(len(mesh.faces)))
        for face in mesh.faces:
            meshesArray.extend(writeTriIdxs(face))

    return meshesArray
212
+
213
+
214
+
def writeIoStream(filename, ioStream):
    """Dump the remaining bytes of *ioStream* to *filename* (binary mode)."""
    with open(filename, "wb") as output_file:
        output_file.write(ioStream.read())
217
+
218
+
219
+
def writeXpsModel(xpsSettings, filename, xpsData):
    """Serialize header, bones and meshes of *xpsData* to a binary .mesh file."""
    out = io.BytesIO()
    print('Writing Header')
    out.write(writeHeader(xpsSettings, xpsData.header))
    print('Writing Bones')
    out.write(writeBones(xpsSettings, xpsData.bones))
    print('Writing Meshes')
    out.write(writeMeshes(xpsSettings, xpsData.meshes))
    out.seek(0)
    writeIoStream(filename, out)
230
+
231
+
232
+
if __name__ == "__main__":
    # Developer smoke test: round-trips a binary model file.
    # Paths are absolute developer-machine paths; adjust before running.
    readfilename1 = r'G:\3DModeling\XNALara\XNALara_XPS\data\TESTING5\Drake\RECB DRAKE Pack_By DamianHandy\DRAKE Sneaking Suitxxz\Generic_Item - XPS pose.mesh'
    writefilename1 = r'G:\3DModeling\XNALara\XNALara_XPS\data\TESTING5\Drake\RECB DRAKE Pack_By DamianHandy\DRAKE Sneaking Suitxxz\Generic_Item - BLENDER pose.mesh'

    # Simulate XPS Data
    # from . import mock_xps_data
    # xpsData = mock_xps_data.mockData()

    # import XPS File
    xpsData = read_bin_xps.readXpsModel(readfilename1)

    print('----WRITE START----')
    # NOTE(review): writeXpsModel() is defined as (xpsSettings, filename,
    # xpsData); this call passes only two arguments and would raise a
    # TypeError — confirm the intended xpsSettings before running.
    writeXpsModel(writefilename1, xpsData)
    print('----WRITE END----')
+21
xnalara_io_Tools/xnal_preferences.py
+21
xnalara_io_Tools/xnal_preferences.py
···
1
+
import bpy
2
+
3
+
from .modules.ALXAddonUpdater.ALXAddonUpdater.ALX_AddonUpdaterUI import \
4
+
update_settings_ui
5
+
6
+
7
+
class XNAlaraMesh4X_AddonPreferences(bpy.types.AddonPreferences):
    """Addon preferences panel: auto-update toggle and check interval."""

    bl_idname = __package__

    # Master switch for the periodic update check.
    auto_check_update: bpy.props.BoolProperty(name="Auto-check for Update", description="If enabled, auto-check for updates using an interval", default=False)  # type:ignore

    # Interval between update checks, split into months/days/hours/minutes.
    updater_interval_months: bpy.props.IntProperty(name='Months', description="Number of months between checking for updates", default=0, min=0)  # type:ignore
    updater_interval_days: bpy.props.IntProperty(name='Days', description="Number of days between checking for updates", default=7, min=0, max=31)  # type:ignore
    updater_interval_hours: bpy.props.IntProperty(name='Hours', description="Number of hours between checking for updates", default=0, min=0, max=23)  # type:ignore
    updater_interval_minutes: bpy.props.IntProperty(name='Minutes', description="Number of minutes between checking for updates", default=0, min=0, max=59)  # type:ignore

    def draw(self, context: bpy.types.Context):
        """Delegate drawing to the shared updater settings UI."""
        layout = self.layout

        update_settings_ui(context, layout)
+22
xnalara_io_Tools/xps_const.py
+22
xnalara_io_Tools/xps_const.py
···
1
+
# Constants shared by the XPS/XNALara read and write modules.

MAGIC_NUMBER = 323232      # written as the first uint32 of a binary header
XPS_VERSION_MAYOR = 3      # format version emitted by this addon
XPS_VERSION_MINOR = 15
XNA_ARAL = 'XNAaraL'       # creator tag stored in the binary header
SETTINGS_LEN = 1080        # byte length of the raw settings blob in the header
LIMIT = 128                # flag/threshold of the variable-length string prefix
STRLEN = 275               # maximum string length — TODO confirm usage

ROUND_MULTIPLE = 4         # rounding multiple — presumably for padding; verify

ENCODING_READ = 'utf-8-sig'   # tolerates a UTF-8 BOM when reading ascii files
ENCODING_WRITE = 'utf-8'

# Flags
BACK_FACE_CULLING = 'backFaceCulling'
ALWAYS_FORCE_CULLING = 'alwaysForceCulling'
MODEL_CAST_SHADOWS = 'modelCastShadows'
TANGENT_SPACE_RED = 'TangentSpaceRed'
TANGENT_SPACE_GREEN = 'TangentSpaceGreen'
TANGENT_SPACE_BLUE = 'TangentSpaceBlue'
GLOSS = 'gloss'
HAS_BONE_DIRECTIONS = 'hasBoneDirections'
+584
xnalara_io_Tools/xps_material.py
+584
xnalara_io_Tools/xps_material.py
···
1
+
import math
2
+
3
+
from . import ascii_ops
4
+
from enum import Enum
5
+
6
+
7
+
# All available texture types:
8
+
class TextureType(Enum):
    """Texture roles recognized by XPS; the trailing numbers are the
    conventional slot order in a mesh's texture list."""

    DIFFUSE = 'diffuse'  # 1
    LIGHT = 'lightmap'  # 2
    BUMP = 'bump'  # 3
    MASK = 'mask'  # 4
    BUMP1 = 'bump1'  # 5
    BUMP2 = 'bump2'  # 6
    SPECULAR = 'specular'  # 7
    ENVIRONMENT = 'environment'  # 8
    EMISSION = 'emission'  # 9
18
+
19
+
20
+
class RenderType():
    """Parsed mesh-name metadata: render group number, mesh name,
    specularity and texture repeaters (filled in by makeRenderType)."""

    def __init__(self):
        # Every field starts unset; makeRenderType() populates them.
        for attribute in ('renderGroupNum', 'meshName', 'specularity',
                          'texRepeater1', 'texRepeater2', 'val4'):
            setattr(self, attribute, None)
29
+
30
+
31
+
class RenderGroup:
    """Rendering capabilities implied by an XPS render group number.

    Replaces a 43-branch copy-paste ``if`` chain with a data table:
    ``__init__`` assigns the defaults, then applies the per-group
    overrides from ``_OVERRIDES``. Unknown (or ``None``) group numbers
    keep the defaults, exactly as the original fall-through behaved.
    Groups 34/35 intentionally override only ``rgTexType``.
    """

    # Short aliases used only to keep the table below readable.
    _D = TextureType.DIFFUSE
    _L = TextureType.LIGHT
    _B = TextureType.BUMP
    _M = TextureType.MASK
    _B1 = TextureType.BUMP1
    _B2 = TextureType.BUMP2
    _S = TextureType.SPECULAR
    _E = TextureType.ENVIRONMENT
    _EM = TextureType.EMISSION

    # Attributes not listed for a group keep the defaults from __init__.
    _OVERRIDES = {
        1: dict(rgTexType=[_D, _L, _B, _M, _B1, _B2]),
        2: dict(rgBump1Rep=False, rgBump2Rep=False, rgTexCount=3, rgTexType=[_D, _L, _B]),
        3: dict(rgSpecular='No', rgBump1Rep=False, rgBump2Rep=False, rgTexCount=2, rgTexType=[_D, _L]),
        4: dict(rgBump1Rep=False, rgBump2Rep=False, rgTexCount=2, rgTexType=[_D, _B]),
        5: dict(rgSpecular='No', rgBump1Rep=False, rgBump2Rep=False, rgTexCount=1, rgTexType=[_D]),
        6: dict(rgAlpha=True, rgBump1Rep=False, rgBump2Rep=False, rgTexCount=2, rgTexType=[_D, _B]),
        7: dict(rgAlpha=True, rgSpecular='No', rgBump1Rep=False, rgBump2Rep=False, rgTexCount=1, rgTexType=[_D]),
        8: dict(rgAlpha=True, rgBump1Rep=False, rgBump2Rep=False, rgTexCount=3, rgTexType=[_D, _L, _B]),
        9: dict(rgAlpha=True, rgSpecular='No', rgBump1Rep=False, rgBump2Rep=False, rgTexCount=2, rgTexType=[_D, _L]),
        10: dict(rgShadding=False, rgSpecular='No', rgBump1Rep=False, rgBump2Rep=False, rgTexCount=1, rgTexType=[_D]),
        11: dict(rgShadding='Vertex', rgPosable=False, rgBump1Rep=False, rgBump2Rep=False, rgTexCount=2, rgTexType=[_D, _B]),
        12: dict(rgShadding='Vertex', rgAlpha=True, rgPosable=False, rgBump1Rep=False, rgBump2Rep=False, rgTexCount=2, rgTexType=[_D, _B]),
        13: dict(rgShadding=False, rgPosable=False, rgSpecular='No', rgBump1Rep=False, rgBump2Rep=False, rgTexCount=1, rgTexType=[_D]),
        14: dict(rgShadding=False, rgPosable=False, rgBump1Rep=False, rgBump2Rep=False, rgTexCount=2, rgTexType=[_D, _B]),
        15: dict(rgShadding=False, rgAlpha=True, rgPosable=False, rgBump1Rep=False, rgBump2Rep=False, rgTexCount=2, rgTexType=[_D, _B]),
        16: dict(rgPosable=False, rgSpecular='No', rgBump1Rep=False, rgBump2Rep=False, rgTexCount=1, rgTexType=[_D]),
        17: dict(rgPosable=False, rgSpecular='No', rgBump1Rep=False, rgBump2Rep=False, rgTexCount=2, rgTexType=[_D, _L]),
        18: dict(rgAlpha=True, rgPosable=False, rgSpecular='No', rgBump1Rep=False, rgBump2Rep=False, rgTexCount=1, rgTexType=[_D]),
        19: dict(rgAlpha=True, rgPosable=False, rgSpecular='No', rgBump1Rep=False, rgBump2Rep=False, rgTexCount=2, rgTexType=[_D, _L]),
        20: dict(rgAlpha=True, rgTexType=[_D, _L, _B, _M, _B1, _B2]),
        21: dict(rgShadding=False, rgAlpha=True, rgSpecular='No', rgBump1Rep=False, rgBump2Rep=False, rgTexCount=1, rgTexType=[_D]),
        22: dict(rgTexCount=7, rgTexType=[_D, _L, _B, _M, _B1, _B2, _S]),
        23: dict(rgAlpha=True, rgTexCount=7, rgTexType=[_D, _L, _B, _M, _B1, _B2, _S]),
        24: dict(rgBump1Rep=False, rgBump2Rep=False, rgTexCount=4, rgTexType=[_D, _L, _B, _S]),
        25: dict(rgAlpha=True, rgBump1Rep=False, rgBump2Rep=False, rgTexCount=4, rgTexType=[_D, _L, _B, _S]),
        26: dict(rgShadding='Yes/No', rgSpecular='Yes intensity', rgBump1Rep=False, rgBump2Rep=False, rgTexCount=4, rgTexType=[_D, _B, _E, _M]),
        27: dict(rgShadding='Yes/No', rgAlpha=True, rgSpecular='Yes intensity', rgBump1Rep=False, rgBump2Rep=False, rgTexCount=4, rgTexType=[_D, _B, _E, _M]),
        28: dict(rgShadding='Yes/No', rgSpecular='Yes intensity', rgTexType=[_D, _B, _M, _B1, _B2, _E]),
        29: dict(rgShadding='Yes/No', rgAlpha=True, rgSpecular='Yes intensity', rgTexType=[_D, _B, _M, _B1, _B2, _E]),
        30: dict(rgShadding='Yes/No', rgSpecular='Yes intensity', rgBump1Rep=False, rgBump2Rep=False, rgTexCount=3, rgTexType=[_D, _B, _EM]),
        31: dict(rgShadding='Yes/No', rgAlpha=True, rgSpecular='Yes intensity', rgBump1Rep=False, rgBump2Rep=False, rgTexCount=3, rgTexType=[_D, _B, _EM]),
        32: dict(rgBump1Rep=False, rgBump2Rep=False, rgTexCount=1, rgTexType=[_D]),
        33: dict(rgAlpha=True, rgBump1Rep=False, rgBump2Rep=False, rgTexCount=1, rgTexType=[_D]),
        34: dict(rgTexType=[_D, _B, _M, _S]),
        35: dict(rgTexType=[_D, _B, _M, _S]),
        36: dict(rgShadding='Yes/No', rgSpecular='Yes intensity', rgBump2Rep=False, rgTexCount=3, rgTexType=[_D, _B, _EM]),
        37: dict(rgShadding='Yes/No', rgAlpha=True, rgSpecular='Yes intensity', rgBump2Rep=False, rgTexCount=3, rgTexType=[_D, _B, _EM]),
        38: dict(rgShadding='Yes/No', rgSpecular='Yes intensity', rgBump2Rep=False, rgTexCount=4, rgTexType=[_D, _B, _S, _EM]),
        39: dict(rgShadding='Yes/No', rgAlpha=True, rgSpecular='Yes intensity', rgBump2Rep=False, rgTexCount=4, rgTexType=[_D, _B, _S, _EM]),
        40: dict(rgBump1Rep=False, rgBump2Rep=False, rgTexCount=3, rgTexType=[_D, _B, _S]),
        41: dict(rgAlpha=True, rgBump1Rep=False, rgBump2Rep=False, rgTexCount=3, rgTexType=[_D, _B, _S]),
        42: dict(rgBump1Rep=False, rgBump2Rep=False, rgSpec1Rep=True, rgTexCount=3, rgTexType=[_D, _B, _S]),
        43: dict(rgAlpha=True, rgBump1Rep=False, rgBump2Rep=False, rgSpec1Rep=True, rgTexCount=3, rgTexType=[_D, _B, _S]),
    }

    def __init__(self, renderType):
        self.renderType = renderType
        self.renderGroupNum = renderType.renderGroupNum
        # Defaults (kept for unknown group numbers, as before).
        self.rgShadding = 'Yes'
        self.rgAlpha = False
        self.rgPosable = True
        self.rgSpecular = 'Yes'
        self.rgBump1Rep = True
        self.rgBump2Rep = True
        self.rgSpec1Rep = False
        self.rgTexCount = 6
        self.rgTexType = [
            TextureType.DIFFUSE,
            TextureType.MASK,
            TextureType.MASK,
            TextureType.MASK,
            TextureType.MASK,
            TextureType.MASK]

        for attribute, value in self._OVERRIDES.get(self.renderGroupNum, {}).items():
            # Copy list values so instances never share mutable state.
            setattr(self, attribute, list(value) if isinstance(value, list) else value)
497
+
498
+
499
+
def makeRenderType(meshFullName):
    """Parse an XPS mesh name ("group_name_spec_rep1_rep2[_val4]") into a
    RenderType.

    When the first token is not numeric the whole name is treated as the
    mesh name and the remaining tokens shift left by one. Fallback values:
    group 5, name 'mesh', specularity 1, repeaters 0.
    """
    mat = meshFullName.split("_")
    maxLen = 8
    # Complete the array with None
    mat = mat + [None] * (maxLen - len(mat))

    renderType = RenderType()

    # Defaults used when a field is missing or not parseable.
    renderGroupNum = 5
    meshName = 'mesh'
    specularity = 1
    texRepeater1 = 0
    texRepeater2 = 0

    # ascii_ops.getFloat presumably returns NaN for non-numeric input —
    # TODO confirm against ascii_ops.
    renderGroupFloat = ascii_ops.getFloat(mat[0])
    # meshName = mat[1]
    # specularityFloat = ascii_ops.getFloat(mat[2])
    # texRepeater1Float = ascii_ops.getFloat(mat[3])
    # texRepeater2Float = ascii_ops.getFloat(mat[4])

    if math.isnan(renderGroupFloat):
        # No leading group number: tokens shift left by one.
        meshName = mat[0]
        specularityFloat = ascii_ops.getFloat(mat[1])
        texRepeater1Float = ascii_ops.getFloat(mat[2])
        texRepeater2Float = ascii_ops.getFloat(mat[3])
    else:
        renderGroupNum = int(renderGroupFloat)
        meshName = mat[1]
        specularityFloat = ascii_ops.getFloat(mat[2])
        texRepeater1Float = ascii_ops.getFloat(mat[3])
        texRepeater2Float = ascii_ops.getFloat(mat[4])

    # Truthiness check means a parsed value of 0 (or 0.0) is discarded and
    # the default kept — presumably intentional; verify if 0 is ever valid.
    if specularityFloat and not math.isnan(specularityFloat):
        specularity = specularityFloat
    if texRepeater1Float and not math.isnan(texRepeater1Float):
        texRepeater1 = texRepeater1Float
    if texRepeater2Float and not math.isnan(texRepeater2Float):
        texRepeater2 = texRepeater2Float
    # NOTE(review): in the NaN branch above the extra token would sit at
    # mat[4], not mat[5] — confirm whether this index should depend on the
    # branch taken.
    if mat[5]:
        renderType.val4 = mat[5]

    renderType.renderGroupNum = renderGroupNum
    renderType.meshName = meshName
    renderType.specularity = specularity
    renderType.texRepeater1 = texRepeater1
    renderType.texRepeater2 = texRepeater2

    return renderType
547
+
548
+
549
+
def makeRenderTypeName(renderType):
    """Rebuild the underscore-separated mesh name from a RenderType."""
    parts = []
    # Group number uses a truthiness test (0/None both skipped), matching
    # how the name is parsed.
    if renderType.renderGroupNum:
        parts.append(str(renderType.renderGroupNum))
    for attribute in ('meshName', 'specularity', 'texRepeater1',
                      'texRepeater2', 'val4'):
        value = getattr(renderType, attribute)
        if value is not None:
            parts.append(str(value))
    return "_".join(parts)
567
+
568
+
569
+
def texScaleOffset(scale):
    """Return the UV offset that keeps a texture centered for a given repeat
    scale (odd and even repeats need a different half-texel shift)."""
    return (scale / 2.0) - ((int(scale) - 1) // 2) - 0.5
572
+
573
+
574
+
def scaleTex(textureSlot, texScale):
    """Apply a uniform UV repeat to *textureSlot*, keeping it centered."""
    shift = texScaleOffset(texScale)
    textureSlot.scale = (texScale, texScale, 1)
    textureSlot.offset = (shift, -shift, 1)
578
+
579
+
580
+
if __name__ == "__main__":
    # Quick manual check: dump the defaults produced for an empty RenderType.
    group = RenderGroup(RenderType())
    print(group.__dict__)
    print(group.rgTexType)
+140
xnalara_io_Tools/xps_panels.py
+140
xnalara_io_Tools/xps_panels.py
···
1
+
import bpy
2
+
3
+
4
+
class _XpsPanels():
    """All XPS panel inherit from this."""

    # Panels live in the 3D viewport sidebar, under the "XPS" tab,
    # and are shown only in Object Mode.
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    bl_category = 'XPS'
    bl_context = 'objectmode'
11
+
12
+
13
+
class XPSToolsObjectPanel(_XpsPanels, bpy.types.Panel):
    """Main XPS Tools panel: model/pose import and export buttons."""

    bl_idname = 'XPS_PT_xps_tools_object'
    bl_label = 'XPS Tools'

    def draw(self, context):
        layout = self.layout

        import_section = layout.column()
        import_section.label(text='Import:')
        import_row = import_section.row(align=True)
        import_row.column(align=True).operator("xps_tools.import_model", text='Model', icon='NONE')
        import_row.column(align=True).operator('xps_tools.import_pose', text='Pose')

        export_section = layout.column()
        export_section.label(text="Export:")
        export_row = export_section.column().row(align=True)
        export_row.column(align=True).operator('xps_tools.export_model', text='Model')
        export_row.column(align=True).operator('xps_tools.export_pose', text='Pose')
39
+
40
+
41
+
class XPSToolsBonesPanel(_XpsPanels, bpy.types.Panel):
    """Bone utilities: visibility, bone dictionaries, renaming, connecting."""

    bl_idname = 'XPS_PT_xps_tools_bones'
    bl_label = 'XPS Bones'

    @classmethod
    def poll(cls, context):
        # Visible only when at least one selected object is an armature.
        return any(obj.type == 'ARMATURE' for obj in context.selected_objects)

    def draw(self, context):
        layout = self.layout
        # Two column() calls kept to preserve the original vertical spacing.
        layout.column()
        section = layout.column()

        section.label(text='Hide Bones:')
        group = section.column(align=True)
        row = group.row(align=True)
        row.operator('xps_tools.bones_hide_by_name', text='Unused')
        row.operator('xps_tools.bones_hide_by_vertex_group', text='Vertex Group')
        group.row(align=True).operator('xps_tools.bones_show_all', text='Show All')

        section = layout.column()
        section.label(text='BoneDict:')
        group = section.column(align=True)
        group.row(align=True).operator('xps_tools.bones_dictionary_generate', text='Generate BoneDict')
        group.row(align=True).operator('xps_tools.bones_dictionary_rename', text='Rename Bones')
        group.row(align=True).operator('xps_tools.bones_dictionary_restore_name', text='Restore Names')

        section = layout.column()
        section.label(text='Rename Bones:')
        group = section.column(align=True)
        group.row(align=True).operator('xps_tools.bones_rename_to_blender', text='XPS to Blender')
        group.row(align=True).operator('xps_tools.bones_rename_to_xps', text='Blender To XPS')

        section = layout.column()
        section.label(text='Connect Bones:')
        group = section.column(align=True)
        group.row(align=True).operator(
            'xps_tools.bones_connect',
            text='Connect All').connectBones = True
        group.row(align=True).operator(
            'xps_tools.bones_connect',
            text='Disconnect All').connectBones = False

        section.label(text='New Rest Pose:')
        group = section.column(align=True)
        group.row(align=True).operator(
            'xps_tools.new_rest_pose',
            text='New Rest Pose')
107
+
108
+
109
+
class XPSToolsAnimPanel(_XpsPanels, bpy.types.Panel):
    """Sidebar panel exposing XPS pose-sequence (animation) tools."""

    bl_idname = 'XPS_PT_xps_tools_anim'
    bl_label = 'XPS Anim'

    @classmethod
    def poll(cls, context):
        # Visible only while at least one armature is selected.
        return bool(
            next(
                (obj for obj in context.selected_objects if obj.type == 'ARMATURE'),
                None))

    def draw(self, context):
        """Draw the import and export buttons for pose sequences."""
        layout = self.layout
        col = layout.column()

        # col.separator()
        col = layout.column()

        col.label(text='Import:')
        c = col.column(align=True)
        r = c.row(align=True)
        r.operator(
            'xps_tools.import_poses_to_keyframes',
            text='Poses to Keyframes')

        # col.separator()
        col = layout.column()

        col.label(text='Export:')
        c = col.column(align=True)
        r = c.row(align=True)
        r.operator('xps_tools.export_frames_to_poses', text='Frames to Poses')
+974
xnalara_io_Tools/xps_tools.py
+974
xnalara_io_Tools/xps_tools.py
···
1
+
import os
2
+
3
+
import bpy
4
+
import bpy.utils.previews as previews
5
+
from bpy_extras.io_utils import (ExportHelper, ImportHelper,
6
+
_check_axis_conversion, axis_conversion,
7
+
orientation_helper, path_reference_mode)
8
+
9
+
from . import (export_xnalara_model, export_xnalara_pose, import_xnalara_model,
10
+
import_xnalara_pose, material_creator, xps_types)
11
+
12
+
uv_x_displace = 0
13
+
uv_y_displace = 0
14
+
15
+
16
+
class CustomExportHelper(ExportHelper):
    """ExportHelper variant whose ``check`` re-applies the chosen extension.

    Unlike the stock helper, when the extension needs fixing it rebuilds the
    file name from its stem, so a wrong extension is replaced instead of
    appended to.
    """

    def check(self, context):
        """Normalize ``self.filepath``'s extension.

        Returns True when either the path or the axis settings changed, which
        tells Blender to redraw the file browser.
        """
        # Uses the module-level `os` import; the redundant local import was removed.
        change_ext = False
        change_axis = _check_axis_conversion(self)

        check_extension = self.check_extension

        if check_extension is not None:
            filepath = self.filepath
            # Only act when a file name (not just a directory) was given.
            if os.path.basename(filepath):
                filepath = bpy.path.ensure_ext(filepath,
                                               self.filename_ext
                                               if check_extension
                                               else "")

                if filepath != self.filepath:
                    # Strip the old extension from the name, then re-apply the
                    # selected one, so ".mesh" becomes ".xps" rather than ".mesh.xps".
                    head, tail = os.path.split(self.filepath)
                    filepath = os.path.splitext(tail)[0]
                    filepath = bpy.path.ensure_ext(filepath,
                                                   self.filename_ext
                                                   if check_extension
                                                   else "")
                    self.filepath = os.path.join(head, filepath)
                    change_ext = True

        return (change_ext or change_axis)
45
+
46
+
47
+
class Import_Xps_Model_Op(bpy.types.Operator, ImportHelper):
    """Load an XNALara model File (.ascii/.mesh/.xps)."""

    bl_idname = "xps_tools.import_model"
    bl_label = "Import XNALara/XPS Model"
    bl_space_type = "PROPERTIES"
    bl_region_type = "WINDOW"
    bl_options = {'REGISTER', 'UNDO'}

    filename_ext = ".mesh"

    # List of operator properties, the attributes will be assigned
    # to the class instance from the operator settings before calling.

    # filter File Extension
    filter_glob: bpy.props.StringProperty(
        default="*.ascii;*.mesh;*.xps",
        options={'HIDDEN'},
    )  # type:ignore

    uvDisplX: bpy.props.IntProperty(
        name="X",
        description="Displace UV X axis",
        default=uv_x_displace,
    )  # type:ignore

    uvDisplY: bpy.props.IntProperty(
        name="Y",
        description="Displace UV Y axis",
        default=uv_y_displace,
    )  # type:ignore

    impDefPose: bpy.props.BoolProperty(
        name="Default Pose",
        description="Import Default Pose",
        default=False,
    )  # type:ignore

    markSeams: bpy.props.BoolProperty(
        name="Mark Seams",
        # FIX: tooltip typo "the edged merged" -> "the edges merged".
        description="Mark as Seams the edges merged by the addon",
        default=True,
    )  # type:ignore

    vColors: bpy.props.BoolProperty(
        name="Vertex Colors",
        description="Import Vertex Colors",
        default=True,
    )  # type:ignore

    joinMeshRips: bpy.props.BoolProperty(
        name="Merge Doubles by Normal",
        description="Merge vertices with the same position and normal",
        default=True,
    )  # type:ignore

    joinMeshParts: bpy.props.BoolProperty(
        name="Join MeshParts",
        description="Join MeshParts (meshes that contain 'nPart!' in the name)",
        default=True,
    )  # type:ignore

    connectBones: bpy.props.BoolProperty(
        name="Connect Bones",
        # FIX: garbled tooltip "Connect Bones all bones".
        description="Connect all bones",
        default=True,
    )  # type:ignore

    autoIk: bpy.props.BoolProperty(
        name="AutoIK",
        description="Set AutoIK",
        default=True,
    )  # type:ignore

    importNormals: bpy.props.BoolProperty(
        name="Import Normals",
        description="Import Custom Normals",
        default=True,
    )  # type:ignore

    separate_optional_objects: bpy.props.BoolProperty(
        name="Separate Optional Objects",
        description="Separate into collection object marked as optional",
        default=True
    )  # type:ignore

    # Only needed if you want to add into a dynamic menu
    def menu_func(self, context):
        self.layout.operator_context = 'INVOKE_DEFAULT'
        self.layout.operator(
            Import_Xps_Model_Op.bl_idname,
            text="Text Export Operator")

    @classmethod
    def poll(cls, context):
        # Always can import
        return True

    def execute(self, context):
        """Build XpsImportSettings from the operator properties and run the importer."""
        xpsSettings = xps_types.XpsImportSettings(
            self.filepath,
            self.uvDisplX,
            self.uvDisplY,
            self.impDefPose,
            self.joinMeshRips,
            self.joinMeshParts,
            # Seams only make sense when rip-merging is enabled.
            self.markSeams and self.joinMeshRips,
            self.vColors,
            self.connectBones,
            self.autoIk,
            self.importNormals,
            self.separate_optional_objects
        )
        material_creator.create_group_nodes()
        status = import_xnalara_model.getInputFilename(xpsSettings)
        if status == '{NONE}':
            self.report({'ERROR'}, "ERROR File Format unrecognized")
        # FIX: the original only returned inside the error branch; a successful
        # import fell off the end (returning None), which makes Blender raise
        # "operator ... didn't return a set". Always return a status set.
        return {'FINISHED'}

    def draw(self, context):
        """Lay out the import options in the file-browser sidebar."""
        layout = self.layout
        col = layout.column(align=True)
        col.label(text='UV Displace')
        col.prop(self, "uvDisplX")
        col.prop(self, "uvDisplY")

        col = layout.column(align=True)
        col.label(text='Mesh')
        col.prop(self, "joinMeshParts")
        col.prop(self, "joinMeshRips")
        col.prop(self, "separate_optional_objects")

        sub = col.row()
        col.prop(self, "importNormals")
        sub.prop(self, "markSeams")
        col.prop(self, "vColors")

        # markSeams is only usable together with joinMeshRips.
        sub.enabled = self.joinMeshRips
        self.markSeams = self.joinMeshRips and self.markSeams

        col = layout.column(align=True)
        col.label(text='Armature')
        col.prop(self, "impDefPose")
        col.prop(self, "connectBones")
        col.prop(self, "autoIk")
197
+
198
+
199
+
class Export_Xps_Model_Op(bpy.types.Operator, CustomExportHelper):
    """Save an XNALara model File (.ascii/.mesh/.xps)."""

    bl_idname = "xps_tools.export_model"
    bl_label = "Export XNALara/XPS Model"
    bl_space_type = "PROPERTIES"
    bl_region_type = "WINDOW"
    bl_options = {'REGISTER'}

    # The chosen format doubles as the output file extension.
    filename_ext: bpy.props.EnumProperty(
        name='Format',
        description='Choose Export Format',
        items=(
            ('.xps', 'XPS', 'Export as XPS Binary format (.xps)'),
            ('.mesh', 'MESH', 'Export as XnaLara/XPS Binary format (.mesh)'),
            ('.ascii', 'ASCII', 'Export as XnaLara/XPS Ascii format (.ascii)'),
        ),
        default='.xps',
    )  # type:ignore

    xps_version_mayor: bpy.props.EnumProperty(
        name='FormatVersion',
        description='Fixed 4 bone weights or unlimited formats',
        items=(
            # FIX: tooltip typo "compatibli" -> "compatible".
            ('3', 'V3', 'Supports Unlimited Bone Weights (compatible with XPS 1.8.9)'),
            ('2', 'V2', 'Supports 4 Bone Weights'),
        ),
        default='3',
    )  # type:ignore

    xps_version_minor: bpy.props.EnumProperty(
        name='FormatVersionMinor',
        # FIX: description was copy-pasted from xps_version_mayor.
        description='XPS format minor version',
        items=(
            ('15', '15', 'XPS version minor'),
        ),
        default='15',
        options={'HIDDEN'},
    )  # type:ignore

    # List of operator properties, the attributes will be assigned
    # to the class instance from the operator settings before calling.

    # filter File Extension
    filter_glob: bpy.props.StringProperty(
        default="*.ascii;*.mesh;*.xps",
        options={'HIDDEN'},
    )  # type:ignore

    uvDisplX: bpy.props.IntProperty(
        name="X",
        description="Displace UV X axis",
        default=uv_x_displace,
    )  # type:ignore

    uvDisplY: bpy.props.IntProperty(
        name="Y",
        description="Displace UV Y axis",
        default=uv_y_displace,
    )  # type:ignore

    expDefPose: bpy.props.BoolProperty(
        name="Default Pose",
        description="Export Default Pose",
        default=False,
    )  # type:ignore

    exportOnlySelected: bpy.props.BoolProperty(
        name="Export Only Selected",
        description="Export only selected objects",
        default=True,
    )  # type:ignore

    exportNormals: bpy.props.BoolProperty(
        name="Export Normals",
        description="Export Custom Normals",
        default=True,
    )  # type:ignore

    preserveSeams: bpy.props.BoolProperty(
        name="Preserve Seams",
        description="Split Edges marked as seams. They are marked as seams when imported back",
        default=True,
    )  # type:ignore

    vColors: bpy.props.BoolProperty(
        name="Vertex Colors",
        description="Export Vertex Colors",
        default=True,
    )  # type:ignore

    @classmethod
    def poll(cls, context):
        # At least one mesh must be selected to export anything.
        return bool(
            next(
                (obj for obj in context.selected_objects if obj.type == 'MESH'),
                None))

    def execute(self, context):
        """Collect the export settings and write the model file."""
        xpsSettings = xps_types.XpsExportSettings(
            filename=self.filepath,
            format=self.filename_ext,
            uvDisplX=self.uvDisplX,
            uvDisplY=self.uvDisplY,
            exportOnlySelected=self.exportOnlySelected,
            expDefPose=self.expDefPose,
            preserveSeams=self.preserveSeams,
            vColors=self.vColors,
            exportNormals=self.exportNormals,
            versionMayor=int(self.xps_version_mayor),
            versionMinor=int(self.xps_version_minor),
        )
        export_xnalara_model.getOutputFilename(xpsSettings)
        return {'FINISHED'}

    def draw(self, context):
        """Lay out the export options in the file-browser sidebar."""
        layout = self.layout

        layout.prop(self, "exportOnlySelected")

        layout.label(text="File Format:")
        layout.prop(self, "filename_ext", expand=True)
        # The binary-format version only applies to .xps files.
        if (self.filename_ext == '.xps'):
            layout.prop(self, "xps_version_mayor", expand=True)

        col = layout.column(align=True)
        col.label(text='Mesh')
        col.prop(self, "preserveSeams")
        col.prop(self, "exportNormals")
        col.prop(self, "vColors")

        col = layout.column(align=True)
        col.label(text='UV Displace')
        col.prop(self, "uvDisplX")
        col.prop(self, "uvDisplY")

        layout.prop(self, "expDefPose")
336
+
337
+
338
+
class Import_Xps_Pose_Op(bpy.types.Operator, ImportHelper):
    """Load an XNALara pose File."""

    bl_idname = "xps_tools.import_pose"
    bl_label = "Import XNALara/XPS Pose"
    bl_space_type = "PROPERTIES"
    bl_region_type = "WINDOW"
    bl_options = {'REGISTER', 'UNDO'}

    filename_ext = '.pose'

    # Restrict the file browser to .pose files.
    filter_glob: bpy.props.StringProperty(
        default="*.pose",
        options={'HIDDEN'},
    )  # type:ignore

    @classmethod
    def poll(cls, context):
        # The pose is applied to the active object, which must be an armature.
        active = context.active_object
        return active and active.type == 'ARMATURE'

    def execute(self, context):
        # Delegate to the pose importer with the chosen file path.
        import_xnalara_pose.getInputFilename(self.filepath)
        return {'FINISHED'}
365
+
366
+
367
+
class Export_Xps_Pose_Op(bpy.types.Operator, ExportHelper):
    """Save an XNALara pose File."""

    bl_idname = "xps_tools.export_pose"
    bl_label = "Export XNALara/XPS Pose"
    bl_space_type = "PROPERTIES"
    bl_region_type = "WINDOW"
    bl_options = {'REGISTER'}

    filename_ext = '.pose'

    # Restrict the file browser to .pose files.
    filter_glob: bpy.props.StringProperty(
        default="*.pose",
        options={'HIDDEN'},
    )  # type:ignore

    @classmethod
    def poll(cls, context):
        # The pose is read from the active object, which must be an armature.
        active = context.active_object
        return active and active.type == 'ARMATURE'

    def execute(self, context):
        # Delegate to the pose exporter with the chosen file path.
        export_xnalara_pose.getOutputFilename(self.filepath)
        return {'FINISHED'}
394
+
395
+
396
+
class Import_Poses_To_Keyframes_Op(bpy.types.Operator, ImportHelper):
    """Load a sequence of poses as keyframes."""

    bl_idname = "xps_tools.import_poses_to_keyframes"
    bl_label = "Import poses to keyframes"
    bl_space_type = "PROPERTIES"
    bl_region_type = "WINDOW"
    bl_options = {'REGISTER', 'UNDO'}

    filename_ext = '.pose'

    # List of operator properties, the attributes will be assigned
    # to the class instance from the operator settings before calling.

    # filter File Extension
    filter_glob: bpy.props.StringProperty(
        default="*.pose",
        options={'HIDDEN'},
    )  # type:ignore

    @classmethod
    def poll(cls, context):
        # Keyframes are created on the active object, which must be an armature.
        return context.active_object and context.active_object.type == 'ARMATURE'

    def execute(self, context):
        # Hands the chosen file to the pose-sequence importer.
        import_xnalara_pose.getInputPoseSequence(self.filepath)
        return {'FINISHED'}
423
+
424
+
425
+
class Export_Frames_To_Poses_Op(bpy.types.Operator, CustomExportHelper):
    """Save frames as poses."""

    bl_idname = "xps_tools.export_frames_to_poses"
    bl_label = "Export frames to poses"
    bl_space_type = "PROPERTIES"
    bl_region_type = "WINDOW"
    bl_options = {'REGISTER'}

    filename_ext = '.pose'

    # Restrict the file browser to .pose files.
    filter_glob: bpy.props.StringProperty(
        default="*.pose",
        options={'HIDDEN'},
    )  # type:ignore

    @classmethod
    def poll(cls, context):
        # Frames are sampled from the active object, which must be an armature.
        active = context.active_object
        return active and active.type == 'ARMATURE'

    def execute(self, context):
        # Delegate to the pose-sequence exporter with the chosen base path.
        export_xnalara_pose.getOutputPoseSequence(self.filepath)
        return {'FINISHED'}
452
+
453
+
454
+
class ArmatureBoneDictGenerate_Op(bpy.types.Operator):
    """Generate a BoneDict from armature."""

    bl_idname = 'xps_tools.bones_dictionary_generate'
    bl_label = 'Generate BoneDict'
    bl_description = 'Generate a BoneDict from active armature'
    bl_space_type = "PROPERTIES"
    bl_region_type = "WINDOW"
    bl_options = {'REGISTER'}

    filename_ext = '.txt'
    check_extension = True

    # List of operator properties, the attributes will be assigned
    # to the class instance from the operator settings before calling.
    filepath: bpy.props.StringProperty(
        name="File Path",
        description="Bone Dictionary File",
        maxlen=1024,
        subtype='FILE_PATH',
    )  # type:ignore

    # filter File Extension
    filter_glob: bpy.props.StringProperty(
        default="*.txt",
        options={'HIDDEN'},
    )  # type:ignore

    @classmethod
    def poll(cls, context):
        # FIX: the original returned None implicitly when there was no active
        # object; return an explicit bool in every case.
        return bool(context.active_object and context.active_object.type == 'ARMATURE')

    def execute(self, context):
        """Write the active armature's bone names to the BoneDict file."""
        armatureObj = context.active_object
        export_xnalara_model.boneDictGenerate(self.filepath, armatureObj)
        return {'FINISHED'}

    def invoke(self, context, event):
        """Open the file browser, defaulting to 'BoneDict.txt'."""
        if not self.filepath:
            self.filepath = 'BoneDict.txt'
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def check(self, context):
        """Keep the '.txt' extension on the chosen path; True means redraw."""
        # Uses the module-level `os`; the redundant local import was removed.
        change_ext = False
        check_extension = self.check_extension

        if check_extension is not None:
            filepath = self.filepath
            if os.path.basename(filepath):
                filepath = bpy.path.ensure_ext(filepath,
                                               self.filename_ext
                                               if check_extension
                                               else "")

                if filepath != self.filepath:
                    self.filepath = filepath
                    change_ext = True

        return (change_ext)
516
+
517
+
518
+
class ArmatureBoneDictRename_Op(bpy.types.Operator):
    """Rename the selected armature's bones using a BoneDict file."""

    bl_idname = 'xps_tools.bones_dictionary_rename'
    bl_label = 'Dictionary Rename'
    bl_description = 'Use BoneDict to Rename Bones'
    bl_space_type = "PROPERTIES"
    bl_region_type = "WINDOW"
    bl_options = {'REGISTER', 'UNDO'}

    filename_ext = '.txt'
    check_extension = True

    # List of operator properties, the attributes will be assigned
    # to the class instance from the operator settings before calling.
    filepath: bpy.props.StringProperty(
        name="File Path",
        description="Bone Dictionary File",
        maxlen=1024,
        subtype='FILE_PATH',
    )  # type:ignore

    # filter File Extension
    filter_glob: bpy.props.StringProperty(
        default="*.txt",
        options={'HIDDEN'},
    )  # type:ignore

    @classmethod
    def poll(cls, context):
        # Enabled while any selected object is an armature.
        return bool(
            next(
                (obj for obj in context.selected_objects if obj.type == 'ARMATURE'),
                None))

    def execute(self, context):
        """Apply the BoneDict renames to the first selected armature."""
        armatureObj = next((obj for obj in context.selected_objects if obj.type == 'ARMATURE'), None)
        import_xnalara_model.boneDictRename(self.filepath, armatureObj)
        return {'FINISHED'}

    def invoke(self, context, event):
        """Open the file browser, defaulting to 'BoneDict.txt'."""
        if not self.filepath:
            self.filepath = 'BoneDict.txt'
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def check(self, context):
        """Keep the '.txt' extension on the chosen path; True means redraw."""
        # Uses the module-level `os`; the redundant local import was removed.
        change_ext = False
        check_extension = self.check_extension

        if check_extension is not None:
            filepath = self.filepath
            if os.path.basename(filepath):
                filepath = bpy.path.ensure_ext(filepath,
                                               self.filename_ext
                                               if check_extension
                                               else "")

                if filepath != self.filepath:
                    self.filepath = filepath
                    change_ext = True

        return (change_ext)
580
+
581
+
582
+
class ArmatureBoneDictRestore_Op(bpy.types.Operator):
    """Restore the selected armature's original bone names from a BoneDict file."""

    bl_idname = 'xps_tools.bones_dictionary_restore_name'
    bl_label = 'Dictionary Restore Names'
    bl_description = 'Use BoneDict to Restore Bone Names'
    bl_space_type = "PROPERTIES"
    bl_region_type = "WINDOW"
    bl_options = {'REGISTER', 'UNDO'}

    filename_ext = '.txt'
    check_extension = True

    # List of operator properties, the attributes will be assigned
    # to the class instance from the operator settings before calling.
    filepath: bpy.props.StringProperty(
        name="File Path",
        description="Bone Dictionary File",
        maxlen=1024,
        subtype='FILE_PATH',
    )  # type:ignore

    # filter File Extension
    filter_glob: bpy.props.StringProperty(
        default="*.txt",
        options={'HIDDEN'},
    )  # type:ignore

    @classmethod
    def poll(cls, context):
        # Enabled while any selected object is an armature.
        return bool(
            next(
                (obj for obj in context.selected_objects if obj.type == 'ARMATURE'),
                None))

    def execute(self, context):
        """Restore bone names on the first selected armature from the BoneDict."""
        armatureObj = next((obj for obj in context.selected_objects if obj.type == 'ARMATURE'), None)
        import_xnalara_model.boneDictRestore(self.filepath, armatureObj)
        return {'FINISHED'}

    def invoke(self, context, event):
        """Open the file browser, defaulting to 'BoneDict.txt'."""
        if not self.filepath:
            self.filepath = 'BoneDict.txt'
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def check(self, context):
        """Keep the '.txt' extension on the chosen path; True means redraw."""
        # Uses the module-level `os`; the redundant local import was removed.
        change_ext = False
        check_extension = self.check_extension

        if check_extension is not None:
            filepath = self.filepath
            if os.path.basename(filepath):
                filepath = bpy.path.ensure_ext(filepath,
                                               self.filename_ext
                                               if check_extension
                                               else "")

                if filepath != self.filepath:
                    self.filepath = filepath
                    change_ext = True

        return (change_ext)
644
+
645
+
646
+
@orientation_helper(axis_forward='-Z', axis_up='Y')
class ImportXpsNgff(bpy.types.Operator, ImportHelper):
    """Load a Wavefront OBJ File."""

    bl_idname = "import_xps_ngff.obj"
    bl_label = "Import XPS NGFF"
    bl_options = {'PRESET', 'UNDO'}

    filename_ext = ".obj"
    filter_glob: bpy.props.StringProperty(
        default="*.obj;*.mtl;*.arl",
        options={'HIDDEN'},
    )  # type:ignore

    use_edges: bpy.props.BoolProperty(
        name="Lines",
        description="Import lines and faces with 2 verts as edge",
        default=True,
    )  # type:ignore

    use_smooth_groups: bpy.props.BoolProperty(
        name="Smooth Groups",
        description="Surround smooth groups by sharp edges",
        default=True,
    )  # type:ignore

    use_split_objects: bpy.props.BoolProperty(
        name="Object",
        description="Import OBJ Objects into Blender Objects",
        default=True,
    )  # type:ignore

    use_split_groups: bpy.props.BoolProperty(
        name="Group",
        description="Import OBJ Groups into Blender Objects",
        default=True,
    )  # type:ignore

    use_groups_as_vgroups: bpy.props.BoolProperty(
        name="Poly Groups",
        description="Import OBJ groups as vertex groups",
        default=False,
    )  # type:ignore

    use_image_search: bpy.props.BoolProperty(
        name="Image Search",
        description="Search subdirs for any associated images "
                    "(Warning, may be slow)",
        default=True,
    )  # type:ignore

    split_mode: bpy.props.EnumProperty(
        name="Split",
        items=(
            ('ON', "Split", "Split geometry, omits unused verts"),
            ('OFF', "Keep Vert Order", "Keep vertex order from file"),
        )
    )  # type:ignore

    global_clamp_size: bpy.props.FloatProperty(
        name="Clamp Size",
        description="Clamp bounds under this value (zero to disable)",
        min=0.0, max=1000.0,
        soft_min=0.0, soft_max=1000.0,
        default=0.0,
    )  # type:ignore

    def execute(self, context):
        """Translate the options into keywords and run the OBJ importer."""
        from . import import_obj

        # Keeping vertex order ('OFF') is incompatible with splitting.
        if self.split_mode == 'OFF':
            self.use_split_objects = False
            self.use_split_groups = False
        else:
            self.use_groups_as_vgroups = False

        keywords = self.as_keywords(ignore=("axis_forward",
                                            "axis_up",
                                            "filter_glob",
                                            "split_mode",
                                            ))

        global_matrix = axis_conversion(from_forward=self.axis_forward,
                                        from_up=self.axis_up,
                                        ).to_4x4()
        keywords["global_matrix"] = global_matrix

        # FIX: `context.user_preferences` was removed in Blender 2.80; the
        # 2.8+ API (this file already uses TOPBAR menus and annotation
        # properties) is `context.preferences`. Also use the module-level os.
        if bpy.data.is_saved and context.preferences.filepaths.use_relative_paths:
            keywords["relpath"] = os.path.dirname(bpy.data.filepath)

        return import_obj.load(context, **keywords)

    def draw(self, context):
        """Lay out the OBJ import options."""
        layout = self.layout

        row = layout.row(align=True)
        row.prop(self, "use_smooth_groups")
        row.prop(self, "use_edges")

        box = layout.box()
        row = box.row()
        row.prop(self, "split_mode", expand=True)

        row = box.row()
        if self.split_mode == 'ON':
            row.label(text="Split by:")
            row.prop(self, "use_split_objects")
            row.prop(self, "use_split_groups")
        else:
            row.prop(self, "use_groups_as_vgroups")

        # FIX: UILayout.split() takes `factor` since Blender 2.80
        # (`percentage` was removed and raises TypeError).
        row = layout.split(factor=0.67)
        row.prop(self, "global_clamp_size")
        layout.prop(self, "axis_forward")
        layout.prop(self, "axis_up")

        layout.prop(self, "use_image_search")
765
+
766
+
767
+
@orientation_helper(axis_forward='-Z', axis_up='Y')
class ExportXpsNgff(bpy.types.Operator, ExportHelper):
    """Save a Wavefront OBJ File."""

    bl_idname = "export_xps_ngff.obj"
    bl_label = 'Export XPS NGFF'
    bl_options = {'PRESET'}

    filename_ext = ".obj"
    filter_glob: bpy.props.StringProperty(
        default="*.obj;*.mtl;*.arl",
        options={'HIDDEN'},
    )  # type:ignore

    # context group
    use_selection: bpy.props.BoolProperty(
        name="Selection Only",
        description="Export selected objects only",
        default=False,
    )  # type:ignore
    use_animation: bpy.props.BoolProperty(
        name="Animation",
        description="Write out an OBJ for each frame",
        default=False,
    )  # type:ignore

    # object group
    use_mesh_modifiers: bpy.props.BoolProperty(
        name="Apply Modifiers",
        description="Apply modifiers (preview resolution)",
        default=True,
    )  # type:ignore

    # extra data group
    use_edges: bpy.props.BoolProperty(
        name="Include Edges",
        description="",
        default=True,
    )  # type:ignore
    use_smooth_groups: bpy.props.BoolProperty(
        name="Smooth Groups",
        description="Write sharp edges as smooth groups",
        default=False,
    )  # type:ignore
    use_smooth_groups_bitflags: bpy.props.BoolProperty(
        name="Bitflag Smooth Groups",
        description="Same as 'Smooth Groups', but generate smooth groups IDs as bitflags "
                    "(produces at most 32 different smooth groups, usually much less)",
        default=False,
    )  # type:ignore
    use_normals: bpy.props.BoolProperty(
        name="Write Normals",
        description="Export one normal per vertex and per face, to represent flat faces and sharp edges",
        default=True,
    )  # type:ignore
    use_vcolors: bpy.props.BoolProperty(
        name="Write Vert Colors",
        description="Export Vertex Color",
        default=True,
    )  # type:ignore
    use_uvs: bpy.props.BoolProperty(
        name="Include UVs",
        description="Write out the active UV coordinates",
        default=True,
    )  # type:ignore
    use_materials: bpy.props.BoolProperty(
        name="Write Materials",
        description="Write out the MTL file",
        default=True,
    )  # type:ignore
    use_triangles: bpy.props.BoolProperty(
        name="Triangulate Faces",
        description="Convert all faces to triangles",
        default=False,
    )  # type:ignore
    use_nurbs: bpy.props.BoolProperty(
        name="Write Nurbs",
        description="Write nurbs curves as OBJ nurbs rather than "
                    "converting to geometry",
        default=False,
    )  # type:ignore
    use_vertex_groups: bpy.props.BoolProperty(
        name="Polygroups",
        description="",
        default=False,
    )  # type:ignore

    # grouping group
    use_blen_objects: bpy.props.BoolProperty(
        name="Objects as OBJ Objects",
        description="",
        default=True,
    )  # type:ignore
    group_by_object: bpy.props.BoolProperty(
        name="Objects as OBJ Groups ",
        description="",
        default=False,
    )  # type:ignore
    group_by_material: bpy.props.BoolProperty(
        name="Material Groups",
        description="",
        default=False,
    )  # type:ignore
    keep_vertex_order: bpy.props.BoolProperty(
        name="Keep Vertex Order",
        description="",
        default=False,
    )  # type:ignore
    global_scale: bpy.props.FloatProperty(
        name="Scale",
        min=0.01, max=1000.0,
        default=1.0,
    )  # type:ignore

    path_mode = path_reference_mode

    check_extension = True

    def execute(self, context):
        """Translate the options into keywords and run the OBJ exporter."""
        from mathutils import Matrix

        from . import export_obj
        keywords = self.as_keywords(ignore=("axis_forward",
                                            "axis_up",
                                            "global_scale",
                                            "check_existing",
                                            "filter_glob",
                                            ))

        # FIX: matrix multiplication uses the `@` operator since Blender 2.80;
        # `Matrix * Matrix` was removed and raises TypeError.
        global_matrix = (Matrix.Scale(self.global_scale, 4)
                         @ axis_conversion(to_forward=self.axis_forward,
                                           to_up=self.axis_up
                                           ).to_4x4())

        keywords["global_matrix"] = global_matrix
        return export_obj.save(context, **keywords)
903
+
904
+
905
+
class XpsImportSubMenu(bpy.types.Menu):
    """File > Import submenu listing every XPS import operator."""

    bl_idname = "OBJECT_MT_xnalara_import_submenu"
    bl_label = "XNALara / XPS"

    def draw(self, context):
        # Table-driven: one (operator id, label) entry per menu row.
        entries = (
            (Import_Xps_Model_Op.bl_idname, "XNALara/XPS Model (.ascii/.mesh/.xps)"),
            (Import_Xps_Pose_Op.bl_idname, "XNALara/XPS Pose (.pose)"),
            (ImportXpsNgff.bl_idname, "XPS NGFF (.obj)"),
        )
        for operator_id, label in entries:
            self.layout.operator(operator_id, text=label)
914
+
915
+
916
+
class XpsExportSubMenu(bpy.types.Menu):
    """File > Export submenu listing every XPS export operator."""

    bl_idname = "OBJECT_MT_xnalara_export_submenu"
    bl_label = "XNALara / XPS"

    def draw(self, context):
        # Table-driven: one (operator id, label) entry per menu row.
        entries = (
            (Export_Xps_Model_Op.bl_idname, "XNALara/XPS Model (.ascii/.mesh/.xps)"),
            (Export_Xps_Pose_Op.bl_idname, "XNALara/XPS Pose (.pose)"),
            (ExportXpsNgff.bl_idname, "XPS NGFF (.obj)"),
        )
        for operator_id, label in entries:
            self.layout.operator(operator_id, text=label)
925
+
926
+
927
+
#
928
+
# Registration
929
+
#
930
+
def menu_func_import(self, context):
    """Append the XPS import submenu (with the addon icon) to File > Import."""
    icon = custom_icons["main"]["xps_icon"]
    self.layout.menu(XpsImportSubMenu.bl_idname, icon_value=icon.icon_id)
933
+
934
+
935
+
def menu_func_export(self, context):
    """Append the XPS export submenu (with the addon icon) to File > Export."""
    icon = custom_icons["main"]["xps_icon"]
    self.layout.menu(XpsExportSubMenu.bl_idname, icon_value=icon.icon_id)
938
+
939
+
940
+
# --------------------------------------------------------------------------------
941
+
# Custom Icons
942
+
# --------------------------------------------------------------------------------
943
+
custom_icons = {}
944
+
945
+
946
+
def registerCustomIcon():
    """Load the addon icon (icons/icon.png next to this file) into a
    preview collection and cache it under custom_icons['main']."""
    collection = previews.new()
    icons_dir = os.path.join(os.path.dirname(__file__), "icons")
    collection.load("xps_icon", os.path.join(icons_dir, "icon.png"), 'IMAGE')
    custom_icons["main"] = collection
953
+
954
+
955
+
def unregisterCustomIcon():
    """Release every cached preview collection and empty the icon cache."""
    while custom_icons:
        _, collection = custom_icons.popitem()
        previews.remove(collection)
959
+
960
+
961
+
def register():
    """Add-on entry point: hook the XPS submenus into the File menus and
    load the custom menu icon."""
    bpy.types.TOPBAR_MT_file_import.append(menu_func_import)
    bpy.types.TOPBAR_MT_file_export.append(menu_func_export)
    registerCustomIcon()
965
+
966
+
967
+
def unregister():
    """Add-on exit point: remove the File-menu hooks and free the icon cache."""
    bpy.types.TOPBAR_MT_file_import.remove(menu_func_import)
    bpy.types.TOPBAR_MT_file_export.remove(menu_func_export)
    unregisterCustomIcon()
971
+
972
+
973
+
if __name__ == "__main__":
    # Allows running this file directly from Blender's text editor during development.
    register()
+199
xnalara_io_Tools/xps_toolshelf.py
+199
xnalara_io_Tools/xps_toolshelf.py
···
1
+
import bpy
2
+
3
+
from . import import_xnalara_model, import_xnalara_pose
4
+
5
+
6
+
class ArmatureBonesHideByName_Op(bpy.types.Operator):
    """Hide bones whose names mark them as unused."""

    bl_idname = 'xps_tools.bones_hide_by_name'
    bl_label = 'Hide bones by name'
    bl_description = 'Move bones starting with "unused" to the armature layer 2'
    bl_options = {'PRESET'}

    @classmethod
    def poll(cls, context):
        # Enabled while any selected object is an armature.
        return bool(
            next(
                (obj for obj in context.selected_objects if obj.type == 'ARMATURE'),
                None))

    def execute(self, context):
        # FIX: the original read self.armature_objs, which is only set by
        # invoke(); calling execute() directly (redo panel, scripted call)
        # raised AttributeError. Fall back to the current selection.
        armatures = getattr(self, 'armature_objs', None)
        if armatures is None:
            armatures = [
                obj for obj in context.selected_objects if obj.type == 'ARMATURE']
        import_xnalara_model.hideBonesByName(armatures)
        return {'FINISHED'}

    def invoke(self, context, event):
        self.armature_objs = [
            obj for obj in context.selected_objects if obj.type == 'ARMATURE']
        return self.execute(context)

    def check(self, context):
        # NOTE(review): debug leftover — check() conventionally returns a bool
        # redraw flag, not an operator status set; kept as-is to avoid
        # changing observable behavior.
        print('CHECK')
        return {'RUNNING_MODAL'}
31
+
32
+
33
+
class ArmatureBonesHideByVertexGroup_Op(bpy.types.Operator):
    """Hide bones that deform no mesh (moved to armature layer 2)."""

    bl_idname = 'xps_tools.bones_hide_by_vertex_group'
    bl_label = 'Hide bones by weight'
    bl_description = 'Move bones that do not alter any mesh to the armature layer 2'
    bl_options = {'PRESET'}

    @classmethod
    def poll(cls, context):
        # Enabled only while the selection contains at least one armature.
        return any(obj.type == 'ARMATURE' for obj in context.selected_objects)

    def execute(self, context):
        import_xnalara_model.hideBonesByVertexGroup(self.armature_objs)
        return {'FINISHED'}

    def invoke(self, context, event):
        # Snapshot the selected armatures, then run immediately.
        self.armature_objs = list(
            filter(lambda obj: obj.type == 'ARMATURE', context.selected_objects))
        return self.execute(context)

    def check(self, context):
        # NOTE(review): Blender expects check() to return a bool; the original
        # truthy set and debug print are preserved as-is — confirm intent.
        print('CHECK')
        return {'RUNNING_MODAL'}
58
+
59
+
60
+
class ArmatureBonesShowAll_Op(bpy.types.Operator):
    """Unhide every bone by moving all of them back to armature layer 1."""

    bl_idname = 'xps_tools.bones_show_all'
    bl_label = 'Show all Bones'
    bl_description = 'Move all bones to the armature layer 1'
    bl_options = {'PRESET'}

    @classmethod
    def poll(cls, context):
        # Enabled only while the selection contains at least one armature.
        return any(obj.type == 'ARMATURE' for obj in context.selected_objects)

    def execute(self, context):
        import_xnalara_model.showAllBones(self.armature_objs)
        return {'FINISHED'}

    def invoke(self, context, event):
        # Snapshot the selected armatures, then run immediately.
        self.armature_objs = list(
            filter(lambda obj: obj.type == 'ARMATURE', context.selected_objects))
        return self.execute(context)

    def check(self, context):
        # NOTE(review): Blender expects check() to return a bool; the original
        # truthy set and debug print are preserved as-is — confirm intent.
        print('CHECK')
        return {'RUNNING_MODAL'}
85
+
86
+
87
+
class ArmatureBonesRenameToBlender_Op(bpy.types.Operator):
    """Rename bones to Blender's side-suffix convention (left -> .L)."""

    bl_idname = 'xps_tools.bones_rename_to_blender'
    bl_label = 'Rename Bones'
    bl_description = 'Rename bones to Blender bone name convention (left -> .L)'
    bl_options = {'PRESET'}

    @classmethod
    def poll(cls, context):
        # Enabled only while the selection contains at least one armature.
        return any(obj.type == 'ARMATURE' for obj in context.selected_objects)

    def execute(self, context):
        selected_armatures = [obj for obj in context.selected_objects
                              if obj.type == 'ARMATURE']
        import_xnalara_pose.renameBonesToBlender(selected_armatures)
        return {'FINISHED'}
106
+
107
+
108
+
class ArmatureBonesRenameToXps_Op(bpy.types.Operator):
    """Rename bones back to the XPS naming convention (.L -> left)."""

    bl_idname = 'xps_tools.bones_rename_to_xps'
    bl_label = 'Rename Bones'
    bl_description = 'Rename bones back to XPS (.L -> left)'
    bl_options = {'PRESET'}

    @classmethod
    def poll(cls, context):
        # Enabled only while the selection contains at least one armature.
        return any(obj.type == 'ARMATURE' for obj in context.selected_objects)

    def execute(self, context):
        selected_armatures = [obj for obj in context.selected_objects
                              if obj.type == 'ARMATURE']
        import_xnalara_pose.renameBonesToXps(selected_armatures)
        return {'FINISHED'}
127
+
128
+
129
+
class ArmatureBonesConnect_Op(bpy.types.Operator):
    """Apply the connect-bones setting to every selected armature."""

    bl_idname = 'xps_tools.bones_connect'
    bl_label = 'Set Bones Connection'
    bl_description = 'Set Bones Connection'
    bl_options = {'PRESET'}

    connectBones: bpy.props.BoolProperty()  # type:ignore

    @classmethod
    def poll(cls, context):
        # Enabled only while the selection contains at least one armature.
        return any(obj.type == 'ARMATURE' for obj in context.selected_objects)

    def execute(self, context):
        previous_active = bpy.context.active_object
        selected_armatures = [obj for obj in context.selected_objects
                              if obj.type == 'ARMATURE']
        for armature_ob in selected_armatures:
            # setBoneConnect operates on the active object, so each armature
            # is activated in turn.
            bpy.context.view_layer.objects.active = armature_ob
            import_xnalara_model.setBoneConnect(self.connectBones)
        # Restore whatever was active before the operator ran.
        bpy.context.view_layer.objects.active = previous_active
        return {'FINISHED'}
154
+
155
+
156
+
class NewRestPose_Op(bpy.types.Operator):
    """Bake the current pose as the armature's new rest pose."""

    bl_idname = 'xps_tools.new_rest_pose'
    bl_label = 'New Rest Pose'
    bl_description = 'Set Current Pose as The New Rest Pose'
    bl_options = {"PRESET"}

    @classmethod
    def poll(cls, context):
        # Needs an active armature plus at least one selected mesh.
        return (context.active_object and context.active_object.type == 'ARMATURE'
                and any(obj.type == 'MESH' for obj in context.selected_objects))

    def action_common(self, context):
        """Freeze each bound mesh in its posed shape, then apply the pose.

        For every selected mesh rigged to the active armature, a duplicate of
        its last modifier (expected to be the ARMATURE modifier) is created and
        applied, so the geometry takes the posed shape while the original
        modifier stays on the stack and keeps the mesh rigged. Finally the
        armature pose is applied as the new rest pose.
        """
        meshes_obs = filter(lambda obj: obj.type == 'MESH', context.selected_objects)
        activeArmature = context.active_object
        for obj in meshes_obs:
            if (obj.find_armature() == activeArmature):
                # Guard against an empty modifier stack: the original
                # obj.modifiers[-1] raised IndexError in that case, although
                # the check below shows a falsy value was expected.
                sourceModif = obj.modifiers[-1] if obj.modifiers else None
                if (sourceModif and sourceModif.type == 'ARMATURE'):
                    destModif = obj.modifiers.new(sourceModif.name, sourceModif.type)

                    # collect names of writable properties
                    properties = [p.identifier for p in destModif.bl_rna.properties
                                  if not p.is_readonly]

                    # copy those properties
                    for prop in properties:
                        setattr(destModif, prop, getattr(sourceModif, prop))

                    print(destModif.name)
                    # modifier_apply works on the active object.
                    bpy.context.view_layer.objects.active = obj
                    bpy.ops.object.modifier_apply(modifier=destModif.name)

        bpy.context.view_layer.objects.active = activeArmature
        bpy.ops.object.mode_set(mode='POSE')
        bpy.ops.pose.armature_apply()
        bpy.ops.object.mode_set(mode='OBJECT')

    def execute(self, context):
        self.action_common(context)
        return {"FINISHED"}

    def invoke(self, context, event):
        self.action_common(context)
        return {"FINISHED"}
+166
xnalara_io_Tools/xps_types.py
+166
xnalara_io_Tools/xps_types.py
···
1
+
from . import xps_const
2
+
3
+
4
+
class XpsBone:
    """One skeleton bone read from an XPS file."""

    def __init__(self, id, name, co, parentId):
        # id is this bone's index; parentId refers to another bone's id.
        self.id, self.name = id, name
        self.co, self.parentId = co, parentId
11
+
12
+
# TODO: rename the class below; the current name is confusing.
13
+
14
+
15
+
class XpsBonePose:
    """Per-bone transform deltas from an XPS pose entry."""

    def __init__(self, boneName, coordDelta, rotDelta, scale):
        self.boneName = boneName
        # Translation / rotation offsets relative to the rest pose.
        self.coordDelta, self.rotDelta = coordDelta, rotDelta
        self.scale = scale
22
+
23
+
24
+
class XpsMesh:
    """One mesh part of an XPS model (geometry plus its texture list)."""

    def __init__(self, name, textures, vertices, faces, uvCount):
        self.name = name
        self.textures = textures
        self.vertices, self.faces = vertices, faces
        # Number of UV layers carried by this part's vertices.
        self.uvCount = uvCount
32
+
33
+
34
+
class BoneWeight:
    """A (bone index, weight) influence pair for a vertex."""

    def __init__(self, id, weight):
        self.id, self.weight = id, weight
39
+
40
+
41
+
class XpsVertex:
    """A single XPS vertex: position, normal, color, UVs, bone weights."""

    def __init__(self, id, co, norm, vColor, uv, boneWeights):
        self.id = id
        self.co, self.norm = co, norm
        self.vColor, self.uv = vColor, uv
        self.boneWeights = boneWeights
        # Set to True externally once this vertex has been merged.
        self.merged = False

    def __copy__(self):
        # Sequence attributes are sliced into new copies, but boneWeights is
        # shared with the source vertex, and `merged` resets to False on the
        # clone. NOTE(review): presumably intentional — confirm with callers.
        clone = XpsVertex(
            self.id, self.co[:], self.norm[:],
            self.vColor[:], self.uv[:], self.boneWeights)
        return clone
61
+
62
+
# TODO: rename XpsTexture.file to filepath for clarity.
63
+
64
+
65
+
class XpsTexture:
    """A texture slot of an XPS mesh part."""

    def __init__(self, id, file, uvLayer):
        self.id = id
        # `file` is the texture path/name; uvLayer selects which UV set it uses.
        self.file, self.uvLayer = file, uvLayer
71
+
72
+
# TODO: add explicit type annotations to the class below.
73
+
74
+
75
+
class XpsData:
    """Container for a parsed XPS model: header, bone list and mesh list."""

    def __init__(self, header='', bones=None, meshes=None):
        # None sentinels instead of mutable defaults: the original
        # `bones=[]` / `meshes=[]` shared one list object across every
        # instance constructed without arguments.
        self.header = header
        self.bones = [] if bones is None else bones
        self.meshes = [] if meshes is None else meshes
81
+
82
+
# rename to XPS file definition
83
+
84
+
85
+
class XpsHeader:
    """Header record of an XPS file: magic, version and provenance strings."""

    def __init__(
            self,
            magic_number=xps_const.MAGIC_NUMBER,
            version_mayor=xps_const.XPS_VERSION_MAYOR,
            version_minor=xps_const.XPS_VERSION_MINOR,
            xna_aral=xps_const.XNA_ARAL,
            settingsLen=xps_const.STRLEN,
            machine='',
            user='',
            files='',
            settings='',
            pose=''):
        # File-format identification fields.
        self.magic_number, self.xna_aral = magic_number, xna_aral
        self.version_mayor, self.version_minor = version_mayor, version_minor
        self.settingsLen = settingsLen
        # Provenance strings recorded when the file was written.
        self.machine, self.user, self.files = machine, user, files
        self.settings = settings
        # NOTE(review): presumably embedded default-pose text — confirm
        # against the reader/writer code.
        self.pose = pose
109
+
110
+
111
+
class XpsImportSettings:
    """User options controlling a single XPS model import."""

    def __init__(self, filename, uvDisplX, uvDisplY, importDefaultPose,
                 joinMeshRips, joinMeshParts, markSeams, vColors,
                 connectBones, autoIk, importNormals,
                 separate_optional_objects):
        self.filename = filename
        # UV displacement offsets applied on import.
        self.uvDisplX, self.uvDisplY = uvDisplX, uvDisplY
        self.importDefaultPose = importDefaultPose
        # Mesh-joining behavior.
        self.joinMeshRips, self.joinMeshParts = joinMeshRips, joinMeshParts
        self.markSeams = markSeams
        self.vColors = vColors
        # Armature options.
        self.connectBones, self.autoIk = connectBones, autoIk
        self.importNormals = importNormals
        self.separate_optional_objects = separate_optional_objects
139
+
140
+
141
+
class XpsExportSettings:
    """User options controlling a single XPS model export."""

    def __init__(self, filename, format, uvDisplX, uvDisplY,
                 exportOnlySelected, expDefPose, preserveSeams, vColors,
                 exportNormals, versionMayor, versionMinor):
        self.filename = filename
        self.format = format
        # UV displacement offsets applied on export.
        self.uvDisplX, self.uvDisplY = uvDisplX, uvDisplY
        self.exportOnlySelected = exportOnlySelected
        self.expDefPose = expDefPose
        self.preserveSeams = preserveSeams
        self.vColors = vColors
        self.exportNormals = exportNormals
        # Target XPS file-format version.
        self.versionMayor, self.versionMinor = versionMayor, versionMinor