-
Notifications
You must be signed in to change notification settings - Fork 3
Expand file tree
/
Copy path__init__.py
More file actions
2443 lines (2213 loc) · 111 KB
/
__init__.py
File metadata and controls
2443 lines (2213 loc) · 111 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
# Author: Cade Haley
# This code is licensed under Creative Commons Attribution 4.0
import bpy, bmesh, struct, os, re, time, math, mathutils, fnmatch, copy
import xml.etree.ElementTree as ET
from shutil import copyfile
from bpy.props import (StringProperty,
BoolProperty,
IntProperty,
FloatProperty,
FloatVectorProperty,
EnumProperty,
PointerProperty,
)
from bpy.types import (Panel,
Operator,
PropertyGroup,
)
# Add-on registration metadata read by Blender's add-on manager.
bl_info = {
    "name": "HPL3 Export",
    "description": "Export objects and materials directly into an HPL3 map",
    "author": "cadely",
    "version": (3, 18, 0),
    "blender": (2, 8, 0),  # minimum supported Blender version
    "location": "3D View > Tools",
    "warning": "", # used for warning icon and text in addons panel
    "wiki_url": "",
    "tracker_url": "",
    "category": "Import-Export"
}
# ------------------------------------------------------------------------
# store properties in the active scene
# ------------------------------------------------------------------------
class HPL3_Export_Properties (PropertyGroup):
    """Per-scene settings for the HPL3 exporter panel.

    Notes on the update callbacks below: writing through ``self["name"]``
    (ID-property item access) deliberately avoids re-triggering the
    property's own ``update`` callback, while *reading* must use attribute
    access (``self.name``) when the property may never have been written,
    because item access raises KeyError for an unset ID property.
    """

    # Expand/collapse the advanced section of the UI
    show_advanced : BoolProperty(default = False)

    ## Static object and entity properties
    casts_shadows : BoolProperty(name="Casts Shadows", default = True)
    collides : BoolProperty(name="Collides", default = True)
    is_occluder : BoolProperty(name="Is Occluder", default = True)
    distance_culling : BoolProperty(name="Distance Culling", default = True)
    culled_by_fog : BoolProperty(name="Culled By Fog", default = True)
    add_bodies : BoolProperty(
        name="Add Basic Physics Bodies",
        description="Create a cube body around each subobject which matches the dimensions of the object's bounding box",
        default = False
    )

    def update_map_path(self, context):
        # Collapse doubled separators the user may have typed, then absolutize.
        # Item access is safe here: this callback only fires after a write.
        if self["map_file_path"] != "":
            no_double_slash = re.sub(r'\\\\|//', '', self["map_file_path"])
            self["map_file_path"] = os.path.abspath(no_double_slash)

    map_file_path : StringProperty(
        name="Map File",
        description="Set to destination map's main .hpm file. Leave blank to skip map export",
        default="",
        maxlen=4096,
        subtype='FILE_PATH',
        update=update_map_path
    )

    def update_entity_path(self, context):
        # Normalize the entity destination folder to an absolute path.
        no_double_slash = re.sub(r'\\\\|//', '', self["entity_export_path"])
        self["entity_export_path"] = os.path.abspath(no_double_slash)

    entity_export_path : StringProperty(
        name="Entities Folder",
        description="Destination for all entity .dae and .dds files, which will be grouped into subfolders by mesh datablock name (under the polygon triangle icon). Recommendation: Use one of these asset folders per map, or per project. Also, make one asset folder for entities, and one for static objects",
        default="",
        maxlen=4096,
        subtype='DIR_PATH',
        update=update_entity_path
    )

    def update_statobj_path(self, context):
        # Normalize the static-object destination folder to an absolute path.
        no_double_slash = re.sub(r'\\\\|//', '', self["statobj_export_path"])
        self["statobj_export_path"] = os.path.abspath(no_double_slash)

    statobj_export_path : StringProperty(
        name="Static Objects Folder",
        description="Destination for all static object .dae and .dds files, which will be grouped into subfolders by mesh datablock name (under the polygon triangle icon). Recommendation: Use one of these asset folders per map, or per project. Also, make one asset folder for entities, and one for static objects",
        default="",
        maxlen=4096,
        subtype='DIR_PATH',
        update=update_statobj_path
    )

    bake_scene_lighting : BoolProperty(
        name="Bake Scene Lighting (SLOW)",
        description="Use Cycles to bake direct and indirect lighting to the diffuse texture (only use for single-use stationary objects). Set samples using Render > Sampling > Render Samples to control quality/time",
        default = False
    )

    def update_square(self, context):
        # BUGFIX: was self["map_res_x"], which raises KeyError when the X
        # resolution has never been edited; attribute access honors the default.
        self["map_res_y"] = self.map_res_x

    square_resolution : BoolProperty(
        name="Square",
        description="Export a square image",
        default = True,
        update=update_square
    )

    def update_res_x_pow2(self, context):
        # Snap X resolution to the nearest power of two (exponent clamped 0..14).
        base_2 = round(math.log(self["map_res_x"],2))
        base_2 = max(min(base_2, 14), 0)
        result = int(math.pow(2, base_2))
        self["map_res_x"] = result
        if self.square_resolution:
            self["map_res_y"] = result

    def update_res_y_pow2(self, context):
        # Snap Y resolution to the nearest power of two (exponent clamped 0..14).
        base_2 = round(math.log(self["map_res_y"],2))
        base_2 = max(min(base_2, 14), 0)
        result = int(math.pow(2, base_2))
        self["map_res_y"] = result
        # BUGFIX: was self["square_resolution"], which raises KeyError if the
        # checkbox was never toggled; mirrors update_res_x_pow2 above.
        if self.square_resolution:
            self["map_res_x"] = result

    map_res_x : IntProperty(
        name = "X",
        description="A integer property",
        default = 1024,
        subtype='PIXEL',
        min = 1,
        max = 8192,
        update=update_res_x_pow2
    )
    map_res_y : IntProperty(
        name = "Y",
        description="A integer property",
        default = 1024,
        subtype='PIXEL',
        min = 1,
        max = 8192,
        update=update_res_y_pow2
    )
    disable_small_texture_workaround : BoolProperty(
        name="Disable Small Texture UV Reset",
        description="Only applies to specific situations: When baking output textures smaller than 32x32, Blender has a bug that will produce a black output if UVs are smaller than a pixel. If the output texture is small, the addon automatically bakes these textures with reset UVs, which may not be desirable if you are using small textures intentionally",
        default = False
    )
    disable_uv_smart_project : BoolProperty(
        name="Disable UV Smart Project",
        description="Prevents this tool from auto-unwrapping UVs. This is useful if you want to use your own manually unwrapped UVs in the actively selected UV map as opposed to having this script run 'Smart Project UVs' under the hood",
        default = False
    )
    sync_blender_deletions : BoolProperty(
        name="Clean Up Missing Objects (Read Description)",
        description="If objects previously exported with this tool exist in the HPL3 map but not the current Blender scene, delete them from the map and disk. Note: Will erase .dds and .dae files even if they have been modified since the last export (.ent and .mat will be left). Protect your work with Git/other version control!",
        default = False
    )
    bake_multi_mat_into_single : EnumProperty(
        name="Bake object materials to",
        description="When exporting Blender materials as HPL3-compatible texture sets, create",
        items=[ ('OP1', "Textures Per Material - Original UVs", "This will bake textures for each material in Blender, using the active UV map. Exported objects will have a diffuse, specular, and normal map for each material used (Faces assigned to each material can use full UV space)"),
                ('OP2', "Textures Per Object - Smart Project UVs", "Use this option for more complex material setups, such as those using multiple UV maps or generative elements. This will combine an object's materials into a single, compact texture set. Exported objects will each have one diffuse, specular, and (possibly) normal map (Unwraps and packs all UVs into a single UV space)"),
                ('OP3', "None (Use existing material)", "TO USE: Locate the material's primary (diffuse) .dds file, and drag-and-drop into the Shader Editor. Then connect the node to 'Base Color' of material's Principled BSDF node to assign an image to the mesh, otherwise mesh will not load")
              ]
    )
    entity_option : EnumProperty(
        name="Set objects up as",
        description="Type of object to be added to the HPL3 map",
        items=[ ('OP1', "Static Objects", "Stationary objects with automatic mesh collision. Use for large objects that occlude and are not very high-poly"),
                ('OP2', "Entities", "Use for interactable/moveable objects and high-poly static items")
              ]
    )
    multi_mode: EnumProperty(
        items=(('MULTI', "Multi Export", "Export each selected object as its own HPL3 map item"),
               ('SINGLE', "Single Export", "Export all selected objects as a single HPL3 map item. Active object will be used as the 3D origin and name."),
              ),
    )
# ------------------------------------------------------------------------
# operators - main script
# ------------------------------------------------------------------------
class OBJECT_OT_HPL3_Export (bpy.types.Operator):
    """Operator that exports the selected objects into an HPL3 map."""
    bl_idname = "wm.export_selected"
    bl_label = "Export Selected"

    # Per-run state. NOTE(review): these are class-level (shared) defaults;
    # the mutable ones are rebound in __init__/export_objects each run —
    # confirm nothing relies on them persisting between runs.
    root = None              # ElementTree root of the target .hpm map file
    asset_xml = None         # ElementTree root of the asset-tracking xml
    current_DAE = None       # <Asset> element for the mesh currently exported
    CONVERTERPATH = None     # quoted path to nvidia_dds.exe (see nvidiaGet)
    main_tool = None         # the HPL3_Export_Properties instance in use
    selected = []            # snapshot of bpy.context.selected_objects
    active_object = None     # active object at export start
    export_path = None       # destination folder (static-object or entity path)
    dupes = None             # duplicated objects actually worked on/exported
    mapgroups = []           # MapGroup instances (bake groups)
    maps = []                # replaced by a dict of map handlers in export_objects
    bsdf_sockets = {}        # Principled BSDF socket-name table (set in __init__)

    class ExportError(Exception):
        # Raised internally to abort an export run.
        pass
def __init__(self, *args, **kwargs):
    """Initialize per-run state and the Principled BSDF socket-name table."""
    super().__init__(*args, **kwargs)
    self.mapgroups = []
    # Blender 4.0 renamed several Principled BSDF input sockets; select the
    # spelling that matches the running Blender version.
    on_blender_4 = bpy.app.version >= (4, 0, 0)
    self.bsdf_sockets = {
        "Color": "Base Color",
        "Specular": "Specular IOR Level" if on_blender_4 else "Specular",
        "Roughness": "Roughness",
        "Metallic": "Metallic",
        "Transmission": "Transmission Weight" if on_blender_4 else "Transmission",
        "Normal": "Normal",
    }
# ------------------------------------------------------------------------
# get NVIDIA DDS converter executable
# ------------------------------------------------------------------------
def nvidiaGet(self):
    """Locate the bundled NVIDIA DDS converter executable.

    Checks every Blender scripts path (addons/nvidia/), then — on Blender
    4.x — the extensions directory. Returns the path wrapped in double
    quotes (ready for a command line), or None when not found.
    """
    candidates = [spath + '/addons/nvidia/nvidia_dds.exe'
                  for spath in bpy.utils.script_paths()]
    if bpy.app.version >= (4, 0, 0):
        candidates.append(bpy.utils.user_resource('EXTENSIONS')
                          + "/user_default/hpl3_export/nvidia/nvidia_dds.exe")
    for candidate in candidates:
        if os.path.exists(candidate):
            return '"' + candidate + '"'
    return None
# ------------------------------------------------------------------------
# make a copy of the data, unless it's linked. Then make it local
# ------------------------------------------------------------------------
def make_data_copy(self, data):
    """Return an editable duplicate of a datablock.

    Library-linked data cannot be edited in place, so it is made local;
    ordinary local data is simply copied.
    """
    if data.library is None:
        return data.copy()
    return data.make_local()
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
# loop through scene objects and export
# ------------------------------------------------------------------------
def export_objects(self, hpl3export):
    """Top-level export driver: duplicate the selection, bake materials,
    write files, and register the objects in the HPL3 map.

    hpl3export: the scene's HPL3_Export_Properties settings.
    Returns the number of map items exported.
    """
    # Initialize global vars
    self.main_tool = hpl3export
    self.selected = bpy.context.selected_objects[:]
    self.active_object = bpy.context.active_object
    # OP1 = static objects, otherwise entities; normalize to forward slashes
    self.export_path = hpl3export.statobj_export_path if hpl3export.entity_option == 'OP1' else hpl3export.entity_export_path
    self.export_path = re.sub(r'\\', '/', os.path.normpath(self.export_path)) + "/"
    # One handler per output texture type (classes defined later in the file)
    self.maps = {
        "ROUGHNESS": self.RoughnessMap(self.bsdf_sockets, "Roughness"),
        "PRESPEC": self.PrespecMap(self.bsdf_sockets, "Specular"),
        "SPECULAR": self.SpecularMap(self.bsdf_sockets, "Specular"),
        "NORMAL": self.NormalMap(self.bsdf_sockets, "Normal"),
        "DIFFUSE": self.DiffuseMap(self.bsdf_sockets, "Color"),
    }
    # Check that objects were selected
    if len(self.selected) == 0:
        if hpl3export.sync_blender_deletions:
            self.report({'WARNING'}, "No objects selected. Cleaning up unused files")
        else:
            self.report({'WARNING'}, "No objects selected.")
    exported_mesh_names = []
    export_num = 0
    # Save original naming and create duplicates
    for ob in self.selected:
        if ob.type != "MESH" and ob.type != "ARMATURE":
            # Only meshes and armatures are exported; drop the rest from the selection
            ob.select_set(False)
        else:
            # Stash sanitized names on the original so duplicates can find them
            ob["hpl3export_obj_name"] = re.sub('[^0-9a-zA-Z]+', '_', ob.name)
            ob["hpl3export_mesh_name"] = re.sub('[^0-9a-zA-Z]+', '_', ob.data.name)
            ob["hpl3export_hide_render"] = str(ob.hide_render)
            # Set original object as unrenderable in case we are baking lighting
            ob.hide_render = True
            # Find all associated armatures and add to list
            if ob.type == "MESH":
                for mod in ob.modifiers:
                    if mod.type == 'ARMATURE':
                        if mod.object is not None:
                            mod.object.select_set(True)
    # Work on duplicates so the user's originals stay untouched
    bpy.ops.object.duplicate(mode='DUMMY')
    self.dupes = bpy.context.selected_objects[:]
    # Make the object's data real if it is linked
    for dupe in self.dupes:
        if (dupe.data.library != None):
            dupe.data.make_local()
        # Make sure object is renderable for baking
        dupe.hide_render = False
    # New export for each object
    if hpl3export.multi_mode == 'MULTI':
        for current in self.dupes:
            if current.type == 'MESH':
                export_num += 1
                self.get_asset_xml_entry(current)
                # Prevent re-exporting files for instanced objects
                if current.data.name not in exported_mesh_names:
                    exported_mesh_names.append(current.data.name)
                    # OP3 = reuse existing material, no baking needed
                    if hpl3export.bake_multi_mat_into_single != 'OP3':
                        # Deselect all and select object
                        for ob in bpy.context.selected_objects:
                            ob.select_set(False)
                        current.select_set(True)
                        bpy.context.view_layer.objects.active = current
                        if hpl3export.bake_multi_mat_into_single == 'OP2':
                            self.prepare_materials_singletex(hpl3export, current)
                        else:
                            self.prepare_materials_multitex(hpl3export, current)
                # Add object to map
                if hpl3export.map_file_path != "":
                    self.add_object(hpl3export, current)
        if hpl3export.bake_multi_mat_into_single != 'OP3':
            self.bake_materials_and_save(hpl3export)
            self.delete_unused_textures(hpl3export)
        if hpl3export.map_file_path != "" and hpl3export.sync_blender_deletions:
            self.sync_blender_deletions(hpl3export)
    # Multiple objects, one export
    else:
        export_num = 1
        # Prevent re-exporting files for instanced objects
        if hpl3export.bake_multi_mat_into_single != 'OP3':
            for current in self.dupes:
                if current.type == 'MESH':
                    if current.data.name not in exported_mesh_names:
                        exported_mesh_names.append(current.data.name)
                        # Deselect all and select object
                        for ob in bpy.context.selected_objects:
                            ob.select_set(False)
                        current.select_set(True)
                        bpy.context.view_layer.objects.active = current
                        if hpl3export.bake_multi_mat_into_single == 'OP2':
                            self.prepare_materials_singletex(hpl3export, current)
                        else:
                            self.prepare_materials_multitex(hpl3export, current)
            self.bake_materials_and_save(hpl3export)
        # Add object to map; the active object provides the origin and name
        self.get_asset_xml_entry(self.active_object)
        if hpl3export.map_file_path != "":
            self.add_object(hpl3export, self.active_object)
        if hpl3export.bake_multi_mat_into_single != 'OP3':
            self.delete_unused_textures(hpl3export)
        self.prepare_and_export(hpl3export)
    return export_num
# ------------------------------------------------------------------------
# add object to HPL3 map
# current_obj - blender object being exported
# ------------------------------------------------------------------------
def add_object(self, hpl3export, current_obj):
    """Insert or update `current_obj`'s entry in the HPL3 map XML.

    Works inside the map's "Blender@HPL3EXPORT" section (created on first
    use along with its file index and <Objects> container). Transforms are
    converted from Blender's Z-up to HPL3's Y-up coordinate system.

    Returns the previous entry's ModStamp (0 if the entry is new or the
    stamp was unparsable).
    """
    is_ent = hpl3export.entity_option == 'OP2'
    # Get 'Blender@HPL3EXPORT' section of XML
    section = None
    for child in self.root:
        if child.get("Name") == "Blender@HPL3EXPORT":
            section = child
    # or make new
    if section is None:
        section = ET.SubElement(self.root, "Section")
        section.attrib["Name"] = "Blender@HPL3EXPORT"
        if is_ent:
            file_indices = ET.SubElement(section, "FileIndex_Entities")
        else:
            file_indices = ET.SubElement(section, "FileIndex_StaticObjects")
        file_indices.attrib["NumOfFiles"] = "0"
        objects = ET.SubElement(section, "Objects")
    else:
        objects = section.find("Objects")
    # BEGIN getting variables
    # Assemble .dae/.ent path; the map references paths relative to SOMA/
    mesh_name = self.get_custom_property(current_obj, "hpl3export_mesh_name")
    extension = ".ent" if is_ent else ".dae"
    filepath = self.mesh_export_path + "/" + mesh_name + "/" + mesh_name + extension
    filepath = re.sub(r'\\', '/', os.path.normpath(filepath))
    short_path = re.sub(r'.*\/SOMA\/', '', filepath)
    # Find in file index list (the index element is the section's first child)
    file_indices = section[0]
    existing_index = None
    current_idx = None
    for current_idx in file_indices.iter("File"):
        # If there is a match, save the index
        if short_path == current_idx.get("Path"):
            existing_index = current_idx.get("Id")
            break
    # If not in index, make new index and object entry
    if existing_index is None:
        print("Adding new index entry")
        newindex = ET.Element("File")
        # If list is entirely empty
        if current_idx is None:
            existing_index = 0
        else:
            # NOTE(review): next id = last iterated id + 1, not max(id) + 1
            existing_index = int(current_idx.get("Id")) + 1
        newindex.attrib["Id"] = str(existing_index)
        newindex.attrib["Path"] = short_path
        file_indices.append(newindex)
        # Increment NumOfFiles
        num_of_files = int(file_indices.attrib['NumOfFiles'])
        file_indices.attrib['NumOfFiles'] = str(num_of_files + 1)
        # Increment file use number in asset tracking
        if self.main_tool.multi_mode == "MULTI":
            self.current_DAE.attrib["Uses"] = str(int(self.current_DAE.attrib["Uses"]) + 1)
    # Get last ID, falling back to the type's base ID if no entries exist
    try:
        lastID = int(objects[-1].get("ID")) + 1
    except IndexError:
        lastID = 268435459 if is_ent else 285212672
    # Get object name
    object_name = self.get_custom_property(current_obj, "hpl3export_obj_name")
    # Check object for an armature modifier
    is_rigged = False
    armature = None
    for mod in current_obj.modifiers:
        if mod.type == 'ARMATURE':
            if mod.object is not None:
                is_rigged = True
                armature = mod.object
                break
    # Get world transforms, convert to Y-up
    # If it's rigged, get skeleton transforms instead
    if is_rigged:
        world_mat = armature.matrix_world
    else:
        world_mat = current_obj.matrix_world
    # Reorder vector columns such that Blender X = HPL Z, Blender Y = HPL X, Blender Z = HPL Y
    column_reorder = mathutils.Matrix(((0,1,0,0), (0,0,1,0), (1,0,0,0), (0,0,0,1)))
    y_up_mat = mathutils.Matrix(((0,-1,0,0), (1,0,0,0), (0,0,1,0), (0,0,0,1)))
    local_rot_y = mathutils.Matrix.Rotation(math.radians(90.0), 4, 'Y')
    new_mat = column_reorder @ world_mat @ local_rot_y @ y_up_mat
    loc, rot, scale = new_mat.decompose()
    rot = rot.to_euler()
    loc_str = "{:.5f}".format(loc[0]) + " " + "{:.5f}".format(loc[1]) + " " + "{:.5f}".format(loc[2])
    rot_str = "{:.5f}".format(rot[0]) + " " + "{:.5f}".format(rot[1]) + " " + "{:.5f}".format(rot[2])
    scale_str = "{:.5f}".format(scale[0])+ " " + "{:.5f}".format(scale[1]) + " " + "{:.5f}".format(scale[2])
    # END getting variables
    # Search for existing entry
    old_mod_time = 0
    created_new = 0
    newobj = None
    obj_type = "Entity" if is_ent else "StaticObject"
    # If entry exists, update
    for obj in objects.iter(obj_type):
        if object_name == obj.get("Name"):
            try:
                old_mod_time = int(obj.get("ModStamp"))
            except ValueError:
                old_mod_time = 0
            newobj = obj
            break
    # If does not exist, make new
    if newobj is None:
        # Create new XML element
        newobj = ET.Element(obj_type)
        newobj.attrib["ID"] = str(lastID)
        newobj.attrib["CreStamp"] = str(int(time.time()))
        created_new = 1
    newobj.attrib["Name"] = object_name
    newobj.attrib["ModStamp"] = str(int(time.time()))
    newobj.attrib["WorldPos"] = loc_str
    newobj.attrib["Rotation"] = rot_str
    newobj.attrib["Scale"] = scale_str
    newobj.attrib["FileIndex"] = str(existing_index)
    if is_ent:
        newobj.attrib["Active"] = "true"
        newobj.attrib["Important"] = "false"
    else:
        newobj.attrib["Collides"] = "true" if hpl3export.collides else "false"
        newobj.attrib["CastShadows"] = "true" if hpl3export.casts_shadows else "false"
        newobj.attrib["IsOccluder"] = "true" if hpl3export.is_occluder else "false"
    newobj.attrib["ColorMul"] = "1 1 1 1"
    newobj.attrib["CulledByDistance"] = "true" if hpl3export.distance_culling else "false"
    # BUGFIX: was "False" (capitalized); HPL3 boolean attributes are lowercase
    newobj.attrib["CulledByFog"] = "true" if hpl3export.culled_by_fog else "false"
    newobj.attrib["IllumColor"] = "1 1 1 1"
    newobj.attrib["IllumBrightness"] = "1"
    newobj.attrib["UID"] = "blender"
    if is_ent:
        # Entities carry CastShadows as a UserVariables Var instead
        user_variables = newobj.find("UserVariables")
        if user_variables is None:
            user_variables = ET.SubElement(newobj, "UserVariables")
        cast_shadows = None
        for var in user_variables.iter("Var"):
            if var.get("Name") == "CastShadows":
                cast_shadows = var
                break
        if cast_shadows is None:
            var = ET.SubElement(user_variables, "Var")
            var.attrib["Name"] = "CastShadows"
        # `var` is either the freshly created element or the one found above
        var.attrib["Value"] = "true" if hpl3export.casts_shadows else "false"
    if created_new:
        objects.append(newobj)
    return old_mod_time
# ------------------------------------------------------------------------
# find (or create) an entry in the script's asset tracking xml file
# short_path - path to file with ".../SOMA/" removed
# ------------------------------------------------------------------------
def get_asset_xml_entry(self, object):
    """Find or create this mesh's <Asset> entry in the tracking xml.

    Sets self.current_DAE to the matching element; a freshly created
    entry starts with a use count of "0".
    """
    # Build the SOMA-relative .dae path used as the lookup key
    mesh_name = self.get_custom_property(object, "hpl3export_mesh_name")
    filepath = self.mesh_export_path + "/" + mesh_name + "/" + mesh_name + ".dae"
    filepath = re.sub(r'\\', '/', os.path.normpath(filepath))
    short_path = re.sub(r'.*\/SOMA\/', '', filepath)
    # Look for an existing entry with this path
    found = None
    for asset in self.asset_xml.iter("Asset"):
        if asset.get("DAEpath") == short_path:
            found = asset
            break
    if found is None:
        # Not listed yet: register the asset with zero uses
        found = ET.SubElement(self.asset_xml, "Asset")
        found.attrib["DAEpath"] = short_path
        found.attrib["Uses"] = "0"
    self.current_DAE = found
def get_custom_property(self, object, prop):
    """Read custom property `prop` from `object`, falling back to its name.

    The fallback covers empties, which are allowed as export roots but
    never receive the exporter's custom properties.
    """
    return object[prop] if prop in object else object.name
class MetaMaterial:
    """Pairs a slot's original material with its export-ready copy."""
    # The material as found in the slot (may lack a Principled BSDF node)
    original = None
    # The "hpl3export_*" working copy actually assigned to the slot
    material = None
    # The Principled BSDF node inside `material`'s node tree
    principled_node = None
class MetaImage:
    """State for one output texture (used by the per-map subclasses)."""
    image = None            # NOTE(review): presumably the bake target image — set elsewhere
    temp_path = ""          # on-disk path for the temporary output — set elsewhere
    temp_image = ""         # name of the temporary image — set elsewhere
    microimage = None       # NOTE(review): small/downscaled variant — confirm usage
    is_microimage = False
    bsdf_sockets = None     # version-dependent socket-name table (see __init__)
    socket_name = None      # resolved Principled input name for this map

    def __init__(self, bsdf_sockets, socket_name):
        # `socket_name` is a key into the table ("Color", "Normal", ...);
        # store the version-correct socket label it maps to.
        self.bsdf_sockets = bsdf_sockets
        self.socket_name = bsdf_sockets[socket_name]
class MetaMesh:
    """Tracks the mesh datablocks derived from one exported object."""
    object = None                       # the Blender object being exported
    mesh_original = None                # the object's original mesh data
    mesh_with_reset_uvs = None          # copy with rebuilt UV layers (see below)
    mesh_with_applied_modifiers = None  # set elsewhere in the exporter

    def __init__(self, object):
        self.object = object
        self.mesh_original = object.data

    def make_data_copy(self, data):
        # Linked (library) data can't be edited; make it local instead of copying.
        if (data.library != None):
            return data.make_local()
        else:
            return data.copy()

    def create_mesh_with_reset_uvs(self):
        """Copy the mesh and rebuild every UV layer (same names) so each
        starts from default per-face UVs, keeping the previously
        render-active layer active."""
        current_mesh_name = self.object.data.name  # NOTE(review): unused
        new_mesh = self.make_data_copy(self.object.data)
        uv_layers = new_mesh.uv_layers
        # NOTE(review): stays -1 (i.e. the last layer) if no layer is
        # render-active — confirm that situation cannot occur.
        old_uv_idx = -1
        uv_names = []
        for idx, layer in enumerate(uv_layers):
            uv_names.append(layer.name)
            if layer.active_render == True:
                old_uv_idx = idx
        # Removing index 0 repeatedly empties the collection safely
        for i in range(0, len(uv_layers)):
            uv_layers.remove(uv_layers[0])
        for name in uv_names:
            uv_layers.new(name=name)
        uv_layers[old_uv_idx].active = True
        uv_layers[old_uv_idx].active_render = True
        self.mesh_with_reset_uvs = new_mesh
class MapGroup:
    """One bake group: the materials, meshes, and output maps that share a
    texture set. Class-level values are shadowed per instance in __init__."""
    metaimages = {}   # map name -> MetaImage handler
    metamats = []     # MetaMaterial members of this group
    metameshes = []   # MetaMesh members of this group
    mat_paths = []    # output .mat paths — filled elsewhere

    def __init__(self):
        self.metaimages = {}
        self.metamats = []
        self.metameshes = []
        self.mat_paths = []

    def prepare_pre_bake(self, mapname):
        # Run the map handler's pre-bake on each material and make its
        # "HPL3EXPORT_<map>" image node active so the bake targets it.
        for metamat in self.metamats:
            node_tree = metamat.material.node_tree
            image_node = node_tree.nodes["HPL3EXPORT_" + mapname]
            self.metaimages[mapname].pre_bake(metamat, node_tree, image_node)
            node_tree.nodes.active = image_node

    def prepare_post_bake(self, mapname):
        # Undo the pre-bake changes and clear the active node.
        for metamat in self.metamats:
            node_tree = metamat.material.node_tree
            image_node = node_tree.nodes["HPL3EXPORT_" + mapname]
            self.metaimages[mapname].post_bake(metamat, node_tree, image_node)
            node_tree.nodes.active = None

    def special_bake(self, mapname):
        # Bake driven by the map handler itself, using the first material only.
        metamat = self.metamats[0]
        node_tree = metamat.material.node_tree
        image_node = node_tree.nodes["HPL3EXPORT_" + mapname]
        self.metaimages[mapname].bake(self, metamat, node_tree, image_node)
def prepare_materials_singletex(self, hpl3export, current_obj):
    """Prepare an object for single-texture-set export (OP2).

    Creates one MapGroup for the whole object, replaces every material
    slot with an export copy, creates the group's bake maps, and ensures
    a usable (smart-projected) UV layout exists.
    """
    mapgroup = self.MapGroup()
    self.mapgroups.append(mapgroup)
    metamesh = self.MetaMesh(current_obj)
    if metamesh not in mapgroup.metameshes:
        mapgroup.metameshes.append(metamesh)
    mesh_name = current_obj["hpl3export_mesh_name"]
    if len(current_obj.material_slots) == 0:
        bpy.ops.object.material_slot_add()
    for idx,slot in enumerate(current_obj.material_slots):
        if slot.material is None:
            self.set_slot_to_default_material_singletex(mesh_name, current_obj, mapgroup, slot)
            continue
        # if temp material isn't in current mapgroup
        temp_mat_name = "hpl3export_" + mesh_name + "_" + slot.material.name
        metamat = None
        for existing in mapgroup.metamats:
            if existing.material.name == temp_mat_name:
                metamat = existing
        if metamat is None:
            metamat = self.MetaMaterial()
            metamat.original = slot.material
            # If material isn't valid, make new
            if self.get_principled_node(slot.material) is None:
                metamat.material = self.make_valid_material(slot.material, temp_mat_name)
            else:
                # Make a copy
                metamat.material = self.make_data_copy(slot.material)
                metamat.material.name = temp_mat_name
            metamat.principled_node = self.get_principled_node(metamat.material)
            self.prepare_principled_node(metamat.principled_node)
            # Add material to mapgroup
            mapgroup.metamats.append(metamat)
        slot.material = metamat.material
    self.create_mapgroup_maps(hpl3export, mapgroup, self.get_export_dir(hpl3export, mesh_name), mesh_name)
    if len(current_obj.data.uv_layers) == 0:
        # BUGFIX: `uv_layers.new()` referenced an undefined local name
        # (NameError); use the mesh's collection, matching
        # prepare_materials_multitex.
        new_uv = current_obj.data.uv_layers.new()
        self.smart_project_uvs(current_obj)
    else:
        self.create_single_uv_map(hpl3export, current_obj)
    metamesh.create_mesh_with_reset_uvs()
    return
def prepare_materials_multitex(self, hpl3export, current_obj):
    """Prepare an object for per-material texture export (OP1).

    Each distinct material gets (or reuses) its own MapGroup; slots are
    replaced with "hpl3export_*" copies and the mesh is registered in
    every group it participates in.
    """
    mapgroup = None
    metamesh = None
    # Reuse the MetaMesh if this object already appears in another group
    for mg in self.mapgroups:
        for existing in mg.metameshes:
            if current_obj == existing.object:
                metamesh = existing
    if metamesh is None:
        metamesh = self.MetaMesh(current_obj)
    if len(current_obj.material_slots) == 0:
        bpy.ops.object.material_slot_add()
    for idx,slot in enumerate(current_obj.material_slots):
        if slot.material is None:
            self.set_slot_to_default_material_multitex(hpl3export, current_obj, slot, metamesh)
            continue
        # if temp material isn't in current mapgroup
        temp_mat_name = "hpl3export_" + slot.material.name
        metamat = None
        # A group per material: look for an existing group with this copy
        for existing in self.mapgroups:
            if existing.metamats[0].material.name == temp_mat_name:
                mapgroup = existing
                metamat = existing.metamats[0]
        if metamat is None:
            mapgroup = self.MapGroup()
            self.mapgroups.append(mapgroup)
            metamat = self.MetaMaterial()
            metamat.original = slot.material
            # If material isn't valid, make new
            if self.get_principled_node(slot.material) is None:
                metamat.material = self.make_valid_material(slot.material, temp_mat_name)
            else:
                # Make a copy
                metamat.material = self.make_data_copy(slot.material)
                metamat.material.name = temp_mat_name
            metamat.principled_node = self.get_principled_node(metamat.material)
            # Add material to mapgroup
            self.prepare_principled_node(metamat.principled_node)
            mapgroup.metamats.append(metamat)
            mesh_name = current_obj["hpl3export_mesh_name"]
            self.create_mapgroup_maps(hpl3export, mapgroup, self.get_export_dir(hpl3export, mesh_name), temp_mat_name)
        slot.material = metamat.material
        # Register the mesh with this slot's group
        if metamesh not in mapgroup.metameshes:
            mapgroup.metameshes.append(metamesh)
    if len(current_obj.data.uv_layers) == 0:
        new_uv = current_obj.data.uv_layers.new()
        self.smart_project_uvs(current_obj)
    metamesh.create_mesh_with_reset_uvs()
    return
def get_principled_node(self, mat):
    """Return the first Principled BSDF node in `mat`'s tree, or None."""
    if not mat.use_nodes or mat.node_tree is None:
        return None
    for candidate in mat.node_tree.nodes:
        if candidate.type == 'BSDF_PRINCIPLED':
            return candidate
    return None
def prepare_principled_node(self, node):
    """Zero out Metallic and Transmission so bakes come out dielectric/opaque."""
    for key in ("Metallic", "Transmission"):
        node.inputs[self.bsdf_sockets[key]].default_value = 0
def make_valid_material(self, original, temp_mat_name):
    """Build a fresh node-based material that mirrors `original`'s viewport
    diffuse color, specular intensity, and roughness.

    Used when the source material has no Principled BSDF node to copy.
    """
    mat = bpy.data.materials.new(temp_mat_name)
    mat.use_nodes = True
    # A new node-based material always starts with this node
    inputs = mat.node_tree.nodes["Principled BSDF"].inputs
    # Copy base, spec, and roughness from the legacy material settings
    inputs[self.bsdf_sockets["Color"]].default_value = (
        original.diffuse_color[0], original.diffuse_color[1], original.diffuse_color[2], 1
    )
    inputs[self.bsdf_sockets["Specular"]].default_value = original.specular_intensity
    inputs[self.bsdf_sockets["Roughness"]].default_value = original.roughness
    return mat
def add_basic_material(self, current_obj, name):
    """Fetch-or-create a plain node-based material named `name` and wrap it
    in a MetaMaterial (original and export copy are the same material)."""
    if name in bpy.data.materials:
        mat = bpy.data.materials[name]
    else:
        mat = bpy.data.materials.new(name)
    mat.use_nodes = True
    wrapper = self.MetaMaterial()
    wrapper.original = mat
    wrapper.material = mat
    wrapper.principled_node = mat.node_tree.nodes["Principled BSDF"]
    return wrapper
def set_slot_to_default_material_singletex(self, mesh_name, current_obj, mapgroup, slot):
    """Fill an empty slot with the per-mesh default export material and
    register it in the object's map group."""
    metamat = self.add_basic_material(current_obj, "hpl3export_" + mesh_name + "_default")
    mapgroup.metamats.append(metamat)
    slot.material = metamat.material
def set_slot_to_default_material_multitex(self, hpl3export, current_obj, slot, metamesh):
    # Assign the shared "hpl3export_default" material to *slot* (multi-texture
    # mode). Reuses the existing default map group when one was already built
    # for a previous slot; otherwise creates the group, its MetaMaterial
    # wrapper, and its bake images, then registers *metamesh* on the group.
    mapgroup = None
    default_mat_name = "hpl3export_default"
    # Look for an already-created map group whose first material is the
    # shared default material
    for idx_mapgroup in self.mapgroups:
        if idx_mapgroup.metamats[0].material.name == default_mat_name:
            mapgroup = idx_mapgroup
            # Material already exists, so this re-fetches its wrapper
            default_metamat = self.add_basic_material(current_obj, default_mat_name)
    if mapgroup is None:
        # First use: build the group, register it, and create its bake maps
        mapgroup = self.MapGroup()
        self.mapgroups.append(mapgroup)
        default_metamat = self.add_basic_material(current_obj, default_mat_name)
        mapgroup.metamats.append(default_metamat)
        self.create_mapgroup_maps(hpl3export, mapgroup, self.get_export_dir(hpl3export, current_obj["hpl3export_mesh_name"]), default_mat_name)
    if metamesh not in mapgroup.metameshes:
        mapgroup.metameshes.append(metamesh)
    slot.material = default_metamat.material
def create_single_uv_map(self, hpl3export, current_obj):
    """Add a fresh UV layer named "hpl3uv" to *current_obj*, make it active,
    and (unless disabled in the settings) smart-project the mesh onto it.

    Blender caps meshes at 8 UV layers, so when the mesh is full one layer
    is evicted first to make room.
    """
    uv_layers = current_obj.data.uv_layers
    if len(uv_layers) == 8:
        # Hack: evict the last slot — or the one before it when the last
        # slot is the active layer
        victim = 6 if uv_layers.active_index == 7 else 7
        uv_layers.remove(uv_layers[victim])
    fresh_uv = uv_layers.new(name="hpl3uv")
    fresh_uv.active = True
    # Unwrap requires this object to be the only selected object
    if not hpl3export.disable_uv_smart_project:
        self.smart_project_uvs(current_obj)
def smart_project_uvs(self, current_obj):
    """Run Blender's Smart UV Project on *current_obj*'s active UV layer.

    The operator's signature changed in Blender 2.91: the new form takes the
    angle limit in radians, works on selected faces, and must run in edit
    mode; the legacy form takes degrees and runs from object mode.
    """
    if bpy.app.version < (2, 91, 0):
        # Legacy operator (pre-2.91): angle limit in degrees
        bpy.ops.uv.smart_project(angle_limit=85.0, island_margin = 0.02, use_aspect=False, stretch_to_bounds=False)
        return
    # 2.91+: select every face, then unwrap from edit mode
    for face in current_obj.data.polygons:
        face.select = True
    bpy.ops.object.mode_set(mode='EDIT')
    bpy.ops.uv.smart_project(angle_limit=math.radians(70.0), island_margin = 0.002, correct_aspect=False, scale_to_bounds=False)
    bpy.ops.object.mode_set(mode='OBJECT')
def create_mapgroup_maps(self, hpl3export, mapgroup, export_dir, base_name):
    # Create one bake-target image per map type for this map group, sized
    # from the group's source textures, and attach an image-texture node for
    # it to every material in the group.
    #
    # hpl3export -- exporter settings (resolution caps, bake-mode toggles)
    # mapgroup   -- group of MetaMaterials that will share the baked images
    # export_dir -- directory prefix used to build each image's temp path
    # base_name  -- name prefix for the generated images
    # Find if normal map is used
    using_nmap = False
    for metamat in mapgroup.metamats:
        if metamat.principled_node.inputs[self.bsdf_sockets["Normal"]].is_linked:
            using_nmap = True
    # Make maps
    # NOTE: loop variable "map" shadows the builtin of the same name
    for maptype, map in self.maps.items():
        # Work on a copy so the prototype MetaImage in self.maps stays clean
        mi = copy.deepcopy(map)
        if maptype == "NORMAL" and not using_nmap:
            # Skip if single export, otherwise make one and just don't export it
            if hpl3export.bake_multi_mat_into_single == 'OP2':
                continue
            else:
                mi.exportable = False
        # Take the largest optimal size over all materials in the group
        res_x = res_y = 0
        socket_linked = False
        for metamat in mapgroup.metamats:
            result_x, result_y, _socket_linked = self.get_optimal_image_size(hpl3export, metamat.principled_node.inputs[map.socket_name])
            res_x = max(result_x, res_x)
            res_y = max(result_y, res_y)
            if _socket_linked:
                socket_linked = True
        # Single texture
        requires_full_res = socket_linked or len(mapgroup.metamats) > 1 or hpl3export.bake_scene_lighting
        if (hpl3export.bake_multi_mat_into_single == 'OP2' and requires_full_res):
            res_x = hpl3export.map_res_x
            res_y = hpl3export.map_res_y
        # Multi texture
        requires_full_res = hpl3export.bake_scene_lighting
        if (hpl3export.bake_multi_mat_into_single == 'OP1' and requires_full_res):
            res_x = hpl3export.map_res_x
            res_y = hpl3export.map_res_y
        # Roughness and pre-spec maps are baked at half resolution
        map_res_x = int(res_x/2) if maptype in ('ROUGHNESS', 'PRESPEC') else res_x
        map_res_y = int(res_y/2) if maptype in ('ROUGHNESS', 'PRESPEC') else res_y
        bpy.ops.image.new(name=base_name + "_" + maptype, width=map_res_x, height=map_res_y)
        mi.image = bpy.context.blend_data.images[base_name + "_" + maptype]
        mi.is_microimage = map_res_x < 32 or map_res_y < 32
        if not hpl3export.disable_small_texture_workaround:
            # 4x4 stand-in image used by the small-texture bake workaround
            bpy.ops.image.new(name=base_name + "_" + maptype + "_micro", width=4, height=4)
            mi.microimage = bpy.context.blend_data.images[base_name + "_" + maptype + "_micro"]
        # Temp path on disk: base name sanitized to alphanumerics/underscores
        mi.temp_path = export_dir + re.sub('[^0-9a-zA-Z]+', '_', base_name)
        mapgroup.metaimages[maptype] = mi
        # Add image texture node to mapgroup materials
        for metamat in mapgroup.metamats:
            img_node = metamat.material.node_tree.nodes.new("ShaderNodeTexImage")
            img_node.name = "HPL3EXPORT_" + maptype
            img_node.image = mi.image
    return
# ------------------------------------------------------------------------
# traverse socket-connected images to find optimal baking resolution
# ------------------------------------------------------------------------
def get_optimal_image_size(self, hpl3export, socket):
    """Return (res_x, res_y, is_linked) for baking the given input socket.

    socket -- socket belonging to the material's Principled BSDF node.

    Unlinked sockets bake to a tiny 4x4 placeholder. Otherwise the node
    subtree feeding the socket is walked and the largest source-image
    resolution found is snapped to a power of two (1..16384) and clamped
    to the user-configured maximum bake size. If no image node with an
    image is found, falls back to the full configured bake size.
    """
    if not socket.is_linked:
        return 4, 4, False

    def snap_pow2(res, limit):
        # Round to the nearest power of two in [2^0, 2^14], then clamp
        # to the user's maximum bake resolution.
        exponent = max(min(round(math.log(res, 2)), 14), 0)
        return min(int(math.pow(2, exponent)), limit)

    # Breadth-first walk of the subtree feeding this socket. Tracking
    # visited nodes by name (unique within a node tree) prevents
    # re-walking shared nodes -- the previous implementation re-enqueued
    # duplicates, which blows up exponentially on diamond-shaped graphs,
    # and used list.pop(0), which is O(n) per dequeue.
    pending = [socket.links[0].from_node]
    visited = set()
    max_res_x = max_res_y = 0
    while pending:
        node = pending.pop()
        if node.name in visited:
            continue
        visited.add(node.name)
        for node_input in node.inputs:
            if node_input.is_linked:
                pending.append(node_input.links[0].from_node)
        if isinstance(node, bpy.types.ShaderNodeTexImage) and node.image is not None:
            max_res_x = max(node.image.size[0], max_res_x)
            max_res_y = max(node.image.size[1], max_res_y)
    if max_res_x != 0 and max_res_y != 0:
        # Set resolution to same as the largest source, power-of-two snapped
        max_res_x = snap_pow2(max_res_x, hpl3export.map_res_x)
        max_res_y = snap_pow2(max_res_y, hpl3export.map_res_y)
    else:
        # No usable image anywhere in the subtree
        max_res_x = hpl3export.map_res_x
        max_res_y = hpl3export.map_res_y
    return max_res_x, max_res_y, True
class RoughnessMap(MetaImage):
    """Bake target for the roughness channel.

    Kept internal-only (never written to disk on its own) and baked at the
    standard resolution directly from the shader, so no pre/post rewiring
    of the node tree is needed.
    """
    name = "ROUGHNESS"
    suffix = "_rough"
    exportable = False
    special_bake_func = False
    bake_using_diffuse = False

    def pre_bake(self, metamat, node_tree, image_node):
        # Nothing to rewire before a roughness bake; just log it
        print(f"{self.name} map pre-bake")

    def post_bake(self, metamat, node_tree, image_node):
        # Nothing to restore afterwards either
        print(f"{self.name} map post-bake")
class PrespecMap(MetaImage):
name = "PRESPEC"
suffix = "_prespec"
exportable = False
special_bake_func = False
bake_using_diffuse = True
def pre_bake(self, metamat, node_tree, image_node):
print(self.name + " map pre-bake")
spec_socket = metamat.principled_node.inputs[self.bsdf_sockets["Specular"]]
diff_socket = metamat.principled_node.inputs[self.bsdf_sockets["Color"]]
# If diff socket is linked, change the name and label
if diff_socket.is_linked:
diff_socket.links[0].from_node.name = "HPL3_ORIGINALDIFF"
diff_socket.links[0].from_node.label = diff_socket.links[0].from_socket.name
if not spec_socket.is_linked:
# Create RGB node and attach
spec_value = math.sqrt(spec_socket.default_value) * 0.8
node = node_tree.nodes.new("ShaderNodeRGB")
node.outputs[0].default_value = (spec_value, spec_value, spec_value, 1.0)
node.name = "HPL3_RGB"
node_tree.links.new(node.outputs[0], diff_socket)
else:
spec_out_socket = None
for link in node_tree.links:
if (link.to_socket == spec_socket):