diff --git a/i_scene_cp77_gltf/cyber_prefs.py b/i_scene_cp77_gltf/cyber_prefs.py index a28da11..e5a2759 100644 --- a/i_scene_cp77_gltf/cyber_prefs.py +++ b/i_scene_cp77_gltf/cyber_prefs.py @@ -68,19 +68,20 @@ class CP77IOSuitePreferences(AddonPreferences): def draw(self, context): layout = self.layout box = layout.box() - row = box.row() row.prop(self, "show_modtools",toggle=1) row.prop(self, "experimental_features",toggle=1) row.prop(self, "non_verbose",toggle=1) if self.experimental_features: + box = layout.box() + box.label(text="Material Depot Path:") row = box.row() - row.prop(self, "depotfolder_path") + row.prop(self, "depotfolder_path", text="") row = box.row() if self.show_modtools: row.alignment = 'LEFT' box = layout.box() - box.label(text="Mod Tools Properties") + box.label(text="Mod Tools Preferences") split = row.split(factor=0.5,align=True) col = split.column(align=True) row.alignment = 'LEFT' diff --git a/i_scene_cp77_gltf/cyber_props.py b/i_scene_cp77_gltf/cyber_props.py index 351eb54..6c2dc53 100644 --- a/i_scene_cp77_gltf/cyber_props.py +++ b/i_scene_cp77_gltf/cyber_props.py @@ -4,7 +4,7 @@ from bpy.props import (StringProperty, EnumProperty, BoolProperty, CollectionProperty, FloatProperty, IntProperty, PointerProperty) from .main.physmat_lib import physmat_list #from . meshtools import (CP77CollectionList) -from .main.common import get_classes, get_rig_dir, get_refit_dir, get_resources_dir +from .main.common import get_classes, get_rig_dir, get_refit_dir, get_resources_dir, update_presets_items import sys resources_dir = get_resources_dir() @@ -15,6 +15,7 @@ def CP77RefitList(context): + target_addon_paths = [None] target_addon_names = ['None'] @@ -41,6 +42,8 @@ def CP77RefitList(context): # Return the list of tuples return target_body_paths, target_body_names +#def VertColourPresetList + def SetCyclesRenderer(use_cycles=True, set_gi_params=False): # set the render engine for all scenes to Cycles @@ -101,6 +104,7 @@ def CP77ArmatureList(self, context): print(f"Error accessing bpy.data.objects: {e}") arms = [] return arms + class CP77_PT_PanelProps(PropertyGroup): # collision panel props: @@ -113,7 +117,12 @@ class CP77_PT_PanelProps(PropertyGroup): ], default='VEHICLE' ) - + + vertex_color_presets: EnumProperty( + name="Vertex Color Preset", + items=lambda self, context: update_presets_items() + ) + physics_material: EnumProperty( items= enum_items, name="Physics Material", @@ -248,6 +257,12 @@ class CP77_PT_PanelProps(PropertyGroup): name="With Materials", default=True, description="Import Wolvenkit-exported materials" + ) + + axl_yaml: BoolProperty( + name="Use YAML instead of JSON", + default=False, + description="Use the ArchiveXL YAML format instead of JSON format for generated .xl files" ) def add_anim_props(animation, action): @@ -295,7 +310,7 @@ def register_props(): for cls in other_classes: bpy.utils.register_class(cls) Scene.cp77_panel_props = PointerProperty(type=CP77_PT_PanelProps) - + update_presets_items() def unregister_props(): for cls in reversed(other_classes): diff --git a/i_scene_cp77_gltf/exporters/__init__.py b/i_scene_cp77_gltf/exporters/__init__.py index 9a225e3..5a71e89 100644 --- a/i_scene_cp77_gltf/exporters/__init__.py +++ b/i_scene_cp77_gltf/exporters/__init__.py @@ -24,9 +24,15 @@ class CP77StreamingSectorExport(Operator,ExportHelper): bl_description = "Export changes to Sectors back to project" filename_ext = ".cpmodproj" filter_glob: StringProperty(default="*.cpmodproj", 
options={'HIDDEN'}) + + def draw(self, context): + props = context.scene.cp77_panel_props + layout = self.layout + layout.prop(props, "axl_yaml") def execute(self, context): - exportSectors(self.filepath) + use_yaml = context.scene.cp77_panel_props.axl_yaml + exportSectors(self.filepath, use_yaml) return {'FINISHED'} diff --git a/i_scene_cp77_gltf/exporters/glb_export.py b/i_scene_cp77_gltf/exporters/glb_export.py index 9f6d116..c814278 100644 --- a/i_scene_cp77_gltf/exporters/glb_export.py +++ b/i_scene_cp77_gltf/exporters/glb_export.py @@ -1,5 +1,6 @@ import bpy from .. animtools import reset_armature +from ..main.common import show_message #setup the default options to be applied to all export types def default_cp77_options(): @@ -54,8 +55,6 @@ def pose_export_options(): } return options -#setup the actual exporter - rewrote almost all of this, much quicker now - red_color = (1, 0, 0, 1) # RGBA garment_cap_name="_GarmentSupportCap" garment_weight_name="_GarmentSupportWeight" @@ -93,94 +92,94 @@ def add_garment_cap(mesh): if cap_layer != None and loop_index < (len(cap_layer.data)): cap_layer.data[loop_index].color = red_color - -# setup the actual exporter - rewrote almost all of this, much quicker now # mana: by assigning default attributes, we make this update-safe (some older scripts broke). Just don't re-name them! def export_cyberpunk_glb(context, filepath, export_poses=False, export_visible=False, limit_selected=True, static_prop=False, red_garment_col=False, apply_transform=True): - groupless_bones = set() - bone_names = [] - #check if the scene is in object mode, if it's not, switch to object mode if bpy.context.mode != 'OBJECT': bpy.ops.object.mode_set(mode='OBJECT') objects = context.selected_objects - armatures = [obj for obj in objects if obj.type == 'ARMATURE'] - + options = default_cp77_options() + #if for photomode, make sure there's an armature selected, if not use the message box to show an error if export_poses: + armatures = [obj for obj in objects if obj.type == 'ARMATURE'] if not armatures: - bpy.ops.cp77.message_box('INVOKE_DEFAULT', message="No armature objects are selected, please select an armature") + show_message("No armature objects are selected, please select an armature") return {'CANCELLED'} - for action in bpy.data.actions: - if "schema" not in action: - action["schema"] ={"type": "wkit.cp2077.gltf.anims","version": 4} - if "animationType" not in action: - action["animationType"] = 'Normal' - if "rootMotionType" not in action: - action["rootMotionType"] = 'None' - if "frameClamping" not in action: - action["frameClamping"] = True - if "frameClampingStartFrame" not in action: - action["frameClampingStartFrame"] = -1 - if "frameClampingEndFrame" not in action: - action["frameClampingEndFrame"] = -1 - if "numExtraJoints" not in action: - action["numExtraJoints"] = 0 - if "numeExtraTracks" not in action: - action["numeExtraTracks"] = 0 - if "constTrackKeys" not in action: - action["constTrackKeys"] = [] - if "trackKeys" not in action: - action["trackKeys"] = [] - if "fallbackFrameIndices" not in action: - action["fallbackFrameIndices"] = [] - if "optimizationHints" not in action: - action["optimizationHints"] = { "preferSIMD": False, "maxRotationCompression": 1} - - #if the export poses value is True, set the export options to ensure the armature is exported properly with the animations - options = default_cp77_options() - options.update(pose_export_options()) - for armature in armatures: - reset_armature(armature, context) - print(options) - 
bpy.ops.export_scene.gltf(filepath=filepath, use_selection=True, **options) - # TODO should that be here? - return{'FINISHED'} - - return {'FINISHED'} - - if not limit_selected: - for obj in bpy.data.objects: - if obj.type == 'MESH' and not "Icosphere" in obj.name: - obj.select_set(True) + + export_anims(context, filepath, options, armatures) + return{'FINISHED'} #if export_poses option isn't used, check to make sure there are meshes selected and throw an error if not - meshes = [obj for obj in objects if obj.type == 'MESH' and not "Icosphere" in obj.name] #throw an error in the message box if you haven't selected a mesh to export if not export_poses: + meshes = [obj for obj in objects if obj.type == 'MESH' and not "Icosphere" in obj.name] if not meshes: - bpy.ops.cp77.message_box('INVOKE_DEFAULT', message="No meshes selected, please select at least one mesh") + show_message("No meshes selected, please select at least one mesh") return {'CANCELLED'} + export_meshes(context, filepath, export_visible, limit_selected, static_prop, red_garment_col, apply_transform, meshes, options) + +def export_anims(context, filepath, options, armatures): + for action in bpy.data.actions: + if "schema" not in action: + action["schema"] ={"type": "wkit.cp2077.gltf.anims","version": 4} + if "animationType" not in action: + action["animationType"] = 'Normal' + if "rootMotionType" not in action: + action["rootMotionType"] = 'Unknown' + if "frameClamping" not in action: + action["frameClamping"] = True + if "frameClampingStartFrame" not in action: + action["frameClampingStartFrame"] = -1 + if "frameClampingEndFrame" not in action: + action["frameClampingEndFrame"] = -1 + if "numExtraJoints" not in action: + action["numExtraJoints"] = 0 + if "numeExtraTracks" not in action: + action["numeExtraTracks"] = 0 + if "constTrackKeys" not in action: + action["constTrackKeys"] = [] + if "trackKeys" not in action: + action["trackKeys"] = [] + if "fallbackFrameIndices" not in action: + action["fallbackFrameIndices"] = [] + if "optimizationHints" not in action: + action["optimizationHints"] = { "preferSIMD": False, "maxRotationCompression": 1} + + options.update(pose_export_options()) + for armature in armatures: + reset_armature(armature, context) + print(options) + bpy.ops.export_scene.gltf(filepath=filepath, use_selection=True, **options) + # TODO should that be here? + return{'FINISHED'} + return {'FINISHED'} - #check that meshes include UVs and have less than 65000 verts, throw an error if not - for mesh in meshes: +def export_meshes(context, filepath, export_visible, limit_selected, static_prop, red_garment_col, apply_transform, meshes, options): + groupless_bones = set() + bone_names = [] + options.update(cp77_mesh_options()) + if not limit_selected: + for obj in bpy.data.objects: + if obj.type == 'MESH' and not "Icosphere" in obj.name: + obj.select_set(True) + for mesh in meshes: # apply transforms if apply_transform: bpy.ops.object.transform_apply(location=True, rotation=True, scale=True) if not mesh.data.uv_layers: - bpy.ops.cp77.message_box('INVOKE_DEFAULT', message="Meshes must have UV layers in order to import in Wolvenkit. See https://tinyurl.com/uv-layers") + show_message("Meshes must have UV layers in order to import in Wolvenkit. See https://tinyurl.com/uv-layers") return {'CANCELLED'} #check submesh vertex count to ensure it's less than the maximum for import vert_count = len(mesh.data.vertices) if vert_count > 65535: - message=(f"{mesh.name} has {vert_count} vertices. 
Each submesh must have less than 65,535 vertices. See https://tinyurl.com/vertex-count") - bpy.ops.cp77.message_box('INVOKE_DEFAULT', message=message) + show_message(f"{mesh.name} has {vert_count} vertices. Each submesh must have less than 65,535 vertices. See https://tinyurl.com/vertex-count") return {'CANCELLED'} #check that faces are triangulated, cancel export, switch to edit mode with the untriangulated faces selected and throw an error @@ -189,8 +188,11 @@ def export_cyberpunk_glb(context, filepath, export_poses=False, export_visible=F bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type='FACE') bpy.ops.mesh.select_face_by_sides(number=3, type='NOTEQUAL', extend=False) - bpy.ops.cp77.message_box('INVOKE_DEFAULT', message="All faces must be triangulated before exporting. Untriangulated faces have been selected for you. See https://tinyurl.com/triangulate-faces") + show_message("All faces must be triangulated before exporting. Untriangulated faces have been selected for you. See https://tinyurl.com/triangulate-faces") return {'CANCELLED'} + + if red_garment_col: + add_garment_cap(mesh) # Check for ungrouped vertices, if they're found, switch to edit mode and select them # No need to do this for static props @@ -201,105 +203,80 @@ def export_cyberpunk_glb(context, filepath, export_poses=False, export_visible=F bpy.ops.mesh.select_mode(type='VERT') try: bpy.ops.mesh.select_ungrouped() - bpy.ops.cp77.message_box('INVOKE_DEFAULT', message=f"Ungrouped vertices found and selected in: {mesh.name}. See https://tinyurl.com/ungrouped-vertices") + show_message(f"Ungrouped vertices found and selected in: {mesh.name}. See https://tinyurl.com/ungrouped-vertices") except RuntimeError: - bpy.ops.cp77.message_box('INVOKE_DEFAULT', message=f"No vertex groups in: {mesh.name} are assigned weights. Assign weights before exporting. See https://tinyurl.com/assign-vertex-weights") + show_message(f"No vertex groups in: {mesh.name} are assigned weights. Assign weights before exporting. See https://tinyurl.com/assign-vertex-weights") return {'CANCELLED'} - if red_garment_col: - add_garment_cap(mesh) - - # set the export options for meshes - options = default_cp77_options() - options.update(cp77_mesh_options()) + armature_modifier = None + for modifier in mesh.modifiers: + if modifier.type == 'ARMATURE' and modifier.object: + armature_modifier = modifier + break - #print the options to the console - print(options) - - # if exporting meshes, iterate through any connected armatures, store their current state. if hidden, unhide them and select them for export - armature_states = {} + if not armature_modifier: + show_message((f"Armature missing from: {mesh.name} Armatures are required for movement. If this is intentional, try 'export as static prop'. 
See https://tinyurl.com/armature-missing")) + return {'CANCELLED'} + + armature = armature_modifier.object + + # Make necessary to armature visibility and selection state for export + armature.hide_set(False) + armature.select_set(True) + + for bone in armature.pose.bones: + bone_names.append(bone.name) + + if armature_modifier.object != mesh.parent: + armature_modifier.object = mesh.parent + + group_has_bone = {group.index: False for group in mesh.vertex_groups} + # groupless_bones = {} + for group in mesh.vertex_groups: + if group.name in bone_names: + group_has_bone[group.index] = True + # print(vertex_group.name) + + # Add groups with no weights to the set + for group_index, has_bone in group_has_bone.items(): + if not has_bone: + groupless_bones.add(mesh.vertex_groups[group_index].name) + + if len(groupless_bones) != 0: + bpy.ops.object.mode_set(mode='OBJECT') # Ensure in object mode for consistent behavior + groupless_bones_list = ", ".join(sorted(groupless_bones)) + armature.hide_set(True) + show_message((f"The following vertex groups are not assigned to a bone, this will result in blender creating a neutral_bone and cause Wolvenkit import to fail: {groupless_bones_list}\nSee https://tinyurl.com/unassigned-bone")) + return {'CANCELLED'} - if not static_prop: - for obj in objects: - if obj.type == 'MESH' and obj.select_get(): - armature_modifier = None - for modifier in obj.modifiers: - if modifier.type == 'ARMATURE' and modifier.object: - armature_modifier = modifier - break - - if not armature_modifier: - bpy.ops.cp77.message_box('INVOKE_DEFAULT', message=(f"Armature missing from: {obj.name} Armatures are required for movement. If this is intentional, try 'export as static prop'. See https://tinyurl.com/armature-missing")) - return {'CANCELLED'} - # Store original visibility and selection state - armature = armature_modifier.object - armature_states[armature] = {"hide": armature.hide_get(), - "select": armature.select_get()} - - # Make necessary to armature visibility and selection state for export - armature.hide_set(False) - armature.select_set(True) - - for bone in armature.pose.bones: - bone_names.append(bone.name) - - if armature_modifier.object != mesh.parent: - armature_modifier.object = mesh.parent - - group_has_bone = {group.index: False for group in obj.vertex_groups} - # groupless_bones = {} - for group in obj.vertex_groups: - if group.name in bone_names: - group_has_bone[group.index] = True - # print(vertex_group.name) - - # Add groups with no weights to the set - for group_index, has_bone in group_has_bone.items(): - if not has_bone: - groupless_bones.add(obj.vertex_groups[group_index].name) - - if len(groupless_bones) != 0: - bpy.ops.object.mode_set(mode='OBJECT') # Ensure in object mode for consistent behavior - groupless_bones_list = ", ".join(sorted(groupless_bones)) - armature.hide_set(True) - bpy.ops.cp77.message_box('INVOKE_DEFAULT', message=(f"The following vertex groups are not assigned to a bone, this will result in blender creating a neutral_bone and cause Wolvenkit import to fail: {groupless_bones_list}\nSee https://tinyurl.com/unassigned-bone")) - return {'CANCELLED'} + if mesh.data.name != mesh.name: + mesh.data.name = mesh.name - if mesh.data.name != mesh.name: - mesh.data.name = mesh.name + if limit_selected: + try: + bpy.ops.export_scene.gltf(filepath=filepath, use_selection=True, **options) + if not static_prop: + armature.hide_set(True) + except Exception as e: + print(e) - if limit_selected: + else: + if export_visible: try: - 
bpy.ops.export_scene.gltf(filepath=filepath, use_selection=True, **options) + bpy.ops.export_scene.gltf(filepath=filepath, use_visible=True, **options) if not static_prop: armature.hide_set(True) except Exception as e: print(e) else: - if export_visible: - try: - bpy.ops.export_scene.gltf(filepath=filepath, use_visible=True, **options) - if not static_prop: - armature.hide_set(True) - except Exception as e: - print(e) - - else: - try: - bpy.ops.export_scene.gltf(filepath=filepath, **options) - if not static_prop: - armature.hide_set(True) - except Exception as e: - print(e) - - - # Restore original armature visibility and selection states - # for armature, state in armature_states.items(): - # armature.select_set(state["select"]) - # armature.hide_set(state["hide"]) - - + try: + bpy.ops.export_scene.gltf(filepath=filepath, **options) + if not static_prop: + armature.hide_set(True) + except Exception as e: + print(e) + # def ExportAll(self, context): # #Iterate through all objects in the scene def ExportAll(self, context): diff --git a/i_scene_cp77_gltf/exporters/sectors_export.py b/i_scene_cp77_gltf/exporters/sectors_export.py index 4d9ef3c..7e96a64 100644 --- a/i_scene_cp77_gltf/exporters/sectors_export.py +++ b/i_scene_cp77_gltf/exporters/sectors_export.py @@ -26,6 +26,7 @@ # - sort out instanced bits import json +from ..jsontool import jsonload import glob import os import bpy @@ -33,6 +34,7 @@ from ..main.common import * from mathutils import Vector, Matrix, Quaternion from os.path import join +from ..cyber_props import * def are_matrices_equal(mat1, mat2, tolerance=0.01): if len(mat1) != len(mat2): @@ -52,36 +54,41 @@ def are_matrices_equal(mat1, mat2, tolerance=0.01): # pip.main(['install', 'pyyaml']) # yamlavail=False -try: - import yaml - yamlavail=True -except ModuleNotFoundError: - from ..install_dependency import * - install_dependency('pyyaml') - messages = [ - "pyyaml not available. Please start Blender as administrator." - "If that doesn't help, switch to Blender's scripting perspective, create a new file, and put the following code in it (no indentation):", - "\timport pip", - "\tpip.main(['install', 'pyyaml'])", - ] - - blender_install_path = next(iter(bpy.utils.script_paths()), None) - - if blender_install_path is not None: - blender_install_path = join(blender_install_path, "..") - blender_python_path = join(blender_install_path, "python", "bin", "python.exe") - blender_module_path = join(blender_install_path, "python", "lib", "site-packages") - - messages.append("If that doesn't help either, run the following command from an administrator command prompt:") - messages.append(f"\t\"{blender_python_path}\" -m pip install pyyaml -t \"{blender_module_path}\"") - - messages.append("You can learn more about running Blender scripts under https://tinyurl.com/cp2077blenderpython") - for message in messages: - show_message(message) - print(message) - C = bpy.context +def try_import_yaml(): + try: + import yaml + yamlavail=True + except ModuleNotFoundError: + from ..install_dependency import install_dependency + try: + install_dependency('pyyaml') + except Exception as e: + print(e) + show_message('Attempted install of PYYAML failed, please see console for more information') + messages = [ + "pyyaml not available. Please start Blender as administrator." 
+ "If that doesn't help, switch to Blender's scripting perspective, create a new file, and put the following code in it (no indentation):", + "\timport pip", + "\tpip.main(['install', 'pyyaml'])", + ] + + blender_install_path = next(iter(bpy.utils.script_paths()), None) + + if blender_install_path is not None: + blender_install_path = join(blender_install_path, "..") + blender_python_path = join(blender_install_path, "python", "bin", "python.exe") + blender_module_path = join(blender_install_path, "python", "lib", "site-packages") + + messages.append("If that doesn't help either, run the following command from an administrator command prompt:") + messages.append(f"\t\"{blender_python_path}\" -m pip install pyyaml -t \"{blender_module_path}\"") + + messages.append("You can learn more about running Blender scripts under https://tinyurl.com/cp2077blenderpython") + for message in messages: + print(message) + + # function to recursively count nested collections def countChildNodes(collection): if 'expectedNodes' in collection: @@ -381,7 +388,7 @@ def create_static_from_WIMN(node, template_nodes, newHID): newHID+=1 -def exportSectors( filename): +def exportSectors(filename, use_yaml): #Set this to your project directory #filename= '/Volumes/Ruby/archivexlconvert/archivexlconvert.cdproj' #project = '/Volumes/Ruby/archivexlconvert/' @@ -403,8 +410,7 @@ def exportSectors( filename): # Open the blank template streaming sector resourcepath=get_resources_dir() - with open(os.path.join(resourcepath,'empty.streamingsector.json'),'r') as f: - template_json=json.load(f) + template_json=jsonload(resourcepath,'empty.streamingsector.json') template_nodes = template_json["Data"]["RootChunk"]["nodes"] template_nodeData = template_json['Data']['RootChunk']['nodeData']['Data'] ID=0 @@ -435,8 +441,7 @@ def exportSectors( filename): print(filepath) if filepath==os.path.join(projpath,projectjson): continue - with open(filepath,'r') as f: - j=json.load(f) + j=jsonload(filepath) nodes = j["Data"]["RootChunk"]["nodes"] t=j['Data']['RootChunk']['nodeData']['Data'] # add nodeDataIndex props to all the nodes in t @@ -840,8 +845,7 @@ def exportSectors( filename): source_sect_coll=bpy.data.collections.get(source_sector) source_sect_json_path=source_sect_coll['filepath'] print(source_sect_json_path) - with open(source_sect_json_path,'r') as f: - source_sect_json=json.load(f) + source_sect_json=jsonload(source_sect_json_path) source_nodes = source_sect_json["Data"]["RootChunk"]["nodes"] print(len(source_nodes),col['nodeIndex']) print(source_nodes[col['nodeIndex']]) @@ -870,8 +874,7 @@ def exportSectors( filename): source_sect_coll=bpy.data.collections.get(source_sector) source_sect_json_path=source_sect_coll['filepath'] print(source_sect_json_path) - with open(source_sect_json_path,'r') as f: - source_sect_json=json.load(f) + source_sect_json=jsonload(source_sect_json_path) source_nodes = source_sect_json["Data"]["RootChunk"]["nodes"] nodes.append(copy.deepcopy(source_nodes[ni])) new_Index=len(nodes)-1 @@ -947,8 +950,7 @@ def exportSectors( filename): source_sect_coll=bpy.data.collections.get(source_sector) source_sect_json_path=source_sect_coll['filepath'] print(source_sect_json_path) - with open(source_sect_json_path,'r') as f: - source_sect_json=json.load(f) + source_sect_json = jsonload(source_sect_json_path) source_nodes = source_sect_json["Data"]["RootChunk"]["nodes"] nodes.append(copy.deepcopy(source_nodes[ni])) new_Index=len(nodes)-1 @@ -1046,7 +1048,8 @@ def exportSectors( filename): 
sectpathout=os.path.join(projpath,os.path.splitext(os.path.basename(filename))[0]+'.streamingsector.json') with open(sectpathout, 'w') as outfile: json.dump(template_json, outfile,indent=2) - + if use_yaml: + try_import_yaml() xlpathout=os.path.join(xloutpath,os.path.splitext(os.path.basename(filename))[0]+'.archive.xl') to_archive_xl(xlpathout, deletions, expectedNodes) print('Finished exporting sectors from ',os.path.splitext(os.path.basename(filename))[0], ' to ',sectpathout ) diff --git a/i_scene_cp77_gltf/importers/phys_import.py b/i_scene_cp77_gltf/importers/phys_import.py index 04bc376..9ac6a86 100644 --- a/i_scene_cp77_gltf/importers/phys_import.py +++ b/i_scene_cp77_gltf/importers/phys_import.py @@ -1,9 +1,12 @@ -import json +from ..jsontool import jsonload import bpy import bmesh +import time from ..collisiontools.collisions import draw_box_collider, draw_convex_collider, set_collider_props def cp77_phys_import(filepath, rig=None, chassis_z=None): + cp77_addon_prefs = bpy.context.preferences.addons['i_scene_cp77_gltf'].preferences + start_time = time.time() physJsonPath = filepath collision_type = 'VEHICLE' for area in bpy.context.screen.areas: @@ -12,8 +15,7 @@ def cp77_phys_import(filepath, rig=None, chassis_z=None): if space.type == 'VIEW_3D': space.shading.wireframe_color_type = 'OBJECT' - with open(physJsonPath, 'r') as phys: - data = json.load(phys) + data = jsonload(physJsonPath) for index, i in enumerate(data['Data']['RootChunk']['bodies']): bname = (i['Data']['name']['$value']) @@ -88,4 +90,7 @@ def cp77_phys_import(filepath, rig=None, chassis_z=None): if chassis_z is not None: capsule.delta_location[2] = chassis_z bpy.ops.object.transform_apply(location=False, rotation=False, scale=True) - new_collection.objects.link(capsule) \ No newline at end of file + new_collection.objects.link(capsule) + + if not cp77_addon_prefs.non_verbose: + print(f"phys collider Import Time: {(time.time() - start_time)} Seconds") diff --git a/i_scene_cp77_gltf/importers/sector_import.py b/i_scene_cp77_gltf/importers/sector_import.py index 63631c3..0bb2fed 100644 --- a/i_scene_cp77_gltf/importers/sector_import.py +++ b/i_scene_cp77_gltf/importers/sector_import.py @@ -15,7 +15,7 @@ # 3) If you want it to generate the _new collections for you to add new stuff in set am_modding to True # 4) Run it -import json +from ..jsontool import jsonload import glob import os import bpy @@ -245,7 +245,12 @@ def get_tan_pos(inst): return pos def importSectors( filepath='', want_collisions=False, am_modding=False, with_materials=True, remap_depot=False, with_lights=True ): - + cp77_addon_prefs = bpy.context.preferences.addons['i_scene_cp77_gltf'].preferences + if not cp77_addon_prefs.non_verbose: + print('') + print('-------------------- Importing Cyberpunk 2077 Streaming Sectors --------------------') + print('') + start_time = time.time() # Enter the path to your projects source\raw\base folder below, needs double slashes between folder names. 
path = os.path.join( os.path.dirname(filepath),'source','raw','base') print('path is ',path) @@ -273,8 +278,7 @@ def importSectors( filepath='', want_collisions=False, am_modding=False, with_ma if VERBOSE: print(os.path.join(path,os.path.basename(project)+'.streamingsector.json')) print(filepath) - with open(filepath,'r') as f: - j=json.load(f) + j = jsonload(filepath) sectorName=os.path.basename(filepath)[:-5] t=j['Data']['RootChunk']['nodeData']['Data'] nodes = j["Data"]["RootChunk"]["nodes"] @@ -395,8 +399,7 @@ def importSectors( filepath='', want_collisions=False, am_modding=False, with_ma if VERBOSE: print(projectjson) print(filepath) - with open(filepath,'r') as f: - j=json.load(f) + j=jsonload(filepath) t=j['Data']['RootChunk']['nodeData']['Data'] # add nodeDataIndex props to all the nodes in t @@ -678,8 +681,7 @@ def importSectors( filepath='', want_collisions=False, am_modding=False, with_ma meshname = data['mesh']['DepotPath']['$value'].replace('\\', os.sep) foliageResource=data['foliageResource']['DepotPath']['$value'].replace('\\', os.sep)+'.json' if os.path.exists(os.path.join(path,foliageResource)): - with open(os.path.join(path,foliageResource),'r') as frfile: - frjson=json.load(frfile) + frjson=jsonload(os.path.join(path,foliageResource)) inst_pos=get_pos(inst) Bucketnum=data['populationSpanInfo']['cketCount'] Bucketstart=data['populationSpanInfo']['cketBegin'] @@ -793,8 +795,7 @@ def importSectors( filepath='', want_collisions=False, am_modding=False, with_ma jsonpath = os.path.join(path,mipath)+".json" #print(jsonpath) try: - with open(jsonpath,'r') as jsonpath: - obj=json.load(jsonpath) + obj=jsonload(jsonpath) index = 0 obj["Data"]["RootChunk"]['alpha'] = e['Data']['alpha'] #FIXME: image_format @@ -1249,10 +1250,11 @@ def importSectors( filepath='', want_collisions=False, am_modding=False, with_ma nextpoint.handle_right = righthandlepos # Set the points to be the same nextpoint.co=endpoint.co - - - print('Finished Importing Sectors') - + + print(f"Imported Sector: {sectorName} in {time.time() - start_time:.2f} seconds") + print('') + print('-------------------- Finished Importing Cyberpunk 2077 Streaming Sectors --------------------') + print('') # The above is the code thats for the import plugin below is to allow testing/dev, you can run this file to import something diff --git a/i_scene_cp77_gltf/install_dependency.py b/i_scene_cp77_gltf/install_dependency.py index c2ec715..57bd4e9 100644 --- a/i_scene_cp77_gltf/install_dependency.py +++ b/i_scene_cp77_gltf/install_dependency.py @@ -1,6 +1,14 @@ import bpy +import sys +from .main.common import show_message + +_os = sys.platform def install_dependency(dependency_name): + if _os != 'win32': + print(f"required package: {dependency_name} not found, but the plugin cannot install it automatically on operating systems other than Windows") + show_message(f"required package: {dependency_name} not found, but the plugin cannot install it automatically on operating systems other than Windows") + return('CANCELLED') print(f"required package: {dependency_name} not found") from pip import _internal as pip print(f"Attempting to install {dependency_name}") diff --git a/i_scene_cp77_gltf/jsontool.py b/i_scene_cp77_gltf/jsontool.py index a278c2c..6ee0eeb 100644 --- a/i_scene_cp77_gltf/jsontool.py +++ b/i_scene_cp77_gltf/jsontool.py @@ -98,7 +98,16 @@ def jsonload(filepath): if not cp77_addon_prefs.non_verbose: print('Building shaders') # Do something for .material.json - + + case _ if base_name.endswith('.gradient.json'): + if not cp77_addon_prefs.non_verbose: + 
print(f"Processing: {base_name}") + data=load_json(filepath) + if json_ver_validate(data) == False: + if not cp77_addon_prefs.non_verbose: + print(f"invalid gradient.json found at: {filepath} this plugin requires jsons generated using the latest version of Wolvenkit") + show_message(f"found invalid gradient.json: {base_name} this plugin requires jsons generated using the latest version of Wolvenkit") + case _ if base_name.endswith('.mlsetup.json'): if not cp77_addon_prefs.non_verbose: print(f"Processing: {base_name}") @@ -119,6 +128,24 @@ def jsonload(filepath): show_message(f"invalid mltemplate.json: {base_name} import will continue but shaders may be incorrectly setup for this mesh") # Do something for .mlsetup.json + case _ if base_name.endswith('.mt.json'): + if not cp77_addon_prefs.non_verbose: + print(f"Processing: {base_name}") + data=load_json(filepath) + if json_ver_validate(data) == False: + if not cp77_addon_prefs.non_verbose: + print(f"invalid mt.json found at: {filepath} import will continue but shaders may be incorrectly set up for this mesh") + show_message(f"invalid mt.json: {base_name} import will continue but shaders may be incorrectly setup for this mesh") + + case _ if base_name.endswith('.mi.json'): + if not cp77_addon_prefs.non_verbose: + print(f"Processing: {base_name}") + data=load_json(filepath) + if json_ver_validate(data) == False: + if not cp77_addon_prefs.non_verbose: + print(f"invalid mi.json found at: {filepath} import will continue but shaders may be incorrectly set up for this mesh") + show_message(f"invalid mi.json: {base_name} import will continue but shaders may be incorrectly setup for this mesh") + case _ if base_name.endswith('.phys.json'): if not cp77_addon_prefs.non_verbose: print(f"Processing: {base_name}") @@ -163,6 +190,16 @@ def jsonload(filepath): if not cp77_addon_prefs.non_verbose: print(f"invalid hp.json found at: {filepath} this plugin requires jsons generated with the latest version of Wolvenkit") show_message(f"invalid Hair Profile: {base_name} this plugin requires jsons generated with the latest version of Wolvenkit") + + case _ if base_name.endswith('.cfoliage.json'): + if not cp77_addon_prefs.non_verbose: + print(f"Processing: {base_name}") + data=load_json(filepath) + if json_ver_validate(data) == False: + if not cp77_addon_prefs.non_verbose: + print(f"invalid cfoliage.json found at: {filepath} this plugin requires jsons generated with the latest version of Wolvenkit") + show_message(f"invalid cfoliage.json : {base_name} this plugin requires jsons generated with the latest version of Wolvenkit") + case _: if not cp77_addon_prefs.non_verbose: print(f"Incompatible Json: {base_name}") @@ -186,7 +223,7 @@ def openJSON(path, mode='r', ProjPath='', DepotPath=''): inproj=os.path.join(ProjPath,path) if os.path.exists(inproj): - file = open(inproj,mode) + data = jsonload(inproj) else: - file = open(os.path.join(DepotPath,path),mode) - return file \ No newline at end of file + data = jsonload(os.path.join(DepotPath,path)) + return data \ No newline at end of file diff --git a/i_scene_cp77_gltf/main/bartmoss_functions.py b/i_scene_cp77_gltf/main/bartmoss_functions.py index cbd2011..75d7832 100644 --- a/i_scene_cp77_gltf/main/bartmoss_functions.py +++ b/i_scene_cp77_gltf/main/bartmoss_functions.py @@ -6,6 +6,12 @@ ## I get that these are lazy but they're convenient type checks def is_mesh(o: bpy.types.Object) -> bool: return isinstance(o.data, bpy.types.Mesh) + +def world_mtx(armature, bone): + return armature.convert_space(bone, bone.matrix, 
from_space='POSE', to_space='WORLD') + +def pose_mtx(armature, bone, mat): + return armature.convert_space(bone, mat, from_space='WORLD', to_space='POSE') def is_armature(o: bpy.types.Object) -> bool: # I just found out I could leave annotations like that -> future presto will appreciate knowing wtf I though I was going to return return isinstance(o.data, bpy.types.Armature) @@ -14,16 +20,16 @@ def has_anims(o: bpy.types.Object) -> bool: return isinstance(o.data, bpy.types.Armature) and o.animation_data is not None def rotate_quat_180(self,context): - if context.active_object and context.active_object.rotation_quaternion: - active_obj = context.active_object - active_obj.rotation_mode = 'QUATERNION' + if context.selected_objects is not None: + for obj in context.selected_objects: + obj.rotation_mode = 'QUATERNION' - rotation_quat = Quaternion((0, 0, 1), radians(180)) - active_obj.rotation_quaternion = rotation_quat @ active_obj.rotation_quaternion - bpy.ops.object.transform_apply(location=False, rotation=True, scale=False) - # Update the object to reflect the changes - active_obj.update_tag() - active_obj.update_from_editmode() + rotation_quat = Quaternion((0, 0, 1), radians(180)) + obj.rotation_quaternion = rotation_quat @ obj.rotation_quaternion + bpy.ops.object.transform_apply(location=False, rotation=True, scale=False) + # Update the object to reflect the changes + obj.update_tag() + obj.update_from_editmode() # Update the scene to see the changes bpy.context.view_layer.update() @@ -50,15 +56,32 @@ def calculate_mesh_volume(obj): ## Returns True if the given object has shape keys, works for meshes and curves def hasShapeKeys(obj): if obj.id_data.type in ['MESH', 'CURVE']: - return True if obj.data.shape_keys else False - else: - return False + return obj.data.shape_keys != None + +def getShapeKeyNames(obj): + if hasShapeKeys(obj): + key_names = [] + for key_block in obj.data.shape_keys.key_blocks: + key_names.append(key_block.name) + return key_names + return "" # Return the name of the shape key data block if the object has shape keys. -def getShapeKeyName(obj): +def getShapeKeyByName(obj, name): if hasShapeKeys(obj): - return obj.data.shape_keys.name - return "" + for key_block in obj.data.shape_keys.key_blocks: + if key_block.name == name: + return key_block + return None + +def setActiveShapeKey(obj, name): + shape_key = getShapeKeyByName(obj, name) + if shape_key: + for index, key_block in enumerate(obj.data.shape_keys.key_blocks): + if key_block == shape_key: + obj.active_shape_key_index = index + return shape_key + return False # returns a dictionary with all the property names for the objects shape keys. def getShapeKeyProps(obj): @@ -83,12 +106,18 @@ def getCustomProps(obj): return props # returns a list of modifiers for the given object -def getMods(obj): +def getModNames(obj): mods = [] for mod in obj.modifiers: mods.append(mod.name) return mods +def getModByName(obj, name): + for mod in obj.modifiers: + if mod.name == name: + return mod + return None + # returns a list with the modifier properties of the given modifier. 
def getModProps(modifier): props = [] diff --git a/i_scene_cp77_gltf/main/common.py b/i_scene_cp77_gltf/main/common.py index 203a565..9494a9b 100644 --- a/i_scene_cp77_gltf/main/common.py +++ b/i_scene_cp77_gltf/main/common.py @@ -2,6 +2,7 @@ import bpy import os import math +from bpy.props import EnumProperty from mathutils import Color import pkg_resources import bmesh @@ -618,3 +619,23 @@ def createHash12Group(): CurMat.links.new(frac2.outputs[0],GroupOutput.inputs[0]) return CurMat +res_dir = get_resources_dir() + +# Path to the JSON file +VCOL_PRESETS_JSON = os.path.join(res_dir, "vertex_color_presets.json") + +def get_color_presets(): + if os.path.exists(VCOL_PRESETS_JSON): + with open(VCOL_PRESETS_JSON, 'r') as file: + return json.load(file) + return {} + +def save_presets(presets): + with open(VCOL_PRESETS_JSON, 'w') as file: + json.dump(presets, file, indent=4) + update_presets_items() + +def update_presets_items(): + presets = get_color_presets() + items = [(name, name, "") for name in presets.keys()] + return items diff --git a/i_scene_cp77_gltf/main/setup.py b/i_scene_cp77_gltf/main/setup.py index 6678527..f136085 100644 --- a/i_scene_cp77_gltf/main/setup.py +++ b/i_scene_cp77_gltf/main/setup.py @@ -45,9 +45,6 @@ def create(self,materialIndex): if self.obj.get("Materials"): rawMat = self.obj["Materials"][materialIndex] - - - verbose=True bpyMat = bpy.data.materials.new(rawMat["Name"]) diff --git a/i_scene_cp77_gltf/material_types/eyegradient.py b/i_scene_cp77_gltf/material_types/eyegradient.py index 3a8987b..7ce0c73 100644 --- a/i_scene_cp77_gltf/material_types/eyegradient.py +++ b/i_scene_cp77_gltf/material_types/eyegradient.py @@ -1,8 +1,7 @@ import bpy import os from ..main.common import * -from ..jsontool import json_ver_validate, openJSON -import json +from ..jsontool import openJSON class EyeGradient: def __init__(self, BasePath,image_format, ProjPath): @@ -14,13 +13,7 @@ def create(self,Data,Mat): # load the gradient profile from the depot - file = openJSON(Data["IrisColorGradient"] + ".json",mode='r', DepotPath=self.BasePath, ProjPath=self.ProjPath) - profile = json.loads(file.read()) - file.close() - valid_json=json_ver_validate(profile) - if not valid_json: - self.report({'ERROR'}, "Incompatible eye gradient json file detected. This add-on version requires materials generated WolvenKit 8.9.1 or higher.") - return + profile = openJSON(Data["IrisColorGradient"] + ".json",mode='r', DepotPath=self.BasePath, ProjPath=self.ProjPath) profile= profile["Data"]["RootChunk"] CurMat = Mat.node_tree pBSDF = CurMat.nodes[loc('Principled BSDF')] diff --git a/i_scene_cp77_gltf/material_types/multilayered.py b/i_scene_cp77_gltf/material_types/multilayered.py index d1d4739..9453330 100644 --- a/i_scene_cp77_gltf/material_types/multilayered.py +++ b/i_scene_cp77_gltf/material_types/multilayered.py @@ -1,8 +1,7 @@ import bpy import os from ..main.common import * -import json -from ..jsontool import openJSON, json_ver_validate, jsonloads +from ..jsontool import openJSON def _getOrCreateLayerBlend(): @@ -268,13 +267,7 @@ def createLayerMaterial(self,LayerName,LayerCount,CurMat,mlmaskpath,normalimgpat def create(self,Data,Mat): Mat['MLSetup']= Data["MultilayerSetup"] - file = openJSON( Data["MultilayerSetup"] + ".json",mode='r',DepotPath=self.BasePath, ProjPath=self.ProjPath) - mlsetup = jsonloads(file.read()) - file.close() - valid_json=json_ver_validate(mlsetup) - if not valid_json: - self.report({'ERROR'}, "Incompatible mlsetup json file detected. 
This add-on version requires materials generated WolvenKit 8.9.1 or higher.") - return + mlsetup = openJSON( Data["MultilayerSetup"] + ".json",mode='r',DepotPath=self.BasePath, ProjPath=self.ProjPath) mlsetup = mlsetup["Data"]["RootChunk"] xllay = mlsetup.get("layers") if xllay is None: @@ -346,13 +339,7 @@ def create(self,Data,Mat): if Microblend != "null": MBI = imageFromPath(self.BasePath+Microblend,self.image_format,True) - file = openJSON( material + ".json",mode='r',DepotPath=self.BasePath, ProjPath=self.ProjPath) - mltemplate = jsonloads(file.read()) - file.close() - valid_json=json_ver_validate(mltemplate) - if not valid_json: - self.report({'ERROR'}, "Incompatible mltemplate json file detected. This add-on version requires materials generated WolvenKit 8.9.1 or higher.") - return + mltemplate = openJSON( material + ".json",mode='r',DepotPath=self.BasePath, ProjPath=self.ProjPath) mltemplate = mltemplate["Data"]["RootChunk"] OverrideTable = createOverrideTable(mltemplate)#get override info for colors and what not # Mat[os.path.basename(material).split('.')[0]+'_cols']=OverrideTable["ColorScale"] diff --git a/i_scene_cp77_gltf/material_types/multilayeredTerrain.py b/i_scene_cp77_gltf/material_types/multilayeredTerrain.py index 8995be9..f368b4a 100644 --- a/i_scene_cp77_gltf/material_types/multilayeredTerrain.py +++ b/i_scene_cp77_gltf/material_types/multilayeredTerrain.py @@ -1,8 +1,7 @@ import bpy import os from ..main.common import * -import json -from ..jsontool import json_ver_validate, openJSON +from ..jsontool import jsonload class MultilayeredTerrain: def __init__(self, BasePath,image_format,ProjPath): @@ -255,13 +254,8 @@ def createLayerMaterial(self,LayerName,LayerCount,CurMat,mlmaskpath,normalimgpat def create(self,Data,Mat): - file = open(self.BasePath + Data["MultilayerSetup"] + ".json",mode='r') - mlsetup = json.loads(file.read()) - file.close() - valid_json=json_ver_validate(mlsetup) - if not valid_json: - self.report({'ERROR'}, "Incompatible mlsetup json file detected. This add-on version requires materials generated WolvenKit 8.9.1 or higher.") - return + file = self.BasePath + Data["MultilayerSetup"] + ".json" + mlsetup = jsonload(file) mlsetup = mlsetup["Data"]["RootChunk"] xllay = mlsetup.get("layers") if xllay is None: @@ -323,13 +317,8 @@ def create(self,Data,Mat): MBI = imageFromRelPath(Microblend,self.image_format,True,self.BasePath,self.ProjPath) - file = open(self.BasePath + material + ".json",mode='r') - mltemplate = json.loads(file.read()) - file.close() - valid_json=json_ver_validate(mltemplate) - if not valid_json: - self.report({'ERROR'}, "Incompatible mltemplate json file detected. 
This add-on version requires materials generated WolvenKit 8.9.1 or higher.") - return + file = self.BasePath + material + ".json" + mltemplate = jsonload(file) mltemplate = mltemplate["Data"]["RootChunk"] OverrideTable = createOverrideTable(mltemplate)#get override info for colors and what not diff --git a/i_scene_cp77_gltf/material_types/multilayeredclearcoat.py b/i_scene_cp77_gltf/material_types/multilayeredclearcoat.py index a278dbd..edbf6de 100644 --- a/i_scene_cp77_gltf/material_types/multilayeredclearcoat.py +++ b/i_scene_cp77_gltf/material_types/multilayeredclearcoat.py @@ -1,8 +1,7 @@ import bpy import os from ..main.common import * -import json -from ..jsontool import openJSON, json_ver_validate +from ..jsontool import jsonload class MultilayeredClearCoat: def __init__(self, BasePath,image_format): @@ -273,13 +272,7 @@ def createLayerMaterial(self,LayerName,LayerCount,CurMat,mlmaskpath,normalimgpat def create(self,Data,Mat): - file = open(self.BasePath + Data["MultilayerSetup"] + ".json",mode='r') - mlsetup = json.loads(file.read()) - file.close() - valid_json=json_ver_validate(mlsetup) - if not valid_json: - self.report({'ERROR'}, "Incompatible mlsetup json file detected. This add-on version requires materials generated WolvenKit 8.9.1 or higher.") - return + mlsetup = jsonload((self.BasePath + Data["MultilayerSetup"] + ".json")) mlsetup = mlsetup["Data"]["RootChunk"] xllay = mlsetup.get("layers") if xllay is None: @@ -336,13 +329,8 @@ def create(self,Data,Mat): if Microblend != "null": MBI = imageFromPath(self.BasePath+Microblend,self.image_format,True) - file = open(self.BasePath + material + ".json",mode='r') - mltemplate = json.loads(file.read()) - file.close() - valid_json=json_ver_validate(mltemplate) - if not valid_json: - self.report({'ERROR'}, "Incompatible mltemplate json file detected. 
This add-on version requires materials generated WolvenKit 8.9.1 or higher.") - return + file = self.BasePath + material + ".json" + mltemplate = jsonload(file) mltemplate = mltemplate["Data"]["RootChunk"] OverrideTable = self.createOverrideTable(mltemplate)#get override info for colors and what not diff --git a/i_scene_cp77_gltf/meshtools/__init__.py b/i_scene_cp77_gltf/meshtools/__init__.py index 9b9f264..7f13119 100644 --- a/i_scene_cp77_gltf/meshtools/__init__.py +++ b/i_scene_cp77_gltf/meshtools/__init__.py @@ -5,12 +5,11 @@ from .meshtools import * from .verttools import * from ..main.bartmoss_functions import * -from ..main.common import get_classes +from ..main.common import get_classes, get_color_presets, save_presets from bpy.props import (StringProperty, EnumProperty) from bpy.types import (Scene, Operator, Panel) from ..cyber_props import CP77RefitList from ..icons.cp77_icons import get_icon -import mathutils class CP77_PT_MeshTools(Panel): bl_label = "Mesh Tools" @@ -35,24 +34,25 @@ def draw(self, context): cp77_addon_prefs = bpy.context.preferences.addons['i_scene_cp77_gltf'].preferences if cp77_addon_prefs.show_modtools: - if cp77_addon_prefs.show_meshtools: - box.label(text="Mesh Cleanup", icon_value=get_icon("TRAUMA")) - row = box.row(align=True) - split = row.split(factor=0.7,align=True) - split.label(text="Merge Distance:") - split.prop(props,"merge_distance", text="", slider=True) - row = box.row(align=True) - split = row.split(factor=0.7,align=True) - split.label(text="Smooth Factor:") - split.prop(props,"smooth_factor", text="", slider=True) + if cp77_addon_prefs.show_meshtools: + box.label(icon_value=get_icon("REFIT"), text="AKL Autofitter:") row = box.row(align=True) - row.operator("cp77.submesh_prep") + split = row.split(factor=0.29,align=True) + split.label(text="Shape:") + split.prop(props, 'refit_json', text="") row = box.row(align=True) - row.operator("cp77.group_verts", text="Group Ungrouped Verts") + row.operator("cp77.auto_fitter", text="Refit Selected Mesh") + row.prop(props, 'fbx_rot', text="", icon='LOOP_BACK', toggle=1) + + box = layout.box() + box.label(icon_value=get_icon("TECH"), text="Modifiers:") row = box.row(align=True) - row.operator('object.delete_unused_vgroups', text="Delete Unused Vert Groups") + split = row.split(factor=0.35,align=True) + split.label(text="Target:") + split.prop(props, "selected_armature", text="") row = box.row(align=True) - row.operator("cp77.rotate_obj") + row.operator("cp77.set_armature", text="Change Armature Target") + box = layout.box() box.label(icon_value=get_icon("SCULPT"), text="Modelling:") row = box.row(align=True) @@ -70,35 +70,60 @@ def draw(self, context): split.prop(props, "mesh_target", text="") row = box.row(align=True) box.operator("cp77.trans_weights", text="Transfer Vertex Weights") + row = box.row(align=True) + row.operator("cp77.rotate_obj") + box = layout.box() - box.label(icon_value=get_icon("REFIT"), text="AKL Autofitter:") + box.label(text="Mesh Cleanup", icon_value=get_icon("TRAUMA")) row = box.row(align=True) - split = row.split(factor=0.29,align=True) - split.label(text="Shape:") - split.prop(props, 'refit_json', text="") + split = row.split(factor=0.7,align=True) + split.label(text="Merge Distance:") + split.prop(props,"merge_distance", text="", slider=True) row = box.row(align=True) - row.operator("cp77.auto_fitter", text="Refit Selected Mesh") - row.prop(props, 'fbx_rot', text="", icon='LOOP_BACK', toggle=1) - box = layout.box() - box.label(icon_value=get_icon("TECH"), text="Modifiers:") + 
split = row.split(factor=0.7,align=True) + split.label(text="Smooth Factor:") + split.prop(props,"smooth_factor", text="", slider=True) row = box.row(align=True) - split = row.split(factor=0.35,align=True) - split.label(text="Target:") - split.prop(props, "selected_armature", text="") + row.operator("cp77.submesh_prep") row = box.row(align=True) - row.operator("cp77.set_armature", text="Change Armature Target") + row.operator("cp77.group_verts", text="Group Ungrouped Verts") + row = box.row(align=True) + row.operator('object.delete_unused_vgroups', text="Delete Unused Vert Groups") + box = layout.box() - box.label(text="Vertex Colours", icon="MATERIAL") + box.label(text="Vertex Colours", icon="BRUSH_DATA") + row = box.row(align=True) + split = row.split(factor=0.275,align=True) + split.label(text="Preset:") + split.prop(props, "vertex_color_presets", text="") + box.operator("cp77.apply_vertex_color_preset") box.operator("cp77.add_vertex_color_preset") - box.operator("cp77.apply_vertex_color_preset") - box.prop(context.scene, "cp77_vertex_color_preset", text="Select Preset") + box.operator("cp77.delete_vertex_color_preset") + box = layout.box() box.label(text="Material Export", icon="MATERIAL") box.operator("export_scene.hp") box.operator("export_scene.mlsetup") +class CP77DeleteVertexcolorPreset(Operator): + bl_idname = "cp77.delete_vertex_color_preset" + bl_label = "Delete Preset" -class CP77AddVertexColorPreset(Operator): + def execute(self, context): + props = context.scene.cp77_panel_props + preset_name = props.vertex_color_presets + presets = get_color_presets() + if preset_name in presets: + del presets[preset_name] + save_presets(presets) + self.report({'INFO'}, f"Preset '{preset_name}' deleted.") + else: + self.report({'ERROR'}, f"Preset '{preset_name}' not found.") + return {'CANCELLED'} + + return {'FINISHED'} + +class CP77AddVertexcolorPreset(Operator): bl_idname = "cp77.add_vertex_color_preset" bl_label = "Save Vertex Color Preset" bl_parent_id = "CP77_PT_MeshTools" @@ -108,12 +133,12 @@ class CP77AddVertexColorPreset(Operator): name="Color", subtype='COLOR', min=0.0, max=1.0, - size=3, - default=(1, 1, 1) + size=4, + default=(1, 1, 1, 1) # Include alpha in default ) def execute(self, context): - presets = get_colour_presets() + presets = get_color_presets() presets[self.preset_name] = list(self.color) save_presets(presets) self.report({'INFO'}, f"Preset '{self.preset_name}' added.") @@ -121,11 +146,12 @@ def execute(self, context): def invoke(self, context, event): tool_settings = context.tool_settings.vertex_paint - self.color = mathutils.Color.from_scene_linear_to_srgb(tool_settings.brush.color) + color = tool_settings.brush.color + alpha = tool_settings.brush.strength # Assuming alpha can be taken from brush strength + self.color = (*color[:3], alpha) # Combine color and alpha print(self.color) return context.window_manager.invoke_props_dialog(self) - class CP77WeightTransfer(Operator): bl_idname = 'cp77.trans_weights' bl_label = "Transfer weights from one mesh to another" @@ -138,18 +164,33 @@ def execute(self, context): return {"FINISHED"} # Operator to apply a preset -class CP77ApplyVertexColorPreset(Operator): +class CP77ApplyVertexcolorPreset(Operator): bl_idname = "cp77.apply_vertex_color_preset" - bl_label = "Apply Vertex Color Preset" + bl_label = "Apply Vertex color Preset" def execute(self, context): - preset = Scene.cp77_vertex_color_preset - if not preset: - self.report({'ERROR'}, f"Preset '{self.preset_name}' not found.") + props = 
context.scene.cp77_panel_props + preset_name = props.vertex_color_presets + obj = context.object + if not obj: + show_message("No active object. Please Select a Mesh and try again") + return {'CANCELLED'} + if obj.type != 'MESH': + show_message("The active object is not a mesh.") + return {'CANCELLED'} + if not preset_name: + self.report({'ERROR'}, "No preset selected.") + return {'CANCELLED'} + + presets = get_color_presets() + preset_color = presets.get(preset_name) + if not preset_color: + self.report({'ERROR'}, f"Preset '{preset_name}' not found.") return {'CANCELLED'} - preset_color = preset.append(1.0) # Adding alpha value initial_mode = context.mode + if initial_mode != 'OBJECT': + bpy.ops.object.mode_set(mode='OBJECT') for obj in context.selected_objects: if obj.type != 'MESH': @@ -162,27 +203,26 @@ def execute(self, context): color_layer = mesh.vertex_colors.active.data if initial_mode == 'EDIT_MESH': - bpy.ops.object.mode_set(mode='OBJECT') selected_verts = {v.index for v in mesh.vertices if v.select} - bpy.ops.object.mode_set(mode='EDIT') for poly in mesh.polygons: for loop_index in poly.loop_indices: loop_vert_index = mesh.loops[loop_index].vertex_index if loop_vert_index in selected_verts: color_layer[loop_index].color = preset_color - bpy.ops.object.mode_set(mode='OBJECT') + bpy.ops.object.mode_set(mode='EDIT') + # bpy.ops.object.mode_set(mode='OBJECT') else: for poly in mesh.polygons: for loop_index in poly.loop_indices: loop_vert_index = mesh.loops[loop_index].vertex_index if mesh.vertices[loop_vert_index].select: color_layer[loop_index].color = preset_color - + bpy.ops.object.mode_set(mode=initial_mode) mesh.update() - - bpy.ops.object.mode_set(mode=initial_mode) - self.report({'INFO'}, f"Preset '{self.preset_name}' applied.") + + + self.report({'INFO'}, f"Preset '{preset_name}' applied.") return {'FINISHED'} @@ -286,7 +326,6 @@ def register_meshtools(): for cls in other_classes: if not hasattr(bpy.types, cls.__name__): bpy.utils.register_class(cls) - Scene.cp77_vertex_color_preset = EnumProperty(name="Vertex Color Preset", items=update_presets_items()) def unregister_meshtools(): for cls in reversed(other_classes): @@ -294,5 +333,4 @@ def unregister_meshtools(): bpy.utils.unregister_class(cls) for cls in reversed(operators): if hasattr(bpy.types, cls.__name__): - bpy.utils.unregister_class(cls) - del Scene.cp77_vertex_color_preset \ No newline at end of file + bpy.utils.unregister_class(cls) \ No newline at end of file diff --git a/i_scene_cp77_gltf/meshtools/meshtools.py b/i_scene_cp77_gltf/meshtools/meshtools.py index 617f357..a8d2f5f 100644 --- a/i_scene_cp77_gltf/meshtools/meshtools.py +++ b/i_scene_cp77_gltf/meshtools/meshtools.py @@ -5,15 +5,18 @@ from .verttools import * from ..cyber_props import * from ..main.common import show_message +from ..main.bartmoss_functions import setActiveShapeKey, getShapeKeyNames, getModNames def CP77SubPrep(self, context, smooth_factor, merge_distance): scn = context.scene obj = context.object current_mode = context.mode + if not obj: + show_message("No active object. 
Please Select a Mesh and try again") + return {'CANCELLED'} if obj.type != 'MESH': - bpy.ops.cp77.message_box('INVOKE_DEFAULT', message="The active object is not a mesh.") - return {'CANCELLED'} - + show_message("The active object is not a mesh.") + return {'CANCELLED'} if current_mode != 'EDIT': bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type="EDGE") @@ -44,37 +47,63 @@ def CP77SubPrep(self, context, smooth_factor, merge_distance): bpy.ops.cp77.message_box('INVOKE_DEFAULT', message=f"Submesh preparation complete. {merged_vertices} verts merged") if context.mode != current_mode: bpy.ops.object.mode_set(mode=current_mode) - def CP77ArmatureSet(self, context): selected_meshes = [obj for obj in bpy.context.selected_objects if obj.type == 'MESH'] props = context.scene.cp77_panel_props target_armature_name = props.selected_armature target_armature = bpy.data.objects.get(target_armature_name) - if len(selected_meshes) >0: + obj = context.object + if not obj: + show_message("No active object. Please Select a Mesh and try again") + return {'CANCELLED'} + if obj.type != 'MESH': + show_message("The active object is not a mesh.") + return {'CANCELLED'} + if len(selected_meshes) > 0: if target_armature and target_armature.type == 'ARMATURE': + # Ensure the target armature has a collection + if not target_armature.users_collection: + target_collection = bpy.data.collections.new(target_armature.name + "_collection") + bpy.context.scene.collection.children.link(target_collection) + target_collection.objects.link(target_armature) + else: + target_collection = target_armature.users_collection[0] + for mesh in selected_meshes: - retargeted=False + retargeted = False for modifier in mesh.modifiers: if modifier.type == 'ARMATURE' and modifier.object is not target_armature: modifier.object = target_armature - retargeted=True - else: - if modifier.type == 'ARMATURE' and modifier.object is target_armature: - retargeted=True - continue + retargeted = True + elif modifier.type == 'ARMATURE' and modifier.object is target_armature: + retargeted = True + continue if not retargeted: armature = mesh.modifiers.new('Armature', 'ARMATURE') - armature.object = target_armature + armature.object = target_armature + + # Set parent + mesh.parent = target_armature + # Unlink the mesh from its original collections + for col in mesh.users_collection: + col.objects.unlink(mesh) + + # Link the mesh to the target armature's collection + target_collection.objects.link(mesh) def CP77UvChecker(self, context): selected_meshes = [obj for obj in bpy.context.selected_objects if obj.type == 'MESH'] bpy_mats=bpy.data.materials - current_mode = context.mode - - - + obj = context.object + current_mode = context.mode + if not obj: + show_message("No active object. Please Select a Mesh and try again") + return {'CANCELLED'} + if obj.type != 'MESH': + show_message("The active object is not a mesh.") + return {'CANCELLED'} for mat in bpy_mats: if mat.name == 'UV_Checker': uvchecker = mat @@ -122,10 +151,17 @@ def CP77UvChecker(self, context): return {'FINISHED'} - def CP77UvUnChecker(self, context): selected_meshes = [obj for obj in bpy.context.selected_objects if obj.type == 'MESH'] + obj = context.object current_mode = context.mode + if not obj: + show_message("No active object. 
+        return {'CANCELLED'}
+    if obj.type != 'MESH':
+        show_message("The active object is not a mesh.")
+        return {'CANCELLED'}
+
     uvchecker = 'UV_Checker'
     original_mat_name = None
     for mesh in selected_meshes:
@@ -146,7 +182,6 @@ def CP77UvUnChecker(self, context):
     if context.mode != current_mode:
         bpy.ops.object.mode_set(mode=current_mode)
 
-
 def CP77RefitChecker(self, context):
     scene = context.scene
     objects = scene.objects
@@ -161,10 +196,58 @@ def CP77RefitChecker(self, context):
     print('refitter result:', refitter)
     return refitter
 
+# Apply the AutoFitter lattice modifier to the mesh as a shape key
+def applyModifierAsShapeKey(obj):
+    names = getModNames(obj)
+    print(names)
+    refitter = None
+    for name in names:
+        if 'AutoFitter' in name:
+            refitter = name
+    if refitter:
+        bpy.context.view_layer.objects.active = obj
+        obj.select_set(True)
+
+        bpy.ops.object.modifier_apply_as_shapekey(keep_modifier=False, modifier=refitter)
+        print(f"Applied modifier '{refitter}' as shape key.")
+
+# Bake the refit shape key into the Basis and rebuild the GarmentSupport key on the refitted mesh
+def applyRefitter(obj):
+    applyModifierAsShapeKey(obj)
+    orignames = getShapeKeyNames(obj)
+    for name in orignames:
+        if 'AutoFitter' in name:
+            refitkey = setActiveShapeKey(obj, name)
+            refitkey.value = 1
+        if 'Garment' in name:
+            gskey = setActiveShapeKey(obj, name)
+            gskey.value = 1
+            bpy.ops.object.shape_key_add(from_mix=True)
+
+            gskey.value = 0
+            gskey = setActiveShapeKey(obj, name)
+            bpy.ops.object.shape_key_remove(all=False)
+    newnames = getShapeKeyNames(obj)
+    setActiveShapeKey(obj, 'Basis')
+    bpy.ops.object.shape_key_remove(all=False)
+    for name in newnames:
+        if 'AutoFitter' in name:
+            refitkey = setActiveShapeKey(obj, name)
+            refitkey.name = 'Basis'
+        if name not in orignames:
+            newgs = setActiveShapeKey(obj, name)
+            newgs.name = 'GarmentSupport'
+
 def CP77Refit(context, refitter, target_body_path, target_body_name, fbx_rot):
     selected_meshes = [obj for obj in context.selected_objects if obj.type == 'MESH']
     scene = context.scene
+    obj = context.object
+    current_mode = context.mode
+    if not obj:
+        show_message("No active object. Please Select a Mesh and try again")
+        return {'CANCELLED'}
+    if obj.type != 'MESH':
+        show_message("The active object is not a mesh.")
+        return {'CANCELLED'}
     refitter_obj = None
     r_c = None
     print(fbx_rot)
@@ -191,6 +274,7 @@ def CP77Refit(context, refitter, target_body_path, target_body_name, fbx_rot):
             print('refitting:', mesh.name, 'to:', target_body_name)
             lattice_modifier = mesh.modifiers.new(refitter_obj.name, 'LATTICE')
             lattice_modifier.object = refitter_obj
+            applyRefitter(mesh)
         return{'FINISHED'}
 
@@ -255,6 +339,6 @@ def CP77Refit(context, refitter, target_body_path, target_body_name, fbx_rot):
 
         for mesh in selected_meshes:
             lattice_modifier = mesh.modifiers.new(new_lattice.name,'LATTICE')
-        for mesh in selected_meshes:
-            print('refitting:', mesh.name, 'to:', new_lattice["refitter_type"])
-            lattice_modifier.object = new_lattice
+            print('refitting:', mesh.name, 'to:', new_lattice["refitter_type"])
+            lattice_modifier.object = new_lattice
+            applyRefitter(mesh)
diff --git a/i_scene_cp77_gltf/meshtools/verttools.py b/i_scene_cp77_gltf/meshtools/verttools.py
index 41380b2..7f874f2 100644
--- a/i_scene_cp77_gltf/meshtools/verttools.py
+++ b/i_scene_cp77_gltf/meshtools/verttools.py
@@ -8,32 +8,15 @@
 res_dir = get_resources_dir()
-script_dir = get_script_dir()
-
-
-# Path to the JSON file
-VCOL_PRESETS_JSON = os.path.join(res_dir, "vertex_color_presets.json")
-
-
-def get_colour_presets():
-    if os.path.exists(VCOL_PRESETS_JSON):
-        with open(VCOL_PRESETS_JSON, 'r') as file:
-            return json.load(file)
-    return {}
-
-
-def save_presets(presets):
-    with open(VCOL_PRESETS_JSON, 'w') as file:
-        json.dump(presets, file, indent=4)
-
-
-def update_presets_items():
-    presets = get_colour_presets()
-    return [(name, name, "") for name in presets.keys()]
-
 def del_empty_vgroup(self, context):
-    obj = bpy.context
+    obj = context.object
+    if not obj:
+        show_message("No active object. Please Select a Mesh and try again")
+        return {'CANCELLED'}
+    if obj.type != 'MESH':
+        show_message("The active object is not a mesh.")
+        return {'CANCELLED'}
     try:
         for obj in bpy.context.selected_objects:
             groups = {r: None for r in range(len(obj.vertex_groups))}
@@ -47,10 +30,12 @@ def del_empty_vgroup(self, context):
         print(f"encountered the following error:")
         print(e)
 
-
 def find_nearest_vertex_group(obj, vertex):
     min_distance = math.inf
     nearest_vertex = None
+    if not obj:
+        show_message("No active object. Please Select a Mesh and try again")
+        return {'CANCELLED'}
     if obj.type != 'MESH':
         show_message("The active object is not a mesh.")
         return {'CANCELLED'}
@@ -62,12 +47,13 @@ def find_nearest_vertex_group(obj, vertex):
             nearest_vertex = v
     return nearest_vertex
 
-
 def CP77GroupUngroupedVerts(self, context):
     C = bpy.context
     obj = C.object
     current_mode = C.mode
-
+    if not obj:
+        show_message("No active object. Please Select a Mesh and try again")
+        return {'CANCELLED'}
     if obj.type != 'MESH':
         show_message("The active object is not a mesh.")
         return {'CANCELLED'}
@@ -100,7 +86,6 @@ def CP77GroupUngroupedVerts(self, context):
         print(e)
     return {'FINISHED'}
 
-
 def trans_weights(self, context):
     current_mode = context.mode
     props = context.scene.cp77_panel_props
diff --git a/i_scene_cp77_gltf/resources/empty.streamingsector.json b/i_scene_cp77_gltf/resources/empty.streamingsector.json
index eb73d97..96d9c7e 100644
--- a/i_scene_cp77_gltf/resources/empty.streamingsector.json
+++ b/i_scene_cp77_gltf/resources/empty.streamingsector.json
@@ -1,6 +1,6 @@
 {
   "Header": {
-    "WolvenKitVersion": "8.12.1-nightly.2024-01-08\u002Baf006c7d02fe4693a3d87822873ba322acd94c2d",
+    "WolvenKitVersion": "8.14.1-nightly.2024-07-21",
     "WKitJsonVersion": "0.0.8",
     "GameVersion": 2100,
     "ExportedDateTime": "2024-01-11T20:29:08.1394444Z",
diff --git a/i_scene_cp77_gltf/resources/hair_profile_template.hp.json b/i_scene_cp77_gltf/resources/hair_profile_template.hp.json
index 0a0d003..1885645 100644
--- a/i_scene_cp77_gltf/resources/hair_profile_template.hp.json
+++ b/i_scene_cp77_gltf/resources/hair_profile_template.hp.json
@@ -1,7 +1,7 @@
 {
   "Header": {
-    "WolvenKitVersion": "8.11.1-nightly.2023-10-15",
+    "WolvenKitVersion": "8.14.1-nightly.2024-07-21",
     "WKitJsonVersion": "0.0.8",
     "GameVersion": 2000,
     "ExportedDateTime": "2023-10-16T08:30:59.0747987Z",
diff --git a/i_scene_cp77_gltf/resources/metal_base_template.mi.json b/i_scene_cp77_gltf/resources/metal_base_template.mi.json
index 04f586c..e5425dc 100644
--- a/i_scene_cp77_gltf/resources/metal_base_template.mi.json
+++ b/i_scene_cp77_gltf/resources/metal_base_template.mi.json
@@ -1,6 +1,6 @@
 {
   "Header": {
-    "WolvenKitVersion": "8.11.1-nightly.2023-10-15",
+    "WolvenKitVersion": "8.14.1-nightly.2024-07-21",
     "WKitJsonVersion": "0.0.8",
     "GameVersion": 2000,
     "ExportedDateTime": "2023-10-16T08:30:59.0747987Z",
diff --git a/i_scene_cp77_gltf/resources/template.streamingblock.json b/i_scene_cp77_gltf/resources/template.streamingblock.json
index 989b6ac..33e9d3d 100644
--- a/i_scene_cp77_gltf/resources/template.streamingblock.json
+++ b/i_scene_cp77_gltf/resources/template.streamingblock.json
@@ -1,6 +1,6 @@
 {
   "Header": {
-    "WolvenKitVersion": "8.11.2-nightly.2023-11-14",
+    "WolvenKitVersion": "8.14.1-nightly.2024-07-21",
     "WKitJsonVersion": "0.0.8",
     "GameVersion": 2020,
     "ExportedDateTime": "2023-11-21T07:26:32.2758491Z",
diff --git a/i_scene_cp77_gltf/resources/vertex_color_presets.json b/i_scene_cp77_gltf/resources/vertex_color_presets.json
index 5929da8..3212e12 100644
--- a/i_scene_cp77_gltf/resources/vertex_color_presets.json
+++ b/i_scene_cp77_gltf/resources/vertex_color_presets.json
@@ -2,21 +2,25 @@
   "Primary Taillight": [
     0.662745,
     0.003922,
-    0.003922
+    0.003922,
+    1.0
   ],
   "Secondary Taillight": [
     0.905882,
     0.003922,
-    0.0
+    0.0,
+    1.0
   ],
   "Primary Headlight": [
     0.486275,
     0.003922,
-    0.003922
+    0.003922,
+    1.0
   ],
   "Marker Light": [
     0.796079,
     0.003922,
-    0.003922
+    0.003922,
+    1.0
   ]
 }
\ No newline at end of file
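
Note on the preset data above: the vertex colour presets now carry an explicit alpha component, and the loader/enum helpers removed from verttools.py are expected to be provided by main/common.py (update_presets_items and a matching get_colour_presets), which are not shown in this diff. The following is a minimal sketch of what those relocated helpers could look like, based on the removed code and assuming the add-on's existing get_resources_dir() helper is in scope; it is illustrative only, not part of the patch.

    import json
    import os

    # Assumed location: i_scene_cp77_gltf/main/common.py, where get_resources_dir() is defined.
    VCOL_PRESETS_JSON = os.path.join(get_resources_dir(), "vertex_color_presets.json")

    def get_colour_presets():
        # Each preset is now a four-float RGBA list, so it can be assigned directly
        # to a loop colour without appending an alpha value at apply time.
        if os.path.exists(VCOL_PRESETS_JSON):
            with open(VCOL_PRESETS_JSON, 'r') as file:
                return json.load(file)
        return {}

    def save_presets(presets):
        # Write user-defined presets back to the bundled JSON file.
        with open(VCOL_PRESETS_JSON, 'w') as file:
            json.dump(presets, file, indent=4)

    def update_presets_items():
        # Enum items consumed by the vertex_color_presets EnumProperty.
        return [(name, name, "") for name in get_colour_presets().keys()]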