add files

- First Commit
boo3 2023-11-12 16:09:22 +01:00
parent 28e0956393
commit 5bbbfe960c
15 changed files with 170983 additions and 0 deletions

.gitignore vendored Normal file (+8 lines)

@@ -0,0 +1,8 @@
# Ignore contents of specific folders
done/*
output/*
textures/*
units/*
# Ignore specific folder
net6.0-windows/
output2/

Binary file not shown.

Binary file not shown.

ExportModels.bat Normal file (+33 lines)

@@ -0,0 +1,33 @@
@echo off
setlocal enabledelayedexpansion
rem Get the path of the script
set "scriptPath=%~dp0"
rem Set the relative paths from the script location
set "parserPath=%scriptPath%\net6.0-windows\PD2ModelParser.exe"
set "sourceFolder=%scriptPath%\units"
set "exportCommand=--export="
set "loadCommand=--load="
echo Parsing .model files in %sourceFolder% and its subdirectories...
rem Check if the 'units' folder exists, if not, create it
if not exist "%sourceFolder%" (
    mkdir "%sourceFolder%"
    echo Created folder: %sourceFolder%
)
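rem Recursively convert each .model file under 'units' to a .gltf next to it (paths are unquoted, so avoid spaces in folder names)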
for /R "%sourceFolder%" %%F in (*.model) do (
    set "loadValue=%%F"
    set "exportValue=!loadValue:.model=.gltf!"
    echo Processing: !loadValue!
    echo Exporting to: !exportValue!
    "%parserPath%" %loadCommand%!loadValue! %exportCommand%!exportValue!
)
echo Processing complete.
endlocal

ModelCopyForBlender.bat Normal file (+40 lines)

@@ -0,0 +1,40 @@
@echo off
setlocal enabledelayedexpansion
rem Get the path of the script
set "scriptPath=%~dp0"
rem Set the relative paths from the script location
set "sourceFolder=%scriptPath%\units"
set "outputFolder=%scriptPath%\output"
set "textureFolder=%scriptPath%\textures"
rem Check if folders exist, create them if necessary
if not exist "%sourceFolder%" (
    mkdir "%sourceFolder%"
    echo Created folder: %sourceFolder%
)
if not exist "%outputFolder%" (
    mkdir "%outputFolder%"
    echo Created folder: %outputFolder%
)
if not exist "%textureFolder%" (
    mkdir "%textureFolder%"
    echo Created folder: %textureFolder%
)
echo Step 1: Copy files
rem Step 1: Copy files
robocopy "%sourceFolder%" "%outputFolder%" *.gltf *.bin *.material_config *.object /S
echo Step 2: Copy .texture files
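rem Step 2: Copy every .texture file into the flat 'textures' folder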
for /r "%sourceFolder%" %%i in (*.texture) do copy "%%i" "%textureFolder%"
echo Step 3: Rename .texture files to .dds
rem Step 3: Rename .texture files to .dds
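rem (a *.dds target keeps each file's base name and only swaps the extension)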
for /r "%textureFolder%" %%i in (*.texture) do ren "%%i" *.dds
echo Script completed.
pause

UnrealImporter.py Normal file (+37 lines)

@@ -0,0 +1,37 @@
import os
import unreal
# Set the path to the directory containing your .gltf files
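# (Hardcoded path; point this at the 'done' folder that blender_script.py exports to.)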
gltf_directory = "D:/PayDay2/done"
# Get the AssetTools
asset_tools = unreal.AssetToolsHelpers.get_asset_tools()
# Iterate through .gltf files in the specified directory and its subdirectories
for root, dirs, files in os.walk(gltf_directory):
    for file_name in files:
        if file_name.endswith(".gltf"):
            file_path = os.path.join(root, file_name)
            # Extract relative path from the source directory
            relative_path = os.path.relpath(file_path, gltf_directory)
            # Construct the destination path in the Content Browser
            destination_path = "/Game/units/" + os.path.splitext(relative_path)[0]
            # Create an import task
            import_task = unreal.AssetImportTask()
            import_task.filename = file_path
            import_task.destination_path = destination_path
            # Import the .gltf file
            asset_tools.import_asset_tasks([import_task])
            # Print feedback
            print(f"Importing: {file_path}")
            print(f"Destination Path: {destination_path}")
            print("------")
print("Batch import complete.")
# TODO: after importing all the models, remove redundant materials and combine them into one per unique material

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

blender_script.py Normal file (+282 lines)

@@ -0,0 +1,282 @@
import bpy
import os
import xml.etree.ElementTree as ET
# Display print() output in the Blender console window for debugging
def print(data):
    for window in bpy.context.window_manager.windows:
        screen = window.screen
        for area in screen.areas:
            if area.type == 'CONSOLE':
                override = {'window': window, 'screen': screen, 'area': area}
                bpy.ops.console.scrollback_append(override, text=str(data), type="OUTPUT")
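# Clear any materials, meshes and images left over from a previous run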
for material in bpy.data.materials:
    bpy.data.materials.remove(material, do_unlink=True)
for mesh in bpy.data.meshes:
    bpy.data.meshes.remove(mesh, do_unlink=True)
for image in bpy.data.images:
    bpy.data.images.remove(image, do_unlink=True)
# Set the input and output directories
wherethefilesare = 'output'  # Folder containing the files exported by PD2ModelParser.exe
wheretosave = 'done'  # Where you want to save the fixed files.
wherethetexturesare = 'textures'  # Folder containing the extracted .dds textures
# Get the parent directory of the python file
dir_path = os.path.dirname(os.path.realpath(__file__))
parent_dir = os.path.abspath(os.path.join(dir_path, os.pardir))
# Make valid folder paths using os.path.join
input_directory = os.path.join(parent_dir, wherethefilesare)
output_directory = os.path.join(parent_dir, wheretosave)
texture_directory = os.path.join(parent_dir, wherethetexturesare)
print("Input Directory:", input_directory)
print("Output Directory:", output_directory)
print("Texture Directory:", texture_directory)
def get_object_filepath(file_path):
    # Replace the extension with .object
    object_file_path = os.path.splitext(file_path)[0] + '.object'
    return object_file_path
def set_diffuse_texture_as_base_color(material, texture_path):
    # Create an image texture node
    texture_node = material.node_tree.nodes.new(type='ShaderNodeTexImage')
    texture_node.image = bpy.data.images.load(texture_path)
    # Get the principled BSDF shader node
    principled_bsdf = material.node_tree.nodes.get("Principled BSDF")
    # Connect the texture node to the base color input of the principled BSDF shader node
    material.node_tree.links.new(principled_bsdf.inputs["Base Color"], texture_node.outputs["Color"])
def process_xml_file(file_path):
    try:
        # Open the file and read its contents
        with open(file_path, 'r') as file:
            file_contents = file.read()
        # Parse the XML string
        root = ET.fromstring(file_contents)
        # Find the value of the 'materials' attribute
        materials_value = root.find('.//diesel').get('materials')
        # Print the result
        print(f"The value of 'materials' is: {materials_value}")
        # Return the materials_value
        return materials_value
    except Exception as e:
        print(f"Error processing XML file: {e}")
        return None
def get_diffuse_texture_name(xml_file_path, material_name):
    """
    Extracts the name of the texture in <diffuse_texture> for a given material name in an XML file.

    Parameters:
    - xml_file_path (str): The path to the XML file.
    - material_name (str): The name of the material to search for.

    Returns:
    - str: The name of the texture if the material is found, else None.
    """
    try:
        # Parse the XML file
        tree = ET.parse(xml_file_path)
        root = tree.getroot()
        # Find the material with the specified name
        target_material = root.find(f".//material[@name='{material_name}']")
        if target_material is not None:
            # Find the 'diffuse_texture' element within the material
            diffuse_texture_element = target_material.find("diffuse_texture")
            if diffuse_texture_element is not None:
                # Get the value of the 'file' attribute in <diffuse_texture> and extract the texture name
                diffuse_texture_path = diffuse_texture_element.get("file")
                texture_name = diffuse_texture_path.split("/")[-1]  # Extract the texture name from the file path
                return f"{texture_name}.dds"
            else:
                print(f"Diffuse texture not found for material '{material_name}' in the XML file.")
                return None
        else:
            print(f"Material '{material_name}' not found in the XML file.")
            return None
    except FileNotFoundError:
        print(f"File not found: {xml_file_path}")
        return None
    except Exception as e:
        print(f"Error while processing {xml_file_path}: {e}")
        return None
# Main function to process the object
def process_gltf(file_path):
    # Get the filename without extension
    file_name = os.path.splitext(os.path.basename(file_path))[0]
    # Clear existing mesh objects in the scene
    bpy.ops.object.select_all(action='SELECT')
    bpy.ops.object.delete()
    # Import the GLTF file
    bpy.ops.import_scene.gltf(filepath=file_path)
    # Deselect all objects
    bpy.ops.object.select_all(action='DESELECT')
    # Find the mesh with the highest poly count
    max_poly_count = 0
    object_with_max_poly_count = None
    for obj in bpy.context.scene.objects:
        if obj.type == 'MESH' and len(obj.data.polygons) > max_poly_count:
            max_poly_count = len(obj.data.polygons)
            object_with_max_poly_count = obj
    # Select the parent (root) object of the object with the highest poly count
    if object_with_max_poly_count:
        root_object = object_with_max_poly_count.parent
        if root_object:
            root_object.select_set(True)
            # Set the 3D cursor as the pivot point for the transformation
            bpy.context.scene.tool_settings.transform_pivot_point = 'CURSOR'
            # Set the location of the selected object to the 3D cursor
            root_object.location = bpy.context.scene.cursor.location
            # Set the transform rotation mode to XYZ Euler
            root_object.rotation_mode = 'XYZ'
            # Add the specified rotation to the object
            root_object.rotation_euler[0] = -1.5708  # X-axis rotation, -90 degrees in radians
            root_object.rotation_euler[1] = 0  # Y-axis rotation, 0 degrees in radians
            root_object.rotation_euler[2] = 0  # Z-axis rotation, 0 degrees in radians
    # Select all objects in the scene
    bpy.ops.object.select_all(action='SELECT')
    # Apply all transforms to the selected objects
    bpy.ops.object.transform_apply(location=True, rotation=True, scale=True)
    # Select meshes whose names start with "g_"
    bpy.ops.object.select_all(action='DESELECT')
    for obj in bpy.context.scene.objects:
        if obj.type == 'MESH' and obj.name.startswith("g_"):
            obj.select_set(True)
    # Invert the selection and delete the unselected objects
    bpy.ops.object.select_all(action='INVERT')
    bpy.ops.object.delete()
    # Join remaining objects together
    bpy.ops.object.select_all(action='DESELECT')
    # Mesh objects
    MSH_OBJS = [m for m in bpy.context.scene.objects if m.type == 'MESH']
    # Check if there are any objects in the list
    if MSH_OBJS:
        for OBJS in MSH_OBJS:
            # Select all mesh objects
            OBJS.select_set(state=True)
            # Make one active
            bpy.context.view_layer.objects.active = OBJS
        # Join the selected objects
        bpy.ops.object.join()
        # Rename the object to the filename
        bpy.ops.object.select_all(action='DESELECT')
        bpy.ops.object.select_by_type(type='MESH')
        bpy.context.selected_objects[0].name = file_name
        # Get the material config
        object_file_path = get_object_filepath(file_path)
        material_config = process_xml_file(object_file_path)
        # Loop over each material slot and get the material name
        for material_slot in bpy.context.selected_objects[0].material_slots:
            # Get the material in the current slot
            material = material_slot.material
            # Get the material name
            material_name = material.name
            # Initialize the variable with a default value
            diffuse_texture_name = None
            # Get the diffuse texture name using get_diffuse_texture_name()
            # TODO: make this path not hardcoded
            if material_config is not None:
                get_material_config_path = material_config.replace("units/", "D:/PayDay2/output/") + ".material_config"
                diffuse_texture_name = get_diffuse_texture_name(get_material_config_path, material_name)
            else:
                print("Material config is None. Handle this case accordingly.")
            # Note: this crashes when it cannot find the files, so make sure the files are present
            # Check if the texture exists
            if diffuse_texture_name:
                # Construct the full path to the texture
                texture_path = os.path.join(texture_directory, diffuse_texture_name)
                # Check if the texture file exists before trying to load it
                if os.path.exists(texture_path):
                    # Set the diffuse texture as the base color
                    set_diffuse_texture_as_base_color(material, texture_path)
                else:
                    print(f"Texture not found for material {material_name}: {texture_path}")
            else:
                print(f"No diffuse texture found for material {material_name}")
        # Ensure the output directory exists
        output_path = os.path.join(output_directory, os.path.relpath(file_path, start=input_directory))
        os.makedirs(os.path.dirname(output_path), exist_ok=True)
        # Select all objects in the scene
        bpy.ops.object.select_all(action='SELECT')
        # Export as .gltf to the output directory
        bpy.ops.export_scene.gltf(filepath=output_path, export_format='GLTF_EMBEDDED', use_selection=True)
        # Remove materials and meshes
        for material in bpy.data.materials:
            bpy.data.materials.remove(material, do_unlink=True)
        for mesh in bpy.data.meshes:
            bpy.data.meshes.remove(mesh, do_unlink=True)
        for image in bpy.data.images:
            bpy.data.images.remove(image, do_unlink=True)
    else:
        print("No objects selected.")
        for material in bpy.data.materials:
            bpy.data.materials.remove(material, do_unlink=True)
        for mesh in bpy.data.meshes:
            bpy.data.meshes.remove(mesh, do_unlink=True)
        for image in bpy.data.images:
            bpy.data.images.remove(image, do_unlink=True)
# Recursive function to process all .gltf files in a directory
def process_directory(directory):
    for root, dirs, files in os.walk(directory):
        for file in files:
            if file.lower().endswith('.gltf'):
                file_path = os.path.join(root, file)
                process_gltf(file_path)
# Start processing from the input directory
process_directory(input_directory)

pdworld2json.py Normal file (+139 lines)

@@ -0,0 +1,139 @@
import json
import os
from scipy.spatial.transform import Rotation
import xml.etree.ElementTree as ET
import numpy as np
def parse_xml_to_json(xml_file_path, json_file_path, include_unit_id=False):
    # Parse the XML file
    tree = ET.parse(xml_file_path)
    root = tree.getroot()
    # Create a list to store the parsed data
    world_data = []
    # Keep track of unique instances of the "path" attribute
    unique_paths = set()
    # Iterate through the "unit_data" elements and extract the desired attributes
    for unit_data_elem in root.findall('.//unit_data'):
        position_str = unit_data_elem.get('position')
        rotation_quaternion_str = unit_data_elem.get('rotation')
        full_name = unit_data_elem.get('name')
        unit_id = unit_data_elem.get('unit_id') if include_unit_id else None
        # Replace commas with dots in the position and quaternion strings
        position_str = position_str.replace(',', '.')
        rotation_quaternion_str = rotation_quaternion_str.replace(',', '.')
        # Extract name and path
        unit_description = full_name.split('/')[-1]
        path = full_name
        # Track unique instances of the "path" attribute
        unique_paths.add(path)
        # Convert position and quaternion to vectors
        try:
            position_vector = list(map(float, position_str.split()))
            # Convert right-handed to left-handed coordinates
            position_vector[0] = -position_vector[0]
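            # Note: SciPy's Rotation.from_quat expects scalar-last (x, y, z, w) quaternions;
            # this assumes the .continent 'rotation' attribute is stored in the same order.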
            quaternion = np.array(list(map(float, rotation_quaternion_str.split())))
            rotation = Rotation.from_quat(quaternion)
            euler_angles = rotation.as_euler('zyx')
            # Convert radians to degrees and round to 3 decimal places
            rotation_vector = np.degrees(euler_angles).round(3).tolist()
            rotation_vector[0] = -rotation_vector[0]
            # Convert Euler angles to Rotator format
            rotator = {
                'yaw': rotation_vector[0],
                'pitch': rotation_vector[1],
                'roll': rotation_vector[2]
            }
            # Generate Unreal object reference
            object_reference = f"/Script/Engine.StaticMesh'/Game/{path}/{unit_description}.{unit_description}'"
            # Use 'name' instead of 'unit_id'
            unit_data = {
                'position': {
                    'x': position_vector[0],
                    'y': position_vector[1],
                    'z': position_vector[2]
                },
                'rotation_quaternion': rotation_quaternion_str,
                'rotation_vector': rotator,
                'unit_description': unit_description,
                'path': object_reference,
            }
            if include_unit_id:
                unit_data['name'] = unit_id
            else:
                unit_data['name'] = unit_description
            world_data.append(unit_data)
        except ValueError:
            print(f"Skipping invalid data: {position_str}, {rotation_quaternion_str}")
    # Print the unique instances of the "path" attribute
    unique_dlc_units = set()
    for unique_path in unique_paths:
        dlc = unique_path.split('/')[1]
        unique_dlc_units.add(dlc)
    print(f"Unit DLCs in this map: {', '.join(unique_dlc_units)}. Make sure to get them from the game")
    # Convert the list to JSON and write it to the output file
    with open(json_file_path, 'w') as json_file:
        json.dump(world_data, json_file, indent=2)
    print(f'Conversion complete. JSON data written to {json_file_path}')
def process_folder(folder_path):
    # Ensure the folder exists, create it if it doesn't
    full_folder_path = os.path.abspath(folder_path)
    if not os.path.exists(full_folder_path):
        os.makedirs(full_folder_path)
        print(f'Created folder: {full_folder_path}')
    # List all files and folders in the current directory
    files_and_folders = os.listdir(full_folder_path)
    for item in files_and_folders:
        item_path = os.path.join(full_folder_path, item)
        # If it's a directory, recursively process it
        if os.path.isdir(item_path):
            print(f'Processing folder: {item_path}')
            process_folder(item_path)
        # If it's a file with a .continent extension, process it
        elif os.path.isfile(item_path) and item.lower().endswith('.continent'):
            print(f'Processing file: {item_path}')
            # Get the directory of the script using __file__
            script_directory = os.path.abspath(os.path.dirname(__file__))
            json_file_path = os.path.abspath(os.path.join(script_directory, 'WorldFiles', 'UnrealJSON', f'{os.path.splitext(item)[0]}.json'))
            parse_xml_to_json(item_path, json_file_path, include_unit_id=True)
            print(f'Created JSON file: {json_file_path}')
# Get the working directory
root_folder = r'\WorldFiles\DieselWorldCONTINENT'
dir_path = os.path.dirname(os.path.realpath(__file__))
folder_path = dir_path + root_folder
# Start processing
print(f'Starting processing from: {folder_path}')
process_folder(folder_path)

requirements.txt Normal file (+1 line)

@@ -0,0 +1 @@
scipy==1.11.3