# NOTE: the following lines are scraped file-listing metadata (not Ruby
# source); commented out so this file parses:
#   Files
#   gtav-src/tools_ng/lib/pipeline/resourcing/converters/converter_map_collision.rb
#   2025-09-29 00:52:08 +02:00
#   508 lines / 21 KiB / Ruby / Executable File

#
# File:: %RS_TOOLSLIB%/pipeline/resourcing/converters/converter_map_collision.rb
# Description:: Map data collision processor.
#
# Author:: Jonny Rivers
# Date:: 19th October 2011
#
#----------------------------------------------------------------------------
# Uses
#----------------------------------------------------------------------------
require 'pipeline/config/projects'
require 'pipeline/content/content_core'
require 'pipeline/os/file'
require 'pipeline/os/path'
require 'pipeline/os/start'
require 'pipeline/projectutil/data_zip'
require 'pipeline/resourcing/converter'
require 'rexml/document'
include Pipeline
include Pipeline::Resourcing
include REXML
#----------------------------------------------------------------------------
# Implementation
#----------------------------------------------------------------------------
module Pipeline
module Resourcing
module Converters
# One BoundsProcessor task input: the scene XML pathname for a map zip,
# paired with the pathname of its bounds archive (nil when the map zip
# carries no bounds data).
class BoundsProcessorTaskInput
  attr_accessor :scenexml_pathname, :bounds_pathname

  def initialize( scenexml_pathname, bounds_pathname )
    @scenexml_pathname = scenexml_pathname
    @bounds_pathname = bounds_pathname
  end
end
# Where a BoundsProcessor task writes its results: a task name plus the
# directory that receives the processed bounds files.
class BoundsProcessorTaskOutput
  attr_accessor :name, :output_directory

  def initialize( name, output_directory )
    @name = name
    @output_directory = output_directory
  end
end
# A single unit of BoundsProcessor work: a collection of
# BoundsProcessorTaskInput objects and the BoundsProcessorTaskOutput
# describing where the results go.
class BoundsProcessorTask
  attr_accessor :inputs, :output

  def initialize( inputs, output )
    @inputs = inputs
    @output = output
  end
end
#
# == Description
# Map Collision processor.  Runs the external BoundsProcessor tool over the
# collision archives inside processed map zips and packages its *.bnd output
# into ibn.zip / ibd.zip archives for the resourcing pipeline.
#
class MapConverterCollision < ChildConverterBase
  attr_reader :input_file_list

  def initialize( project, branch )
    super( project, branch )
  end

  # Build the content.
  #
  # processedzips:: collection of processed map zip nodes to run the
  #                 BoundsProcessor over.
  # block::         accepted for interface compatibility; not used here.
  #
  # Returns a hash keyed by each processedzip with the value
  # { :output => [new zip pathnames], :exclude => [input-only filenames] }.
  def build( processedzips, &block )
    MapConverter::log().info( "MapConverterCollision::build()" )
    maps_output = {}
    # build a list of tasks to be processed (one BoundsProcessor run per call)
    bounds_processor_tasks = []
    processedzips.each do |processedzip|
      cache_dir = MapConverter::cache_dir( @branch, processedzip )
      files_to_exclude = [] # incoming bounds archive files to be removed from the output zip
      # build a hash of mapzip nodes to bounds filenames (and append any files to exclude)
      mapzip_to_bounds_filename_hash = build_mapzip_to_bounds_filename_hash( processedzip.inputs, files_to_exclude )
      if ( mapzip_to_bounds_filename_hash.count > 0 ) then
        bounds_processor_task_inputs = []
        processedzip.inputs.each do |mapzip|
          # BUGFIX: the original guard read "mapzip.children.size > 0 || result == false";
          # 'result' was an undefined local, so any childless mapzip raised
          # NameError. Only the children check was intended.
          next unless mapzip.children.size > 0
          mapzip_scene_xml_pathname = nil
          mapzip_bounds_pathname = nil
          # find the scene xml path for this input
          mapzip.children.each do |mapzip_child|
            if ( mapzip_child.is_a?( Content::MapSceneXml ) ) then
              mapzip_scene_xml_pathname = mapzip_child.filename
            end
          end
          # find the bounds zip file for this input (if any)
          if ( mapzip_to_bounds_filename_hash.has_key?( mapzip ) ) then
            bounds_pathname = OS::Path::combine( cache_dir, mapzip_to_bounds_filename_hash[mapzip] )
            # this data is input only, so add it to the exclusion list
            files_to_exclude << OS::Path::get_filename( bounds_pathname )
            mapzip_bounds_pathname = bounds_pathname
          end
          bounds_processor_task_inputs << BoundsProcessorTaskInput.new( mapzip_scene_xml_pathname, mapzip_bounds_pathname )
        end
        # if there's anything to process, queue a task for this processedzip
        if ( bounds_processor_task_inputs.count > 0 ) then
          processed_bounds_output_dir = "#{cache_dir}/processed_bounds"
          bounds_processor_task_output = BoundsProcessorTaskOutput.new( processedzip.name, processed_bounds_output_dir )
          bounds_processor_tasks << BoundsProcessorTask.new( bounds_processor_task_inputs, bounds_processor_task_output )
        end
      end
      files_to_exclude.each do |file_to_exclude|
        MapConverter::log().info( "Bounds Processor excluded #{file_to_exclude}" )
      end
      # Construct first pass of maps_output hash
      maps_output[processedzip] = { :output => [], :exclude => files_to_exclude }
    end # each processedzip
    # run the bounds processor once over all of our queued tasks
    if ( bounds_processor_tasks.count > 0 ) then
      cache_dir_raw = MapConverter::cache_dir_raw( @branch )
      config_pathname = OS::Path::combine( cache_dir_raw, 'bounds_processor.xml' )
      MapConverterCollision.build_bounds_processor_config_xml_from_tasks( config_pathname, bounds_processor_tasks )
      command = ''
      @branch.in_env do |e|
        command = e.subst( "#{@@BOUNDS_PROCESSOR_CL} --config #{config_pathname}" )
      end
      MapConverter::log().info( "Running bounds processor: #{command}" )
      status, _out, _err = OS::start( command )
      if ( status.exitstatus == 0 ) then
        # scan the output directories and build the :output part of maps_output
        processedzips.each do |processedzip|
          cache_dir = MapConverter::cache_dir( @branch, processedzip )
          maps_output[processedzip][:output] =
            MapConverterCollision.compress_processed_bounds( "#{cache_dir}/processed_bounds" )
        end
      else
        MapConverter::log().error( "Bounds Processor failed." )
      end
    end
    maps_output
  end

  # Scans each mapzip archive for bounds data in any of the three historical
  # formats, newest first:
  #   <name>_collision.zip            (bounds-processor friendly, current)
  #   <name>.ibr.zip                  (old bounds collection)
  #   <name>_static_bounds.ibn.zip    (oldest bounds collection)
  # Maps each mapzip to the newest-format filename found and appends any
  # superseded (stale) filenames to files_to_exclude so they do not make it
  # into the output zip.
  #
  # Returns the mapzip => bounds filename hash.
  def build_mapzip_to_bounds_filename_hash( mapzips, files_to_exclude )
    mapzip_to_bounds_filename_hash = {}
    mapzips.each do |mapzip|
      # Determine the filename of the bounds processor friendly data
      new_bounds_filename = "#{mapzip.name}_collision.zip"
      bounds_filename = "#{mapzip.name}.ibr.zip" # old format bounds collection filename
      static_bounds_filename = "#{mapzip.name}_static_bounds.ibn.zip" # oldest format bounds collection filename
      found_new_data = false
      found_old_data = false
      found_oldest_data = false
      # Try to find the bounds processor friendly data in the zip
      mapzip_files = ProjectUtil::data_zip_filelist( mapzip.filename )
      mapzip_files.each do |mapzip_file|
        if ( 0 == mapzip_file.casecmp( new_bounds_filename ) ) then
          found_new_data = true
        elsif ( 0 == mapzip_file.casecmp( bounds_filename ) ) then
          found_old_data = true
        elsif ( 0 == mapzip_file.casecmp( static_bounds_filename ) ) then
          found_oldest_data = true
        end
      end
      # need to prevent stale data in the old format making it through
      if ( found_new_data ) then
        mapzip_to_bounds_filename_hash[mapzip] = new_bounds_filename
        files_to_exclude << bounds_filename if ( found_old_data )
        files_to_exclude << static_bounds_filename if ( found_oldest_data )
      elsif ( found_old_data ) then
        mapzip_to_bounds_filename_hash[mapzip] = bounds_filename
        files_to_exclude << static_bounds_filename if ( found_oldest_data )
      elsif ( found_oldest_data ) then
        mapzip_to_bounds_filename_hash[mapzip] = static_bounds_filename
      end
    end
    mapzip_to_bounds_filename_hash
  end

  #
  # Static call that populates the BoundsProcessor <Options> element with the
  # tuning constants shared by both config builders.  An ordered array of
  # pairs (rather than a hash) guarantees a stable element order.
  #
  def self.add_bounds_processor_options( options_element )
    [
      [ "MaxStandardCompositeSizeXY",        "150" ],
      [ "MaxHighDetailCompositeSizeXY",      "150" ],
      [ "MaxBVHDataSize",                    "262144" ],
      [ "MaxStandardMapBVHDataSize",         "131072" ],
      [ "MaxHighDetailMapBVHDataSize",       "262144" ],
      [ "MaxStandardMapCompositeDataSize",   "1048576" ],
      [ "MaxHighDetailMapCompositeDataSize", "1048576" ],
      [ "MaxMaterialColourPaletteSize",      "28" ],
      [ "MinPrimitivesPerComposite",         "1500" ],
      [ "Parallelise",                       "True" ],
      [ "Profile",                           "False" ],
      [ "ComplexMapCollisionSplitting",      "True" ],
      [ "SplitAlternativesToConsider",       "50" ],
      [ "SplitDivergenceToConsider",         "0.015" ],
      [ "MaxNonBVHPropVolume",               "1" ],
      [ "MaxNonBVHPropPrimitiveCount",       "64" ]
    ].each do |name, value|
      option_element = options_element.add_element( name )
      option_element.text = value
    end
  end

  #
  # Static call that writes the XML declaration plus the pretty-printed
  # document to config_pathname.  The block form of File.open guarantees the
  # handle is closed even if REXML formatting raises (the original left the
  # file open in that case).
  #
  def self.write_config_xml( config_pathname, xml_document )
    File.open( config_pathname, "w+" ) do |file|
      file.write( '<?xml version = "1.0"?>' )
      file.write( "\n" )
      REXML::Formatters::Pretty.new().write( xml_document, file )
    end
  end

  #
  # Static call that packages BoundsProcessor output found beneath output_dir:
  # * each *.bnd file in the root folder is zipped into its own <name>.ibn.zip
  # * each sub-directory of *.bnd files becomes a <dirname>.ibd.zip dictionary
  # Returns the array of created zip pathnames.
  #
  def self.compress_processed_bounds( output_dir )
    compressed_files = []
    # scoop up any files produced by the BP in the root output folder into their own ibn
    OS::FindEx::find_files( OS::Path::combine( output_dir, "*.bnd" ) ).each do |processed_file|
      zip_pathname = OS::Path::replace_ext( processed_file, "ibn.zip" )
      ProjectUtil::data_zip_create( zip_pathname, [ processed_file ], true )
      compressed_files << zip_pathname
    end
    # build bounds dictionaries for each directory produced by the BP
    OS::FindEx::find_dirs( output_dir ).each do |processed_dir|
      dictionary_name = processed_dir.split( '/' ).last
      source_files = OS::FindEx::find_files( OS::Path::combine( processed_dir, "*.bnd" ) )
      zip_pathname = OS::Path::combine( output_dir, "#{dictionary_name}.ibd.zip" )
      ProjectUtil::data_zip_create( zip_pathname, source_files, true )
      compressed_files << zip_pathname
    end
    compressed_files
  end

  #
  # Static call for building the bounds processor config.xml
  # (single <Output> flavour, used by preview builds).
  #
  # output_name::                 value for the <Output> name attribute.
  # bounds_processor_inputs::     hash of input name => { :scenexml => path,
  #                               :bounds => path (optional) }.
  # cache_dir::                   directory that receives bounds_processor.xml.
  # processed_bounds_output_dir:: value for the <Output> directory attribute.
  #
  # Returns the pathname of the written config file.
  #
  def self.build_bounds_processor_config_xml( output_name, bounds_processor_inputs, cache_dir, processed_bounds_output_dir )
    config_pathname = OS::Path::combine( cache_dir, "bounds_processor.xml" )
    xml_document = REXML::Document.new()
    root_element = xml_document.add_element( "BoundsProcessor" )
    add_bounds_processor_options( root_element.add_element( "Options" ) )
    bounds_processor_inputs.each_pair do |name, details|
      input_element = root_element.add_element( "Input" )
      input_element.attributes["scenexml"] = details[:scenexml]
      if ( details.has_key?( :bounds ) ) then
        input_element.attributes["bounds"] = details[:bounds]
      end
    end
    output_element = root_element.add_element( "Output" )
    output_element.attributes["name"] = output_name
    output_element.attributes["directory"] = processed_bounds_output_dir
    write_config_xml( config_pathname, xml_document )
    config_pathname
  end

  #
  # Static call for building the bounds processor config.xml from a
  # collection of BoundsProcessorTask objects (multi <Task> flavour used by
  # the batched build() path).  Writes the config to config_pathname.
  #
  def self.build_bounds_processor_config_xml_from_tasks( config_pathname, bounds_processor_tasks )
    xml_document = REXML::Document.new()
    root_element = xml_document.add_element( "BoundsProcessor" )
    add_bounds_processor_options( root_element.add_element( "Options" ) )
    tasks_element = root_element.add_element( "Tasks" )
    bounds_processor_tasks.each do |bounds_processor_task|
      task_element = tasks_element.add_element( "Task" )
      bounds_processor_task.inputs.each do |input|
        input_element = task_element.add_element( "Input" )
        input_element.attributes["scenexml"] = input.scenexml_pathname
        if ( input.bounds_pathname != nil ) then
          input_element.attributes["bounds"] = input.bounds_pathname
        end
      end
      output_element = task_element.add_element( "Output" )
      output_element.attributes["name"] = bounds_processor_task.output.name
      output_element.attributes["directory"] = bounds_processor_task.output.output_directory
    end
    write_config_xml( config_pathname, xml_document )
  end

  #
  # Static call for generating processed collision for preview from a
  # collection of map_zip_nodes.  All generated archives are added to a
  # 'processed_collision' group beneath patch_content_group, which is
  # returned.
  #
  def self.build_preview( config, branch, preview_map_zip_nodes, patch_content_group )
    processed_filenames = []
    processed_map_zip_nodes = get_processed_map_zip_nodes_from_map_zip_nodes( preview_map_zip_nodes )
    processed_map_zip_nodes.each do |processed_map_zip_node|
      processed_filenames += build_preview_processed( config, branch, processed_map_zip_node, preview_map_zip_nodes )
    end
    # add all new content to a processed_collision group under the patch_content_group
    processed_collision_group = Content::Group.new( 'processed_collision' )
    processed_filenames.each do |processed_filename|
      processed_collision_group.add_child( Content::Zip::from_filename_and_target( processed_filename, config.project.ind_target ) )
    end
    patch_content_group.add_child( processed_collision_group )
    patch_content_group
  end

  #
  # Static call to determine the unique ProcessedMapZip output nodes
  # associated with a collection of map_zip nodes.
  #
  def self.get_processed_map_zip_nodes_from_map_zip_nodes( map_zip_nodes )
    processed_map_zip_nodes = []
    map_zip_nodes.each do |map_zip_node|
      map_zip_node.outputs.each do |map_zip_node_output|
        if ( map_zip_node_output.is_a?( Pipeline::Content::ProcessedMapZip ) ) then
          processed_map_zip_nodes << map_zip_node_output
        end
      end
    end
    processed_map_zip_nodes.uniq
  end

  #
  # Static call for generating processed collision for preview from a given
  # processed_map_zip node (with the collection of map_zip nodes being
  # previewed for reference).  Runs the BoundsProcessor into a per-node
  # temp directory and returns the array of compressed output pathnames
  # (empty on failure).
  #
  def self.build_preview_processed( config, branch, processed_map_zip_node, preview_map_zip_nodes )
    cache_directory = OS::Path.combine( config.temp(), "preview_collision" )
    preview_collision_directory = OS::Path.combine( cache_directory, processed_map_zip_node.name )
    FileUtils::mkpath( preview_collision_directory )
    bounds_processor_inputs = {}
    processed_map_zip_node.inputs.each do |map_zip_node|
      # NOTE(review): assumes children[0] is the MapSceneXml node — matches
      # existing behavior; confirm against the content graph construction.
      input_details = { :scenexml => map_zip_node.children[0].filename }
      if ( preview_map_zip_nodes.index( map_zip_node ) != nil ) then
        # this is a mapzip we are previewing, so we get the <map>_collision.zip from the mapzip inputs
        map_zip_node.inputs.each do |map_zip_node_input|
          next unless map_zip_node_input.is_a?( Pipeline::Content::File )
          if ( map_zip_node_input.filename.end_with?( "_collision.zip" ) ) then
            input_details[:bounds] = map_zip_node_input.filename
          end
        end
      else
        # this is an existing mapzip that hasn't been previewed, so we have to extract it for combination with the preview data
        ProjectUtil::data_zip_filelist( map_zip_node.filename ).each do |map_zip_filename|
          if ( map_zip_filename.end_with?( "_collision.zip" ) ) then
            extracted_files = ProjectUtil::data_zip_extract( map_zip_node.filename, preview_collision_directory, true, map_zip_filename )
            input_details[:bounds] = extracted_files[0]
          end
        end
      end
      bounds_processor_inputs[map_zip_node.name] = input_details
    end
    config_pathname = MapConverterCollision.build_bounds_processor_config_xml( processed_map_zip_node.name, bounds_processor_inputs, cache_directory, preview_collision_directory )
    command = ''
    branch.in_env do |e|
      command = e.subst( "#{@@BOUNDS_PROCESSOR_CL} --config #{config_pathname}" )
    end
    MapConverter::log().info( "Running bounds processor: #{command}" )
    status, _out, _err = OS::start( command )
    # gather processed bounds data
    if ( status.exitstatus == 0 ) then
      compress_processed_bounds( preview_collision_directory )
    else
      MapConverter::log().error( "Bounds Processor failed." )
      []
    end
  end

  #--------------------------------------------------------------------
  # Private
  #--------------------------------------------------------------------
private
  # Command line for the external BoundsProcessor tool; $(toolsbin) is
  # substituted via the branch environment before execution.
  @@BOUNDS_PROCESSOR_CL = '$(toolsbin)/MapExport/BoundsProcessor.exe'
end
end # Converters module
end # Resourcing module
end # Pipeline module
# %RS_TOOLSLIB%/pipeline/resourcing/converters/converter_map_collision.rb