# (File-browser scrape residue — not part of the original source, preserved as a comment.)
# Files: gtav-src/tools_ng/lib/pipeline/resourcing/converters/converter_map_assetcombine.rb
# 2025-09-29 00:52:08 +02:00 — 841 lines, 39 KiB, Ruby, Executable File

#
# File:: %RS_TOOLSLIB%/pipeline/resourcing/converters/converter_map_assetcombine.rb
# Description:: Map data asset combine processor.
#
# Author:: David Muir <david.muir@rockstarnorth.com>
# Date:: 27 October 2011
#
#----------------------------------------------------------------------------
# Uses
#----------------------------------------------------------------------------
require 'pipeline/config/projects'
require 'pipeline/content/content_core'
require 'pipeline/os/start'
require 'pipeline/projectutil/data_zip'
require 'pipeline/resourcing/converter'
require 'rexml/document'
include Pipeline
include Pipeline::Resourcing
include REXML
#----------------------------------------------------------------------------
# Implementation
#----------------------------------------------------------------------------
module Pipeline
module Resourcing
module Converters
#
# == Description
# Map data Asset Combine processor.
#
class MapConverterAssetCombine < ChildConverterBase
# Construct the converter for the given project and branch.
# Delegates straight to ChildConverterBase; kept only for explicitness.
def initialize( project, branch )
super( project, branch )
end
# Invoke the Asset Combine Processor; use its output XML to package
# assets together. Keep the output file around so the SceneXml
# converter can use it.
#
# Invoke the Asset Combine Processor and use its output XML to package
# assets together.  The output XML pathname is recorded per-map (under
# :asset_combine) so the SceneXml converter can pick it up later.
#
# maps  - Array of map content nodes to process.
# block - unused here; kept for interface compatibility with the other
#         converter build() methods.
#
# Returns a Hash keyed on map node; each value Hash may contain
# :success, :output, :exclude, :combine, :conversion and :asset_combine.
def build( maps, &block )
  MapConverter::log().info( "MapConverterAssetCombine::build()" )
  cache_dir_raw = MapConverter::cache_dir_raw( @branch )
  @maps_output = {}
  # Pre-process map list; determine maps that are to use the fallback
  # algorithm, ones that don't need processing and ones that are fully
  # part of the new pipeline.
  @maps_fallback = [] # Array of fallback maps (combine XML separate)
  @maps_merged = []   # Array of merged maps (single combined XML for all)
  maps.each do |map|
    # Determine if we have new exported data and whether we need to run;
    # if there are already drawable dictionaries in the mapzip then
    # we just return success as we need more-naive export data.
    requires_assetcombine = true # assume true
    maptype = nil
    map.inputs.each do |input|
      next unless ( input.is_a?( Content::MapZip ) )
      maptype = input.inputs[0].maptype
      requires_assetcombine = false if ( :props == maptype or :interior == maptype )
      zip_files = ProjectUtil::data_zip_filelist( input.filename )
      zip_files.each do |filename|
        requires_assetcombine = false if ( File::fnmatch?( '*.idd.zip', filename ) )
      end
    end
    if ( not requires_assetcombine ) then
      MapConverter::log().warn( "Map does not require Asset Combine stage." )
      # Return converter output; we're not changing any content or
      # needing to exclude any assets on this occasion.
      @maps_output[map] = { :success => true, :output => [], :exclude => [] }
      next
    end
    map_merged = map.parent.find_by_script( "content.xml_type == 'mapcombinezip'" ).shift
    if ( map_merged.nil? ) then
      MapConverter::log().warn( "No map combine zip available. Using fallback algorithms for '#{map.name}'." )
      @maps_fallback << map
    else
      @maps_merged << map
    end
  end
  # Now we know which maps need to be processed; invoke the Asset
  # Combine Processor as required.  First of all process the fallback
  # maps, specifying only each map's own SceneXml files rather than the
  # entire area's data.
  @maps_fallback.each do |map|
    scenexml_files = []
    export_definitions = false
    map.inputs.each do |input|
      next unless ( input.is_a?( Content::MapZip ) )
      export_definitions = true if ( input.inputs[0].exportdefinitions )
      input.children.each do |child|
        next unless ( child.is_a?( Pipeline::Content::MapSceneXml ) )
        scenexml_files << child.filename
      end
    end
    # Re-generate the Asset Combine XML data by invoking the serialiser
    # and reading the data file to do the packaging.
    if ( export_definitions ) then
      if ( scenexml_files.size() > 0 ) then
        MapConverter::log().info( "Map #{map.name} using fallback algorithms for data packaging." )
        @branch.in_env do |e|
          cache_dir_raw = MapConverter::cache_dir_raw( @branch )
          cache_dir = MapConverter::cache_dir( @branch, map )
          output_file = OS::Path::combine( cache_dir, 'asset_combine.xml' )
          scenes_file = OS::Path::combine( cache_dir, 'asset_combine_scenes.txt' )
          # Write Scenes Input file.
          File::open( scenes_file, 'w' ) do |fp|
            scenexml_files.each { |filename| fp.puts filename }
          end
          e.add( 'output', output_file )
          e.add( 'cache', cache_dir_raw )
          command = e.subst( "#{@@PROCESSOR} #{@@ARGS} --fallback --scenes #{scenes_file}" )
          MapConverter::log().info( "Processor command: #{command}" )
          status, out, err = OS::start( command )
          if ( status.exitstatus > 0 ) then
            MapConverter::log().error( "Map Asset Combine processing failed [fallback]." )
          end
          package_output = process_packages( output_file )
          # BUGFIX: guard on the map's own key; previously a non-empty
          # package_output missing this map's name crashed on nil below.
          map_packages = package_output.has_key?( map.name ) ? package_output[map.name] : nil
          if ( not map_packages.nil? )
            puts "PACKAGE KEYS: #{package_output.keys.join(', ')}"
            puts " MAP KEYS: #{map_packages.keys.join(', ')}"
            puts " OUTPUT: #{map_packages[:output].join(', ')}" \
              if ( map_packages.has_key?( :output ) )
            puts " EXCLUDE: #{map_packages[:exclude].join(', ')}" \
              if ( map_packages.has_key?( :exclude ) )
            puts " COMBINE: #{map_packages[:combine].join(', ')}" \
              if ( map_packages.has_key?( :combine ) )
            @maps_output[map] = {} unless ( @maps_output.has_key?( map ) )
            @maps_output[map][:asset_combine] = output_file
            # BUGFIX: initialise the :output list *before* appending to
            # it; previously the += on a missing key raised NoMethodError
            # whenever the packages carried an :output list.
            @maps_output[map][:output] = [] unless ( @maps_output[map].has_key?( :output ) )
            @maps_output[map][:output] += map_packages[:output] \
              if ( map_packages.has_key?( :output ) )
            # We include the :combine in the output so it gets packed
            # with the map data.
            @maps_output[map][:output] += map_packages[:combine] \
              if ( map_packages.has_key?( :combine ) )
            @maps_output[map][:exclude] = [] unless ( @maps_output[map].has_key?( :exclude ) )
            @maps_output[map][:exclude] += map_packages[:exclude] \
              if ( map_packages.has_key?( :exclude ) )
          else
            @maps_output[map] = { :success => true, :output => [], :exclude => [], :conversion => [] }
          end
        end
      else
        MapConverter::log().error( "Map '#{map.name}' fallback had no input SceneXml files to drive Asset Combine." )
        @maps_output[map] = { :success => false, :output => [], :exclude => [], :conversion => [] }
      end
    end
  end
  # Next we process the fully merged maps; invoking the Asset Combine
  # Processor once for all of them, utilising regular algorithms.
  if ( @maps_merged.size() > 0 ) then
    # Construct an array of all SceneXml data files for all maps within
    # this map's area.
    scenexml_files = []
    @maps_merged.each do |map|
      map.inputs.each do |input|
        next unless ( input.is_a?( Content::MapZip ) )
        input.children.each do |child|
          next unless ( child.is_a?( Pipeline::Content::MapSceneXml ) )
          scenexml_files << child.filename
        end
      end
    end
    MapConverter::log().info( "There are #{@maps_merged.size} maps to drive Asset Combine Processor." )
    cache_dir_raw = MapConverter::cache_dir_raw( @branch )
    output_file = OS::Path::combine( cache_dir_raw, 'asset_combine.xml' )
    scenes_file = OS::Path::combine( cache_dir_raw, 'asset_combine_scenes.txt' )
    # Record the shared :asset_combine file against every merged map.
    @maps_merged.each do |map|
      @maps_output[map] = {} unless ( @maps_output.has_key?( map ) )
      @maps_output[map][:asset_combine] = output_file
    end
    # Write Scenes Input file.
    File::open( scenes_file, 'w' ) do |fp|
      scenexml_files.each { |filename| fp.puts filename }
    end
    @branch.in_env do |e|
      e.add( 'output', output_file )
      e.add( 'cache', cache_dir_raw )
      command = e.subst( "#{@@PROCESSOR} #{@@ARGS} --scenes #{scenes_file}" )
      MapConverter::log().info( "Processor command: #{command}" )
      status, out, err = OS::start( command )
      if ( status.exitstatus > 0 ) then
        MapConverter::log().error( "Map Asset Combine processing failed." )
      end
      package_output = process_packages( output_file )
      package_output.each_pair do |mapname, output|
        merged_map = @project.content.find_first( mapname, 'mapcombinezip' )
        if ( merged_map.nil? ) then
          # Fix up exclude list for processed maps.
          map = @project.content.find_first( mapname, 'processedmapzip' )
          @maps_output[map] = {}
          @maps_output[map][:asset_combine] = output_file
          @maps_output[map][:exclude] = output[:exclude]
        else
          @maps_output[merged_map] = {
            :asset_combine => output_file,
            :output => output[:output],
            :exclude => output[:exclude],
            :combine => output[:combine],
            :conversion => [merged_map]
          }
          build_combine_map( merged_map.filename, @maps_output[merged_map][:combine] )
        end
      end
    end
  end
  @maps_output
end
#--------------------------------------------------------------------
# Private
#--------------------------------------------------------------------
private
@@PROCESSOR = '$(toolsbin)/MapExport/AssetCombineProcessor.exe'
@@ARGS = '--output $(output) --cache $(cache)'
# Build output combine map.
# Build output combine map.
#
# output_file - pathname of the combined map zip to create.
# input_files - files to pack into the zip (forwarded to data_zip_create).
def build_combine_map( output_file, input_files )
MapConverter::log().info( "Packaging up combined map output." )
ProjectUtil::data_zip_create( output_file, input_files )
end
# Process an asset combine XML file.
# Process an AssetCombineProcessor XML output file.
#
# filename - pathname of the processor's XML output.
#
# Returns a Hash keyed on map name; each value may carry a :combine
# Array (dictionary zips to pack with that map) and an :exclude Array
# (asset filenames to drop from that map's processed zip).
def process_packages( filename )
  package_output = {}
  File::open( filename ) do |fp|
    xmldoc = Document::new( fp )
    xmldoc.elements.each( '/AssetCombineProcessor/Map' ) do |mapnode|
      mapname = mapnode.attributes['name']
      mapcachedir = mapnode.attributes['cachedir']
      MapConverter::log().info( "Processing Map #{mapname}" )
      FileUtils::mkdir_p( mapcachedir ) \
        unless ( File::directory?( mapcachedir ) )
      # The drawable and texture dictionary handling is identical apart
      # from the builder invoked, so both funnel through one merge helper.
      mapnode.elements.each( 'DrawableDictionary' ) do |dict|
        merge_package_output( package_output, mapname, build_drawable_dictionary_zip( mapcachedir, dict ) )
      end
      mapnode.elements.each( 'TextureDictionary' ) do |dict|
        merge_package_output( package_output, mapname, build_texture_dictionary_zip( mapcachedir, dict ) )
      end
    end
  end
  package_output
end
# Merge a single dictionary build result into the accumulated package
# output.  dict_output carries :combine (Array of built zips) and an
# optional :exclude Hash of map name => Array of excluded filenames.
def merge_package_output( package_output, mapname, dict_output )
  package_output[mapname] = {} unless ( package_output.has_key?( mapname ) )
  package_output[mapname][:combine] = [] unless ( package_output[mapname].has_key?( :combine ) )
  package_output[mapname][:combine] += dict_output[:combine]
  # Handle exclude list.
  if ( dict_output.has_key?( :exclude ) ) then
    dict_output[:exclude].each_pair do |exclude_mapname, exclude_list|
      package_output[exclude_mapname] = {} unless ( package_output.has_key?( exclude_mapname ) )
      package_output[exclude_mapname][:exclude] = [] unless ( package_output[exclude_mapname].has_key?( :exclude ) )
      package_output[exclude_mapname][:exclude] += exclude_list
    end
  end
end
# Build a drawable dictionary zip file.
# Build a drawable dictionary zip file from an asset-combine
# <DrawableDictionary> XML element.
#
# mapcachedir - cache directory for the owning map.
# dict        - REXML::Element describing the dictionary and its inputs.
#
# Returns a Hash with :combine (Array holding the output zip pathname)
# and :exclude (Hash of processed-map-zip name => Array of drawable zip
# filenames to exclude from that map's package).
#
# Raises ArgumentError if dict is not a REXML Element.
def build_drawable_dictionary_zip( mapcachedir, dict )
  # BUGFIX: was 'throw', which is catch/throw control flow and would
  # itself abort with an uncaught-throw error; 'raise' was intended.
  raise ArgumentError::new( "Invalid XML Element (#{dict.class})." ) \
    unless ( dict.is_a?( Element ) )
  dict_output = {}
  dict_output[:combine] = []
  dict_output[:exclude] = {}
  dictname = dict.attributes['name']
  lodlevel = dict.attributes['lod']
  output_filename = OS::Path::combine( mapcachedir, "#{dictname}.idd.zip" )
  # Check whether any inputs are more recent than the output we are
  # wanting to create.  This determines whether we bother building it.
  build_output = false
  if ( File::exist?( output_filename ) )
    output_mtime = File::mtime( output_filename )
    dict.elements.each( 'Inputs/Input' ) do |input|
      source_filename = input.attributes['source']
      source_mtime = File::mtime( source_filename )
      next unless ( ( output_mtime <=> source_mtime ) < 0 )
      build_output = true
    end
  else
    build_output = true
  end
  # Determine exclude list; need to do this whether we build the
  # dictionary or not so the processed map gets packaged correctly.
  dict.elements.each( 'Inputs/Input' ) do |input|
    # Extract component input.
    input_name = input.attributes['name']
    input_cache_dir = input.attributes['cachedir']
    source_filename = input.attributes['source']
    input_filename = OS::Path::combine( input_cache_dir, "#{input_name}.idr.zip" )
    # Find the processed map zip node owning this input.
    map_zip_name = OS::Path::get_basename( source_filename )
    map_zip_node = @project.content.find_first( map_zip_name, 'mapzip' )
    processed_map_zip_node = nil
    unless ( map_zip_node.nil? )
      map_zip_node.outputs.each do |map_zip_node_output|
        if( map_zip_node_output.is_a?( Content::ProcessedMapZip ) ) then
          processed_map_zip_node = map_zip_node_output
        end
      end
    end
    # ROBUSTNESS: previously a missing mapzip/processed node raised
    # NoMethodError on nil; log and skip the exclude entry instead.
    if ( processed_map_zip_node.nil? ) then
      MapConverter::log().error( "No processed map zip found for '#{map_zip_name}'; cannot record exclude for '#{input_name}.idr.zip'." )
    else
      processed_map_zip_name = processed_map_zip_node.name
      dict_output[:exclude][processed_map_zip_name] = [] unless ( dict_output[:exclude][processed_map_zip_name].is_a?( Array ) )
      dict_output[:exclude][processed_map_zip_name] << OS::Path::get_filename( input_filename )
    end
  end
  # Build output.
  if ( build_output ) then
    MapConverter::log().info( "\tBuilding Drawable Dictionary: #{dictname}" )
    FileUtils::mkdir_p( OS::Path::get_directory( output_filename ) )
    input_files = []
    has_embedded_textures = false
    dict.elements.each( 'Inputs/Input' ) do |input|
      # Extract component input.
      input_name = input.attributes['name']
      input_cache_dir = input.attributes['cachedir']
      source_filename = input.attributes['source']
      # Extract input source (the idr.zip from its container zip).
      input_list = ProjectUtil::data_zip_extract3( source_filename, input_cache_dir, true, "#{input_name}.idr.zip" )
      if ( input_list.size() > 0 ) then
        # This means the file was extracted successfully; now extract the
        # idr.zip's own contents into a per-input cache directory.
        input_filename = OS::Path::combine( input_cache_dir, "#{input_name}.idr.zip" )
        input_cache_dir = OS::Path::combine( mapcachedir, dictname, input_name )
        FileUtils::mkdir_p( input_cache_dir ) unless ( File::directory?( input_cache_dir ) )
        temp_files = ProjectUtil::data_zip_extract3( input_filename, input_cache_dir, true )
        temp_files.each do |file|
          has_embedded_textures = true if ( File::fnmatch?( '*.dds', file ) )
          input_file = {}
          input_file[:src] = file
          input_file[:dst] = OS::Path::combine( input_name, OS::Path::get_filename( file ) )
          input_files << input_file
        end
      else
        # File was not found, and not extracted.  We can continue;
        # just don't attempt to pack it.
        MapConverter::log().error( "Input map zip '#{source_filename}' does not include required drawable '#{input_name}.idr.zip'." )
      end
    end
    # Create custom RBS for Ragebuilder and LOD object processing.
    unless ( lodlevel.nil? ) then
      custom_filename = OS::Path::combine( mapcachedir, 'custom.rbs' )
      custom_finish_filename = OS::Path::combine( mapcachedir, 'custom_finish.rbs' )
      File::open( custom_filename, 'w' ) do |fp|
        fp.puts( "set_dwd_type( \"#{lodlevel}\" )" )
        fp.puts( "set_auto_texdict( \"true\" )" ) if ( has_embedded_textures )
      end
      File::open( custom_finish_filename, 'w' ) do |fp|
        fp.puts( "set_auto_texdict( \"false\" )" ) if ( has_embedded_textures )
        fp.puts( "set_dwd_type( \"HD\" )" )
      end
      input_files << custom_filename
      input_files << custom_finish_filename
    end
    # All inputs are extracted; lets pick them up.
    ProjectUtil::data_zip_create( output_filename, input_files )
  else
    MapConverter::log().info( "\tSkipping Drawable Dictionary: #{dictname}" )
  end
  dict_output[:combine] << output_filename
  dict_output
end
# Build a texture dictionary zip file.
# Build a texture dictionary zip file from an asset-combine
# <TextureDictionary> XML element.
#
# mapcachedir - cache directory for the owning map.
# dict        - REXML::Element describing the dictionary and its inputs.
#
# Returns a Hash with :combine (Array holding the output zip pathname)
# and :exclude (Hash of source map name => Array of texture dictionary
# zip filenames to exclude from that map's package).
#
# Raises ArgumentError if dict is not a REXML Element.
def build_texture_dictionary_zip( mapcachedir, dict )
  # BUGFIX: was 'throw', which is catch/throw control flow and would
  # itself abort with an uncaught-throw error; 'raise' was intended.
  raise ArgumentError::new( "Invalid XML Element (#{dict.class})." ) \
    unless ( dict.is_a?( Element ) )
  dict_output = {}
  dict_output[:combine] = []
  dict_output[:exclude] = {}
  dictname = dict.attributes['name']
  output_filename = OS::Path::combine( mapcachedir, "#{dictname}.itd.zip" )
  # Check whether any inputs are more recent than the output we are
  # wanting to create.  This determines whether we bother building it.
  build_output = false
  if ( File::exist?( output_filename ) )
    output_mtime = File::mtime( output_filename )
    dict.elements.each( 'Inputs/Input' ) do |input|
      source_filename = input.attributes['source']
      source_mtime = File::mtime( source_filename )
      next unless ( ( output_mtime <=> source_mtime ) < 0 )
      build_output = true
    end
  else
    build_output = true
  end
  # Determine exclude list; need to do this whether we build the
  # dictionary or not so the processed map gets packaged correctly.
  dict.elements.each( 'Inputs/Input' ) do |input|
    # Extract component input.
    input_name = input.attributes['name']
    input_cache_dir = input.attributes['cachedir']
    source_filename = input.attributes['source']
    source_mapname = OS::Path::get_basename( source_filename )
    input_filename = OS::Path::combine( input_cache_dir, "#{input_name}.itd.zip" )
    dict_output[:exclude][source_mapname] = [] unless ( dict_output[:exclude][source_mapname].is_a?( Array ) )
    dict_output[:exclude][source_mapname] << OS::Path::get_filename( input_filename )
  end
  # Build texture dictionary.
  if ( build_output ) then
    MapConverter::log().info( "\tBuilding Texture Dictionary: #{dictname}" )
    FileUtils::mkdir_p( OS::Path::get_directory( output_filename ) )
    input_files = []
    dict.elements.each( 'Inputs/Input' ) do |input|
      # Extract component input.
      input_name = input.attributes['name']
      input_cache_dir = input.attributes['cachedir']
      source_filename = input.attributes['source']
      # Gather the required items; when present, only textures in this
      # list (or prefix-matching it, for alpha variants) are packed.
      required_items = []
      input.elements.each( 'RequiredItems/Item' ) do |requiredItem|
        required_items << requiredItem.text
      end
      filter_items = required_items.size() > 0
      # Extract input source.
      input_list = ProjectUtil::data_zip_extract3( source_filename, input_cache_dir, true, "#{input_name}.itd.zip" )
      if ( input_list.size() > 0 ) then
        # This means the file was extracted successfully.
        input_filename = OS::Path::combine( input_cache_dir, "#{input_name}.itd.zip" )
        FileUtils::mkdir_p( input_cache_dir ) unless ( File::directory?( input_cache_dir ) )
        potential_input_files = ProjectUtil::data_zip_extract3( input_filename, input_cache_dir, true )
        potential_input_files.each do |potential_input|
          potential_input_basename = OS::Path::get_basename( potential_input )
          if( (not filter_items) or (required_items.find_index(potential_input_basename)) != nil ) then
            input_files << potential_input
          else
            # Try a partial match (for alpha textures).
            partial_match = false
            required_items.each do |req_item|
              next if partial_match
              partial_match = potential_input_basename.start_with?( req_item )
            end
            if( partial_match ) then
              input_files << potential_input
            else
              MapConverter::log().info( "\t\tDidn't take '#{potential_input}' as it isn't required" )
            end
          end
        end
      else
        # File was not found, and not extracted.  We can continue;
        # just don't attempt to pack it.
        MapConverter::log().error( "Input map zip '#{source_filename}' does not include required texture dictionary '#{input_name}.itd.zip'." )
      end
    end
    # All inputs are extracted; lets pick them up.
    ProjectUtil::data_zip_create( output_filename, input_files )
  else
    MapConverter::log().info( "\tSkipping Texture Dictionary: #{dictname}" )
  end
  dict_output[:combine] << output_filename
  dict_output
end
#
# Static call for generating preview data from incoming data and results of an asset combine
#
# map_zip_nodes - the map zip nodes involved in the preview
# geo_content_nodes - the geometry content nodes for preview
# txd_content_nodes - the texture content nodes for preview
# patch_content_group - the content group that will contain the content for preview
#
# Generate preview data from the incoming map zips and the results of an
# asset combine run.
#
# config              - project configuration (supplies the ind target).
# branch              - branch object used to run the processor.
# map_zip_nodes       - the map zip nodes involved in the preview.
# geo_content_nodes   - geometry content nodes for preview.
# txd_content_nodes   - texture content nodes for preview.
# patch_content_group - content group that receives the preview content.
#
# Returns patch_content_group.
def self.build_preview( config, branch, map_zip_nodes, geo_content_nodes, txd_content_nodes, patch_content_group )
  processed_zip_nodes = get_processed_map_zip_nodes_from_map_zip_nodes( map_zip_nodes )
  combine_xml_pathname = run_assetcombine( branch, processed_zip_nodes )
  build_patch_content_data( config, geo_content_nodes, txd_content_nodes, combine_xml_pathname, patch_content_group )
  patch_content_group
end
#
# Static call to determine the processed_map_zip nodes associated with a collection of map_zip nodes
#
# Determine the ProcessedMapZip nodes associated with a collection of
# map zip nodes.
#
# map_zip_nodes - nodes whose outputs are scanned.
#
# Returns a de-duplicated Array of Pipeline::Content::ProcessedMapZip
# nodes, in first-seen order.
def self.get_processed_map_zip_nodes_from_map_zip_nodes( map_zip_nodes )
  collected = []
  map_zip_nodes.each do |zip_node|
    zip_node.outputs.each do |candidate|
      collected << candidate if ( candidate.is_a?( Pipeline::Content::ProcessedMapZip ) )
    end
  end
  collected.uniq
end
#
# Static call to run the assetcombine on an array of ProcessedMapZip nodes and return the pathname of the output file
#
# Run the AssetCombineProcessor over the SceneXml files belonging to an
# array of ProcessedMapZip nodes.
#
# branch                  - branch object providing the environment (in_env).
# processed_map_zip_nodes - Array of Content::ProcessedMapZip nodes.
#
# Returns the pathname of the processor's XML output file.
# NOTE(review): a processor failure is only logged; the (possibly stale)
# output pathname is still returned — confirm callers tolerate this.
def self.run_assetcombine( branch, processed_map_zip_nodes )
# Gather all required SceneXml files from the MapZip inputs.
scenexml_files = []
processed_map_zip_nodes.each do |processed_map_zip_node|
processed_map_zip_node.inputs.each do |processed_input|
next unless ( processed_input.is_a?( Content::MapZip ) )
processed_input.children.each do |processed_input_child|
next unless ( processed_input_child.is_a?( Pipeline::Content::MapSceneXml ) )
scenexml_files << processed_input_child.filename
end
end
end
# Output and scenes-list files live in the shared raw cache directory.
cache_dir_raw = MapConverter::cache_dir_raw( branch )
output_pathname = OS::Path::combine( cache_dir_raw, 'asset_combine.xml' )
scenes_pathname = OS::Path::combine( cache_dir_raw, 'asset_combine_scenes.txt' )
# Write Scenes Input file (one SceneXml pathname per line).
File::open( scenes_pathname, 'w' ) do |fp|
scenexml_files.each do |filename|
fp.puts filename
end
end
# Substitute $(output)/$(cache) in @@ARGS via the branch environment and
# invoke the external processor.
branch.in_env do |e|
e.add( 'output', output_pathname )
e.add( 'cache', cache_dir_raw )
command = e.subst( "#{@@PROCESSOR} #{@@ARGS} --scenes #{scenes_pathname}" )
MapConverter::log().info( "Processor command: #{command}" )
status, out, err = OS::start( command )
if ( status.exitstatus > 0 ) then
MapConverter::log().error( "Map Asset Combine processing failed." )
end
end
output_pathname
end
#
# Static call to add content to the patch_content_group based on the incoming data (geometry, textures and assetcombine result)
#
# Add content to patch_content_group based on the incoming geometry,
# texture and assetcombine data.
#
# Drawables/TXDs that participate in a merged dictionary cause that
# dictionary to be rebuilt with the patched content substituted in;
# everything else is pushed through to the patch group unchanged.
def self.build_patch_content_data( config, geo_content_nodes, txd_content_nodes, asset_combine_output_pathname, patch_content_group )
  # Pull the drawable nodes into a flat array.
  drawable_nodes_to_patch = []
  geo_content_nodes.each do |geo_content_node|
    geo_content_node.children.each do |geo_content_node_child|
      geo_content_node_child.children.each do |geo_content_node_grandchild|
        drawable_nodes_to_patch << geo_content_node_grandchild
      end
    end
  end
  # Pull the texture nodes into a flat array.
  txd_nodes_to_patch = []
  txd_content_nodes.each do |txd_content_node|
    txd_content_node.children.each do |txd_content_node_child|
      txd_nodes_to_patch << txd_content_node_child
    end
  end
  # From the assetcombine data we build:
  # * a flat array of drawable nodes that are part of a merged drawable dictionary
  # * a flat array of txd nodes that are part of a merged texture dictionary
  # * a flat array of merged content (to prevent duplicate conversions)
  # We also rebuild any merged data during this stage and add it to the
  # patch_content_group.
  drawable_nodes_included_in_dictionary = []
  txd_nodes_included_in_dictionary = []
  merged_content = []
  File::open( asset_combine_output_pathname ) do |fp|
    xmldoc = Document::new( fp )
    xmldoc.elements.each( '/AssetCombineProcessor/Map' ) do |map_node|
      map_cache_dir = map_node.attributes['cachedir']
      map_node.elements.each( 'DrawableDictionary' ) do |dd_node|
        rebuild_required = false
        dd_node.elements.each( 'Inputs/Input' ) do |dd_input_node|
          ddi_name = dd_input_node.attributes['name']
          drawable_nodes_to_patch.each do |drawable_node_to_patch|
            if ( 0 == ddi_name.casecmp( drawable_node_to_patch.name ) )
              rebuild_required = true
              drawable_nodes_included_in_dictionary << drawable_node_to_patch
            end
          end
        end
        if( rebuild_required and nil == merged_content.find_index( dd_node ) ) then
          # BUGFIX: record the node when we build it.  Previously it was
          # only appended in the already-seen else branch, which could
          # never execute, so the duplicate-build guard had no effect.
          merged_content << dd_node
          drawable_dictionary_pathname = rebuild_drawable_dictionary( dd_node, drawable_nodes_to_patch, map_cache_dir )
          MapConverter::log().info( "Rebuilt merged drawable dictionary (with patched content) at: #{drawable_dictionary_pathname}" )
          patch_content_group.add_child( Content::Zip::from_filename_and_target( drawable_dictionary_pathname, config.project.ind_target ) )
        end
      end
      map_node.elements.each( 'TextureDictionary' ) do |td_node|
        rebuild_required = false
        td_node.elements.each( 'Inputs/Input' ) do |td_input_node|
          tdi_name = td_input_node.attributes['name']
          txd_nodes_to_patch.each do |txd_node_to_patch|
            if ( 0 == tdi_name.casecmp( txd_node_to_patch.name ) )
              rebuild_required = true
              txd_nodes_included_in_dictionary << txd_node_to_patch
            end
          end
        end
        if( rebuild_required and nil == merged_content.find_index( td_node ) ) then
          # BUGFIX: see drawable dictionary guard above — same inversion.
          merged_content << td_node
          texture_dictionary_pathname = rebuild_texture_dictionary( td_node, txd_nodes_to_patch, map_cache_dir )
          MapConverter::log().info( "Rebuilt merged texture dictionary (with patched content) at: #{texture_dictionary_pathname}" )
          patch_content_group.add_child( Content::Zip::from_filename_and_target( texture_dictionary_pathname, config.project.ind_target ) )
        end
      end
    end
  end
  # Any drawable content that wasn't part of merged data is pushed
  # straight through to the patch_content_group.
  drawable_nodes_to_patch.each do |drawable_node_to_patch|
    if( nil == drawable_nodes_included_in_dictionary.find_index( drawable_node_to_patch ) ) then
      patch_content_group.add_child( drawable_node_to_patch ) # no matching dd so push through as normal
    end
  end
  # Any txd content that wasn't part of merged data is pushed straight
  # through to the patch_content_group.
  txd_nodes_to_patch.each do |txd_node_to_patch|
    if( nil == txd_nodes_included_in_dictionary.find_index( txd_node_to_patch ) ) then
      patch_content_group.add_child( txd_node_to_patch ) # no matching td so push through as normal
    end
  end
end
#
# Rebuild the drawable dictionary refered to by dd_node, substituting content from drawable_nodes_to_patch where appropriate
#
# Rebuild the drawable dictionary referred to by dd_node, substituting
# content from drawable_nodes_to_patch where appropriate.
#
# dd_node                 - REXML element for the <DrawableDictionary>.
# drawable_nodes_to_patch - content nodes whose data replaces the
#                           matching dictionary inputs (name match,
#                           case-insensitive).
# map_cache_dir           - cache directory of the owning map.
#
# Returns the pathname of the rebuilt .idd.zip.
def self.rebuild_drawable_dictionary( dd_node, drawable_nodes_to_patch, map_cache_dir )
dictionary_name = dd_node.attributes['name']
lod_level = dd_node.attributes['lod']
output_filename = OS::Path::combine( map_cache_dir, "#{dictionary_name}.idd.zip" )
dd_inputs = []
dd_node.elements.each( 'Inputs/Input' ) do |dd_input_node|
input_name = dd_input_node.attributes['name']
source_filename = dd_input_node.attributes['source']
input_substituted = false
# Prefer patched content: extract the patched drawable's files in
# place of the dictionary's original input.
drawable_nodes_to_patch.each do |drawable_node_to_patch|
if ( 0 == input_name.casecmp( drawable_node_to_patch.name ) )
input_substituted = true
input_cache_dir = OS::Path::combine( dd_input_node.attributes['cachedir'], dictionary_name, input_name )
FileUtils::mkdir_p( input_cache_dir ) unless ( File::directory?( input_cache_dir ) )
drawable_files = ProjectUtil::data_zip_extract3( drawable_node_to_patch.filename, input_cache_dir, true )
drawable_files.each do |drawable_file|
# Each entry is packed under a <input_name>/ subfolder in the zip.
input_file = {}
input_file[:src] = drawable_file
input_file[:dst] = OS::Path::combine( input_name, OS::Path::get_filename( drawable_file ) )
dd_inputs << input_file
end
end
end
if( not input_substituted ) then
# Extract the idr.zip from its container
input_cache_dir = dd_input_node.attributes['cachedir']
idr_zip_list = ProjectUtil::data_zip_extract3( source_filename, input_cache_dir, true, "#{input_name}.idr.zip" )
if ( idr_zip_list.size() > 0 ) then
# Next, extract the contents of the idr.zip itself
input_filename = OS::Path::combine( input_cache_dir, "#{input_name}.idr.zip" )
input_cache_dir = OS::Path::combine( map_cache_dir, dictionary_name, input_name )
FileUtils::mkdir_p( input_cache_dir ) unless ( File::directory?( input_cache_dir ) )
drawable_files = ProjectUtil::data_zip_extract3( input_filename, input_cache_dir, true )
drawable_files.each do |drawable_file|
input_file = {}
input_file[:src] = drawable_file
input_file[:dst] = OS::Path::combine( input_name, OS::Path::get_filename( drawable_file ) )
dd_inputs << input_file
end
end
end
end
# Create custom RBS for Ragebuilder and LOD object processing.
unless ( lod_level.nil? ) then
custom_filename = OS::Path::combine( map_cache_dir, 'custom.rbs' )
custom_finish_filename = OS::Path::combine( map_cache_dir, 'custom_finish.rbs' )
File::open( custom_filename, 'w' ) do |fp|
fp.puts( "set_dwd_type( \"#{lod_level}\" )" )
end
File::open( custom_finish_filename, 'w' ) do |fp|
fp.puts( "set_dwd_type( \"HD\" )" )
end
dd_inputs << custom_filename
dd_inputs << custom_finish_filename
end
ProjectUtil::data_zip_create( output_filename, dd_inputs )
output_filename
end
#
# Rebuild the texture dictionary refered to by td_node, substituting content from txd_nodes_to_patch where appropriate
#
# Rebuild the texture dictionary referred to by td_node, substituting
# content from txd_nodes_to_patch where appropriate.
#
# td_node            - REXML element for the <TextureDictionary>.
# txd_nodes_to_patch - content nodes whose data replaces the matching
#                      dictionary inputs (name match, case-insensitive).
# map_cache_dir      - cache directory of the owning map.
#
# Returns the pathname of the rebuilt .itd.zip.
def self.rebuild_texture_dictionary( td_node, txd_nodes_to_patch, map_cache_dir )
dictionary_name = td_node.attributes['name']
output_filename = OS::Path::combine( map_cache_dir, "#{dictionary_name}.itd.zip" )
td_inputs = []
td_node.elements.each( 'Inputs/Input' ) do |input|
input_name = input.attributes['name']
input_cache_dir = input.attributes['cachedir']
input_substituted = false
# Prefer patched content over the original dictionary input.
txd_nodes_to_patch.each do |txd_node_to_patch|
if ( 0 == input_name.casecmp( txd_node_to_patch.name ) )
input_substituted = true
td_inputs += ProjectUtil::data_zip_extract3( txd_node_to_patch.filename, input_cache_dir, true )
end
end
if( not input_substituted ) then
# Fall back to the original: extract the itd.zip from its container,
# then its contents.
source_filename = input.attributes['source']
input_list = ProjectUtil::data_zip_extract3( source_filename, input_cache_dir, true, "#{input_name}.itd.zip" )
if ( input_list.size() > 0 ) then
# This means the file was extracted successfully.
input_filename = OS::Path::combine( input_cache_dir, "#{input_name}.itd.zip" )
FileUtils::mkdir_p( input_cache_dir ) unless ( File::directory?( input_cache_dir ) )
td_inputs += ProjectUtil::data_zip_extract3( input_filename, input_cache_dir, true )
end
end
end
ProjectUtil::data_zip_create( output_filename, td_inputs )
output_filename
end
end
end # Converters module
end # Resourcing module
end # Pipeline module
# %RS_TOOLSLIB%/pipeline/resourcing/converters/converter_map_assetcombine.rb