Files
gtav-src/tools_ng/lib/pipeline/resourcing/converters/converter_rage.rb
T
2025-09-29 00:52:08 +02:00

1183 lines
47 KiB
Ruby
Executable File

#
# File:: %RS_TOOLSLIB%/pipeline/resourcing/converters/converter_rage.rb
# Description:: Platform resourcing/conversion pipeline.
#
# Author:: David Muir <david.muir@rockstarnorth.com>
# Date:: 17 April 2009
#
# Content convert specifics for rage assets. This class should not be used
# directly - use the ConvertSystem class instead.
#
#-----------------------------------------------------------------------------
# Uses
#-----------------------------------------------------------------------------
require 'pipeline/config/projects'
require 'pipeline/os/path'
require 'pipeline/os/start'
require 'pipeline/projectutil/data_zip'
require 'pipeline/projectutil/data_rpf'
require 'pipeline/resourcing/common'
require 'pipeline/resourcing/converter'
require 'pipeline/resourcing/converters/converter_rage_packets'
require 'pipeline/resourcing/converters/converter_rage_manifest'
require 'pipeline/resourcing/converters/converter_rage_texturelink'
require 'pipeline/resourcing/path'
require 'pipeline/util/incredibuild'
require 'pipeline/util/incredibuild_xge'
require 'pipeline/util/rage'
require 'systemu'
include Pipeline::Util
#-----------------------------------------------------------------------------
# Implementation
#-----------------------------------------------------------------------------
module Pipeline
module Resourcing
module Converters
#
# == Description
# RAGE converter to convert independent data into platform-specific files
# using the RageBuilder executable.
#
# This class can make use of Xoreax's IncrediBuild XGE to significantly speed
# up the data conversion process.
#
# If XGE is not installed on the client or disabled in the installer then the
# data conversion will happen locally.
#
class RageConverter < ConverterBase
#-------------------------------------------------------------------------
# Attributes
#-------------------------------------------------------------------------
attr_accessor :use_xge
attr_reader :log_filename
# Node "type" discriminators passed to PackfileMetadata#add_imap_group_property
# when emitting manifest metadata (see build_pack). Values are opaque integer
# codes consumed by the parCodeGen-side reader.
module ParcodegenNodeTypes
# Plain text payload (e.g. '|'-joined activation flags).
TextType = 0
# Single attribute value payload (e.g. an ActiveHours value).
AttributeType = 1
# Array-of-values payload (e.g. a list of bounds names).
ArrayType = 2
end
#-------------------------------------------------------------------------
# Methods
#-------------------------------------------------------------------------
#
# Setup the converter for a project. The converter uses the project's
# enabled targets for conversions.
#
# project:: Pipeline::Project the converter operates on.
# branch::  branch object providing name/paths for this conversion.
#
def initialize( project, branch )
  super( project, branch )
  @conf = Pipeline::Config::instance()
  @r = Pipeline::RageUtils.new( @project, @branch.name )
  @items = []             # Queued ConvertFile / ConvertDirectory work items.
  @items_dst = []         # Destination paths already queued (de-duplication).
  @packs = []             # OpenedPack objects rebuilt after conversion.
  # Use XGE if its installed and enabled in local.xml
  @use_xge = ( @conf.use_xge and Incredibuild.is_installed?() )
  # BUGFIX: removed a dead `@tools = {}` assignment that was immediately
  # overwritten by the parse() result below.
  @tools = RageConvertTool.parse( @project, @branch.name )
  # Pack-splitting settings are taken from xbox360 when available,
  # falling back to win64; otherwise @split_packs stays nil.
  @split_packs = @tools['xbox360'].split_packs if ( @tools.has_key?( 'xbox360' ) )
  @split_packs = @tools['win64'].split_packs if ( @tools.has_key?( 'win64' ) and @split_packs.nil? )
  @converted_content = []
end
# Return whether this converter can convert the specified content node.
# Both file targets (Content::Target) and directories (Content::Directory)
# are supported, provided their target is not an IndependentTarget —
# independent data is handled elsewhere.
def can_convert?( content )
  supported_node = content.is_a?( Content::Target ) || content.is_a?( Content::Directory )
  supported_node && !content.target.is_a?( IndependentTarget )
end
# Prebuild handler; here we preprocess the content and extract the
# pack files, constructing our XGE packets as required. We also sync
# some assets, i.e. as specified by asset links in zip files.
def prebuild( )
  log = RageConverter::log()
  log.info( "Prebuilding..." )
  # Unpack; and create dependencies list(s).
  log.info( "\tUnpack and dependency analysis..." )
  preprocess_item_list()
  # Sync dependencies from Perforce.
  sync_dependencies()
  log.info( "Prebuild complete." )
end
#
# Builds and converts all the added content. This method can raise a
# BuildError exception which indicates a fatal error in producing the
# image file.
#
# Yields through to build_xge / build_local when a block is given.
# Returns { :success => true/false } on the non-exceptional path.
#
def build( &block )
  result = true
  begin
    if @use_xge then
      result = build_xge( &block )
    else
      result = build_local( &block )
    end
  rescue BuildError
    result = false
    # BUGFIX: this was a bare `throw`, which is Ruby's catch/throw
    # control-flow primitive and raises ArgumentError instead of
    # propagating the BuildError. A bare `raise` re-raises the
    # exception currently being rescued.
    raise
  ensure
    # Clear out converted_content
    @converted_content = []
  end
  { :success => result }
end
# Remove every queued content node from the build list, emptying the
# existing array in place (callers may hold a reference to it).
def clear( )
  @converted_content.clear
end
# Add the content to the build list (if valid).
#
# For each input of +content+ (either a file Target or a Directory) the
# source and destination paths are expanded via the respective target
# environments, then queued for conversion through convert_to(). Nodes
# successfully queued are remembered in @converted_content.
#
# Returns true when +content+ is a Content::Target (even if no inputs
# were queued), false otherwise.
def add_content( content )
if ( content.is_a?( Content::Target ) ) then
content.inputs.each do |input|
if ( input.is_a?( Content::Target ) ) then
src_file = ""
tgt_file = ""
# Expand the input filename inside the input target's environment
# (substitutes environment variables in the constructed path).
input.target.in_env() do |e|
input_filename = OS::Path.combine( input.path, input.name ) + ".#{input.extension}"
src_file = e.subst( OS::Path.combine( input_filename ) )
end
# Expand the destination filename in the output target's environment.
content.target.in_env() do |e|
content_filename = OS::Path.combine( content.path, content.name ) + ".#{content.extension}"
tgt_file = e.subst( content_filename )
end
if ( convert_to( content, src_file, tgt_file ) ) then
@converted_content << content
end
elsif ( input.is_a?( Content::Directory ) ) then
src_dir = ''
dst_file = ''
input.target.in_env() do |e|
src_dir = e.subst( input.absolute_path )
end
content.target.in_env() do |e|
dst_file = e.subst( content.filename )
end
if ( convert_to( content, src_dir, dst_file ) ) then
@converted_content << content
end
end
end
true
else
RageConverter::log().warn( "Cannot convert content node #{content} as its not a Target (#{content.class})." )
false
end
end
#
# Return our last build output. From XGE we return the contents of the
# conversion log; if we built locally then we return an empty string.
#
def get_last_output( )
  return '' unless ( @use_xge )
  # Guard against @log_filename never having been set (build_xge not run
  # yet) or the file being missing. BUGFIX: File::exists? is deprecated
  # and was removed in Ruby 3.2; File.exist? is the supported form.
  return '' unless ( defined?( @log_filename ) and @log_filename and ::File.exist?( @log_filename ) )
  # Read the whole log in one go (equivalent to readlines().join()).
  ::File.read( @log_filename )
end
#
# Return the RageConverter log object, constructing the shared instance
# lazily on first use.
#
def RageConverter::log( )
  @@log ||= Log.new( "convert_rage" )
end
#-------------------------------------------------------------------------
# Protected
#-------------------------------------------------------------------------
protected
@@log = nil
#-------------------------------------------------------------------------
# Constants
#-------------------------------------------------------------------------
PACK_MNT = 'pack:/'
RPF_EXT = 'rpf'
ZIP_EXT = 'zip'
PACK_EXTS = [ RPF_EXT, ZIP_EXT ]
# Array of file extension strings that may be found in zip files that
# should NOT be passed through to the convert; they will be extracted.
SKIP_EXTS = [ 'rbs' ]
# Array of RPFs that should not be extracted but converted locally instead.
# DHM 1/12/2009 - FIX ME
# This should be put in the project's config.xml file or something.
LOCAL_RPFS = [ ]
#-------------------------------------------------------------------------
# Utility Classes
#-------------------------------------------------------------------------
#
# == Description
# Utility class to store information about an opened Pack (RPF) file,
# together with its target (see Pipeline::Target).
#
class OpenedPack
  attr_reader :content, :src, :dst, :is_dir
  attr_accessor :items

  # src::     source pack filename or directory path.
  # dst::     destination (platform) pack filename.
  # content:: originating content node.
  def initialize( src, dst, content )
    @src, @dst, @content = src, dst, content
    @items = []
    # Directory packs get rebuilt differently from zip-file packs.
    @is_dir = File.directory?( src )
  end
end
#
# Convert a piece of content from src (filename/directory) to dst
# (filename), if the conversion is required.
#
# Returns false when need_convert() says nothing is to be done; true
# once the item has been queued (or an item for the same destination
# was already queued).
#
# Raises ArgumentError when content is not a Pipeline::Content::Target.
#
def convert_to( content, src, dst )
  # BUGFIX: was `throw ArgumentError::new(...)` — throw is the
  # catch/throw primitive and does not raise the given exception.
  raise ArgumentError::new( "Invalid content node, must be Target (#{content.class}).") \
    unless ( content.is_a?( Pipeline::Content::Target ) )
  return false if !need_convert( content, src, dst )
  if ( File::directory?( src ) ) then
    # Directory conversion; queue once per unique destination path.
    if ( not @items_dst.include?( dst ) ) then
      real_dst = dst
      # Preview builds redirect output into the branch preview folder.
      real_dst = @branch.preview if @c.preview == true
      @items << ConvertDirectory::new( content, src, real_dst )
      @items_dst << real_dst
    end
  elsif ( File::file?( src ) or not File.exist?( src ) ) then
    if ( not @items_dst.include?( dst ) ) then
      real_dst = dst
      real_dst = OS::Path.combine( @branch.preview, OS::Path::get_filename( src ) ) if @c.preview == true
      @items << ConvertFile::new( content, src, real_dst )
      @items_dst << real_dst
    end
  end
  # Reset our content's dirty flag
  content.set_dirty( false, true ) if ( content.dirty? )
  true
end
#
# Pre-process our ConvertFile array; this step allows us to gather all
# inputs and only extract the independent data once to cover all targets.
#
# I.e. when multiple targets are enabled we will have multiple ConvertFile
# objects in our @items array with the same source filename, different
# destination. We have postponed the extraction and will do it here, once
# for unique IMG/RPFs in the ConvertFile list, replacing those ConvertFile
# objects with new ones for the corresponding targets.
#
def preprocess_item_list( )
unique_source_packs = {}
processed_items = []
@items.each do |convert_item|
if ( convert_item.is_a?( ConvertFile ) ) then
ext = OS::Path::get_extension( convert_item.src )
# Local-only RPFs, non-pack files and core assets pass straight
# through without extraction.
if ( LOCAL_RPFS.include?( OS::Path::get_filename( convert_item.src ) ) or
( not PACK_EXTS.include?( ext ) ) or
( Resourcing::is_core_asset_in_zip?( convert_item.src ) ) ) then
processed_items << convert_item
next
end
pack_obj = nil
case ext
when RPF_EXT
# BUGFIX: was `throw RuntimeError::new(...)`; throw is the
# catch/throw primitive and does not raise the given exception.
raise RuntimeError::new( "RPFs cannot be converted; they are no longer an intermediate format." )
when ZIP_EXT
# All well.
else
# BUGFIX: same throw-vs-raise fix as above.
raise RuntimeError::new( 'Invalid pack extension. Internal error.' )
end
content = convert_item.content
source = convert_item.src
destination = convert_item.dst
target = convert_item.target
if ( unique_source_packs.has_key?( source ) ) then
# Queue new target ConvertItems without pack extraction for
# the current ConvertFile's target.
pack_store = OpenedPack.new( source, destination, content )
unique_source_packs[source].each do |file|
# Determine if the file needs to be converted. This is now handled
# automatically by comparing the file modified date/time.
target_filename = OS::Path::combine(
OS::Path::get_directory( file ),
content.target.platform,
OS::Path::get_filename( file ) )
dst = Resourcing::convert_independent_filename_to_platform( target_filename, content.target )
real_dst = dst
real_dst = OS::Path.combine( @branch.preview, OS::Path::get_filename( dst ) ) if @c.preview == true
if ( need_convert( content, file, real_dst ) ) then
item = ConvertFile::new( content, file, real_dst )
processed_items << item
end
end
# NOTE(review): this aliases the *entire* accumulated processed
# item list into the pack store, not just this pack's items.
# Downstream code does not appear to read pack.items; confirm
# before changing this behaviour.
pack_store.items = processed_items
@packs << pack_store
else
# Create list of pack content files.
unique_source_packs[source] = []
pack_store = OpenedPack.new( source, destination, content )
cache_img = get_cache_dir( source )
# Extract the zip once into the cache; all targets reuse the result.
file_list = ProjectUtil::data_zip_extract3( source, cache_img, true )
file_list.each do |file|
ext = OS::Path::get_extension( file )
next if ( SKIP_EXTS.include?( ext ) )
unique_source_packs[source] << file
target_filename = OS::Path::combine(
OS::Path::get_directory( file ),
content.target.platform,
OS::Path::get_filename( file ) )
dst = Resourcing::convert_independent_filename_to_platform( target_filename, content.target )
real_dst = dst
real_dst = OS::Path.combine( @branch.preview, OS::Path::get_filename( dst ) ) if @c.preview == true
if ( need_convert( content, file, real_dst ) ) then
item = ConvertFile::new( content, file, real_dst )
processed_items << item
end
end
pack_store.items = processed_items
@packs << pack_store
end
elsif ( convert_item.is_a?( ConvertDirectory ) ) then
content = convert_item.content
source = convert_item.src
destination = convert_item.dst
target = convert_item.target
# Queue new target ConvertItems without pack extraction for
# the current ConvertFile's target.
pack_store = OpenedPack.new( source, destination, content )
content.inputs[0].inputs.each do |input|
# Determine if the file needs to be converted. This is now
# handled automatically by comparing the file modified
# timestamp.
target_filename = OS::Path::combine(
get_cache_dir( source ),
content.target.platform,
input.filename.sub( source, '' ) )
dst = Resourcing::convert_independent_filename_to_platform( target_filename, content.target )
real_dst = dst
real_dst = OS::Path.combine( @branch.preview, OS::Path::get_filename( dst ) ) if @c.preview == true
if ( need_convert( content, input.filename, real_dst ) ) then
item = ConvertFile::new( content, input.filename, real_dst )
processed_items << item
end
end
pack_store.items = processed_items
@packs << pack_store
else
RageConverter::log().error( "Internal error: invalid convert item objects (#{convert_item.class})." )
end
end
# Switch our old items list with our update preprocessed item list.
@items = ( [] + processed_items )
end
#
# Sync asset dependencies from Perforce.
#
# Syncs, in order: metadata struct definitions, project metadata,
# fragment tuning data, and finally any DDS files referenced through
# .tcl texture links inside queued .itd.zip texture dictionaries.
# Each sync step is individually rescued so a single failure does not
# abort the whole prebuild.
#
def sync_dependencies( )
RageConverter::log().info( " Syncing asset dependencies..." )
Dir::chdir( @project.root ) do
p4 = SCM::Perforce::new( )
begin
p4.connect( )
begin
RageConverter::log().info( "Syncing metadata structs info" )
p4.run_sync( OS::Path::combine( @toolsconfig, 'content', 'extstructs*' ) )
rescue Exception => ex
RageConverter::log().exception( ex, 'Metadata structs info exception.' )
end
begin
RageConverter::log().info( "Syncing project metadata" )
p4.run_sync( OS::Path::combine( @branch.assets, 'metadata', '...' ) )
rescue Exception => ex
# AJM: This should be undone when bug 806267 is fixed
#RageConverter::log().exception( ex, 'Metadata sync exception.' )
end
begin
RageConverter::log().info( "Syncing fragment tuning data" )
p4.run_sync( OS::Path::combine( @branch.assets, 'fragments', '...' ) )
rescue Exception => ex
RageConverter::log().exception( ex, 'Fragment tuning sync exception.' )
end
# Iterate through items to determine which assets we need to sync.
# I.e. link files for DDS references.
dds_files_to_sync = []
@branch.in_env do |env|
@items.each do |item|
# Texture Dictionaries
if ( item.src.ends_with( '.itd.zip' ) ) then
RageConverter::log().debug( " Checking texture links: #{item.src}..." )
if ( not File::exists?( item.src ) ) then
# Texture Dictionary doesn't exists so we should not open it.
RageConverter::log().error( " Texture dictionary #{item.src} does not exist. Check the object and textures associated with it." )
next
else
# Texture Dictionary exists so we can find dependencies.
# Partition the zip's contents into .tcl links and .dds
# textures (basenames lower-cased for matching below).
tcl_basenames = []
dds_basenames = []
filelist = ProjectUtil::data_zip_filelist( item.src )
filelist.each do |file|
if( file.ends_with( '.tcl' ) ) then
tcl_basenames << OS::Path.get_basename(file).downcase()
elsif( file.ends_with( '.dds' ) ) then
dds_basenames << OS::Path.get_basename(file).downcase()
end
end
# A .tcl with no matching .dds inside the zip references
# external DDS data; collect those paths for syncing.
tcl_basenames.each do |tcl_basename|
if( dds_basenames.find_index( tcl_basename ) == nil ) then
#open the tcl file and paths to any dds files its tcs references
tcl_data = ProjectUtil::data_zip_readfile( item.src, (tcl_basename + ".tcl" ) )
dds_inputs = TextureLink::get_dds_inputs_from_tcl_data( env, tcl_data )
dds_inputs.each do |dds_input|
dds_files_to_sync << dds_input
end
end
end
end
end
end
end
begin
RageConverter::log().info( "Syncing linked DDS files (#{dds_files_to_sync.size})" )
p4.run_sync( dds_files_to_sync ) if ( dds_files_to_sync.size > 0 )
rescue Exception => ex
# AJM: This should be undone when bug 806267 is fixed
#RageConverter::log().exception( ex, 'DDS sync exception.' )
end
rescue Exception => ex
# Error fetching; but this is temp code.
RageConverter::log().exception( ex, 'Dependency sync exception.' )
ensure
p4.disconnect( )
end
end
end
#
# Build the content list using Xoreax's XGE agent to parallelise the build.
#
# Creates conversion packet files from @items, wraps them in an XGE
# project with one remote and one forced-local task group per enabled
# platform, runs XGE, then clones source timestamps onto outputs and
# rebuilds pack files (unless previewing). Yields ( content_node,
# success ) for each content node when a block is given.
#
# Returns true when every item converted successfully, false otherwise.
#
def build_xge( &block )
result = true
if ( 0 == @items.size ) then
RageConverter::log().info( "Nothing to convert." )
else
RageConverter::log().info( "Creating XGE conversion packets." )
xge_folder = XGE::get_temp_dir( @project, @branch.name )
xge_packet_folder = OS::Path::combine( xge_folder, 'convert' )
filename = OS::Path.combine( xge_packet_folder, 'convert.xml' )
@log_filename = OS::Path.combine( xge_packet_folder, 'convert.log' )
# Packets are capped at 4MB of payload each.
xge_packets = ConversionPacketList.new( xge_packet_folder, 4 * 1024 * 1024 )
@items.each do |item|
xge_packets.add_item( item )
end
xge_packets.create( )
RageConverter::log().info( "Creating XGE task" )
# Construct XGE Project
xge_project = XGE::Project.new( "#{@project.uiname} #{@branch.name} Asset Conversion" )
xge_taskgroups = {}
# Construct XGE Environments and Tools
@project.targets.each do |platform, target|
next unless ( target.enabled )
xge_env = XGE::Environment.new( "env_#{platform}" )
xge_env.add_variable( 'BinVar', @conf.toolsbin )
xge_env.add_variable( 'Platform', platform )
# tools[0] = distributable Ragebuilder, tools[1] = local-only variant.
xge_env.add_tool( XGE::Tool::from_project_ragebuilder( @project, @branch, target, true ) )
xge_env.add_tool( XGE::Tool::from_project_ragebuilder_local( @project, @branch, target, true ) )
# We create two task groups for each platform; one for
# remote conversion and the other to force local conversion.
xge_taskgroups[platform] = XGE::TaskGroup.new( "#{platform} Data Conversion", [], xge_env, xge_env.tools[0], '$(BinVar)' ) \
unless ( xge_taskgroups.has_key?( platform ) )
xge_taskgroups["#{platform}_local"] = XGE::TaskGroup.new( "#{platform} Data Conversion [LOCAL]", [], xge_env, xge_env.tools[1], '$(BinVar)' ) \
unless ( xge_taskgroups.has_key?( "#{platform}_local" ) )
end
# Construct XGE TaskGroups (per platform)
xge_packets.packets.each_pair do |platform, packets|
packets.each do |packet|
task_size = (packet.data_size.to_i / 1024.0).round
task_name = "Packet #{packet.name} (#{task_size}KB)"
task_name += " [LOCAL]" if ( packet.local_only )
task = XGE::Task.new( task_name, packet.filename )
task.caption = "#{platform} #{task_name}"
task.parameters = packet.filename
# Add the task to the right task group depending on the
# packet's "local_only" flag.
# xge_taskgroups will not have that platform if it has been disabled.
if packet.local_only == false then
xge_taskgroups[platform].tasks << task if xge_taskgroups.has_key?( platform )
else
xge_taskgroups["#{platform}_local"].tasks << task if xge_taskgroups.has_key?( "#{platform}_local" )
end
end
end
xge_taskgroups.each_pair do |key, group|
xge_project.add_task( group )
end
xge_project.write( filename )
RageConverter::log().info( "Starting XGE conversion process." )
if ( not XGE::start( filename, @log_filename ) ) then
RageConverter::log().error( "Platform conversion failed. Check Build Monitor for errors." )
# DHM 13 August 2009
# Remove return statement, as then we don't build the
# final image/RPF.
result = false
# Only interactive users get a message box; the automated build
# account must not block on UI dialogs.
if ( not @conf.user.username.downcase.include?( User::ASSETBUILDER_USER) )
error_msg = "XGE Platform conversion failed. Check Build Monitor or #{@log_filename} for errors."
GUI::MessageBox::error("XGE Error Notification", error_msg)
end
end
RageConverter::log().info( "Setting file timestamps." )
# Clone all the modified times...
content_status = {}
@items.each do |item|
src_exists = File::exists?( item.src )
dst_exists = File::exists?( item.dst )
RageConverter::log().error( "Source file does not exist: #{item.src} for cloning modified time." ) \
unless ( src_exists )
RageConverter::log().error( "Target file does not exist: #{item.dst} for cloning modified time." ) \
unless ( dst_exists )
if ( src_exists and dst_exists ) then
# We have succeeded but a previous item for this node might
# have failed so don't overwrite that information.
content_status[item.content] = true unless content_status.has_key?( item.content )
else
# Since we have failed we always mark this content node as
# failing.
content_status[item.content] = false
next
end
# Give the platform output the source's mtime so up-to-date checks
# can skip it on the next run.
File::utime( File.mtime(item.src), File.mtime(item.src), item.dst )
end
# Invoke our block for each content's success.
content_status.each_pair do |node, success|
yield( node, success ) if ( block_given? )
result = false unless ( success )
end
end
if false == @c.preview then
RageConverter::log().info( "Rebuilding pack files after conversion." )
# Build any resultant packs
@packs.each do |pack|
RageConverter::log().info( "Rebuilding pack: #{pack.dst}" )
build_pack( pack )
end
end
RageConverter::log().info( "XGE Rage Convert finished." )
result
end
#
# Build the content list locally invoking Ragebuilder directly.
#
# Mirrors build_xge() but runs each conversion packet serially on this
# machine via the local Ragebuilder tool, then clones source timestamps
# onto outputs and rebuilds pack files (unless previewing). Yields
# ( content_node, success ) per content node when a block is given.
#
# Returns true when every item converted successfully, false otherwise.
#
def build_local( &block )
result = true
if ( 0 == @items.size ) then
RageConverter::log().info( "Nothing to convert." )
else
RageConverter::log().info( "Creating XGE conversion packets." )
local_folder = get_local_temp_dir( @project, @branch )
local_packet_folder = OS::Path::combine( local_folder, 'convert' )
# Packets are capped at 4MB of payload each (same as XGE path).
local_packets = ConversionPacketList.new( local_packet_folder, 4 * 1024 * 1024 )
@items.each do |item|
local_packets.add_item( item )
end
local_packets.create( )
# Construct XGE TaskGroups (per platform)
RageConverter::log().info( "Starting local conversion process." )
failed = false
local_packets.packets.each_pair do |platform, packets|
packets.each do |packet|
# Run each packet through the local Ragebuilder; a non-zero exit
# status marks the whole build as failed but does not stop it.
tool = XGE::Tool::from_project_ragebuilder_local( @project, @branch, packet.target, false )
command_line = "#{tool.path} #{packet.filename} #{tool.params}"
status = RageConverter::run_command(command_line)
if ( status != 0 )
RageConverter::log().error( "Local platform conversion failed. #{command_line} returned #{status}" )
failed = true
end
end
end
# Only interactive users get a message box; the automated build
# account must not block on UI dialogs.
if ( failed and (not @conf.user.username.downcase.include?( User::ASSETBUILDER_USER ) ) )
GUI::MessageBox::error("Local platform conversion - Error Notification", "Local platform conversion failed. Check logs for errors.")
end
RageConverter::log().info( "Setting file timestamps." )
# Clone all the modified times...
content_status = {}
@items.each do |item|
src_exists = File::exists?( item.src )
dst_exists = File::exists?( item.dst )
RageConverter::log().error( "Source file does not exist: #{item.src} for cloning modified time." ) \
unless ( src_exists )
RageConverter::log().error( "Target file does not exist: #{item.dst} for cloning modified time." ) \
unless ( dst_exists )
if ( src_exists and dst_exists ) then
# We have succeeded but a previous item for this node might
# have failed so don't overwrite that information.
content_status[item.content] = true unless content_status.has_key?( item.content )
else
# Since we have failed we always mark this content node as
# failing.
content_status[item.content] = false
next
end
File::utime( File.mtime(item.src), File.mtime(item.src), item.dst )
end
# Invoke our block for each content's success.
content_status.each_pair do |node, success|
yield( node, success ) if ( block_given? )
result = false unless ( success )
end
end
if false == @c.preview then
RageConverter::log().info( "Rebuilding pack files after conversion." )
# Build any resultant packs
@packs.each do |pack|
RageConverter::log().info( "Rebuilding pack: #{pack.dst}" )
build_pack( pack )
end
end
RageConverter::log().info( "Local Rage Convert finished." )
result
end
# Return temporary directory for project and branch for local files.
# Supports native Project and Branch objects as well as Strings; any
# other argument type contributes no path segment.
def get_local_temp_dir( project, branch )
  path = OS::Path::combine( Pipeline::Config::instance().temp, 'local' )
  case project
  when Pipeline::Project then path = OS::Path::combine( path, project.name )
  when String            then path = OS::Path::combine( path, project )
  end
  case branch
  when Pipeline::Branch then path = OS::Path::combine( path, branch.name )
  when String           then path = OS::Path::combine( path, branch )
  end
  path
end
#
# Determine if the pack being built is from an compressed input RPF, if
# so we return true to indicate the converted asset must also be
# compressed.
#
# NOTE: currently hard-wired to return false (see DHM comment below);
# everything after the first return is intentionally unreachable until
# the runtime-side issue is resolved.
#
def build_pack_inner_compressed?( file )
# DHM 2011/02/10 - fix non-resources being compressed in platform RPFs.
# Runtime will be fixing this at some point but it doesn't currently
# assert on finding compressed non-resources. Weird.
return ( false )
# We are only concerned with RPF content nodes here.
return ( true ) if ( 0 == file.content.inputs.size )
return ( true ) unless ( file.content.inputs[0].is_a?( Pipeline::Content::RPF ) )
return ( file.content.inputs[0].compress )
end
#
# Collect the files inside +path+ that match +filter+, including one
# level of sub-directories. When +relative+ is set the +path+ prefix is
# stripped from top-level matches; sub-directory matches are always
# returned relative to +path+. (+sorted+ is accepted for interface
# compatibility but is not used.)
#
def find_files_in_pack( path, filter = '*.*', relative = false, sorted = false )
  found = @r.find_files( OS::Path::combine( path, filter ) ).map do |f|
    relative ? f.sub( path, '' ) : f
  end
  @r.find_dirs( path ).each do |sub_dir|
    @r.find_files( OS::Path::combine( path, sub_dir, filter ) ).each do |f|
      found << OS::Path::combine( sub_dir, f )
    end
  end
  found
end
#
# When using XGE we use this function to reassemble the newly converted
# files pack into Image and RPF files.
#
# file:: OpenedPack describing the source pack (zip or directory) and
#        the destination platform RPF.
#
# Builds the file list for the RPF, merges any manifest metadata found
# in the temp manifest folder, optionally packs a converted manifest
# file, then creates the RPF and clones the source's timestamp onto it.
#
# Returns true on success, false when RPF construction failed.
# Raises ArgumentError when +file+ is not an OpenedPack.
#
def build_pack( file )
# BUGFIX: was `throw ArgumentError::new(...)`; throw is the catch/throw
# primitive and does not raise the given exception.
raise ArgumentError::new( "Invalid OpenedPack object (#{file.class})." ) \
unless ( file.is_a?( OpenedPack ) )
cache_img = get_cache_dir( file.src )
FileUtils.mkdir_p( OS::Path::remove_filename(file.dst ) )
start_time = Time::now()
pack_file_metadata = PackfileMetadata::new( )
file_list = []
if ( not file.is_dir() ) then
# This is for OpenedPack objects that are regular files to
# RPF files.
source_files = ProjectUtil::data_zip_filelist( file.src )
source_files.each do |filename|
pack_target = Resourcing::convert_independent_filename_to_platform( filename, file.content.target )
inner_ind = OS::Path.combine( cache_img, filename )
inner_dst = OS::Path::combine( cache_img )
_filename_split = OS::Path::get_parts( pack_target )
if ( _filename_split.size > 1 ) then
# RPF files with directories
_filename_split.each_with_index do |part, index|
inner_dst = OS::Path::combine( inner_dst, part ) if ( 0 == index )
inner_dst = OS::Path::combine( inner_dst, file.content.target.platform, part ) if ( index > 0 )
end
else
# File in root of RPF, or no directories.
inner_dst = OS::Path.combine( inner_dst, file.content.target.platform, pack_target )
end
# Main File Entry
entry = {}
entry[:src] = inner_dst
entry[:dst] = pack_target
file_list << entry
add_hd_texture_dictionaries( inner_ind, inner_dst, pack_target, file.content.target, file_list, pack_file_metadata )
end
elsif ( file.is_dir() ) then
# This is for OpenedPack objects that are directories to RPF files.
file.content.inputs[0].inputs.each do |input|
cache_dir = get_cache_dir( file.content.inputs[0].absolute_path )
rpf_source = OS::Path::combine( cache_dir, file.content.target.platform, input.filename.sub( file.content.inputs[0].absolute_path, '' ) )
rpf_source = Resourcing::convert_independent_filename_to_platform( rpf_source, file.content.target )
rpf_dest = input.filename.sub( file.content.inputs[0].absolute_path, '' )
rpf_dest = Resourcing::convert_independent_filename_to_platform( rpf_dest, file.content.target )
# Main File Entry
entry = {}
entry[:src] = rpf_source
entry[:dst] = rpf_dest
file_list << entry
add_hd_texture_dictionaries( input.filename, rpf_source, rpf_dest, file.content.target, file_list, pack_file_metadata )
end
end
#Add any additional xml definitions
whole_combine_path = OS::Path::combine( @conf.temp, 'manifest', OS::Path::get_basename( file.dst ), "*.*")
additional_manifest_files = @r.find_files( whole_combine_path )
RageConverter::log().info( "Manifest input directory: #{whole_combine_path}." )
RageConverter::log().info( "Number of manifest input files found: #{additional_manifest_files.size}." )
additional_manifest_files.each do |additional_file|
whole_additional_file_path = OS::Path::combine( @conf.temp, 'manifest', OS::Path::get_basename( file.dst ), additional_file )
RageConverter::log().info( "Parsing manifest file input: #{whole_additional_file_path}" )
if( File.exists?( whole_additional_file_path ) ) then
RageConverter::log().info( "exists: #{whole_additional_file_path}" )
File.open( whole_additional_file_path ) do |metadata_file|
doc = Document.new( metadata_file )
# Process IMAP Group Information.
doc.elements.each( "ManifestData/IMAPGroup" ) do |ipl_group|
imap_group_name = ipl_group.attributes["name"]
RageConverter::log().info( "Found IMAP group: #{imap_group_name}" )
# bounds
bounds_items = []
ipl_group.elements.each( "Bounds" ) do |ipl_bound|
bounds_items << ipl_bound.attributes["name"]
end
if bounds_items.count>0 then
pack_file_metadata.add_imap_group_property( imap_group_name, "Bounds", bounds_items, ParcodegenNodeTypes::ArrayType)
end
# activation type flags
actType_items = []
ipl_group.elements.each( "ActivationType" ) do |ipl_actType|
actType_items << ipl_actType.attributes["value"]
end
if actType_items.count>0 then
pack_file_metadata.add_imap_group_property( imap_group_name, "Flags", actType_items.join("|") , ParcodegenNodeTypes::TextType)
end
# weather types
weather_items = []
ipl_group.elements.each( "ActiveWeatherType" ) do |ipl_weather|
weather_items << ipl_weather.attributes["value"]
end
if weather_items.count>0 then
pack_file_metadata.add_imap_group_property( imap_group_name, "WeatherTypes", weather_items, ParcodegenNodeTypes::ArrayType)
end
# active hours
ipl_group.elements.each( "ActiveHours" ) do |ipl_bound|
pack_file_metadata.add_imap_group_property( imap_group_name, "HoursOnOff", ipl_bound.attributes["value"], ParcodegenNodeTypes::AttributeType)
end
end
# OLD: Process Interior IMAP Dependencies
doc.elements.each( 'ManifestData/IMAPDependency') do |imap_dependency|
imap_name = imap_dependency.attributes['imapName']
ityp_name = imap_dependency.attributes['interiorFilename']
pack_file_metadata.add_imap_dependency( imap_name, ityp_name )
end
# Process IMAP Dependencies
doc.elements.each( 'ManifestData/IMAPDependency2' ) do |imap_dependency|
imap_name = imap_dependency.attributes['imapName']
ityp_names = imap_dependency.attributes['itypNames'].split( ';' )
is_interior = imap_dependency.attributes['isInterior'] == 'True'
pack_file_metadata.add_imap_dependencies( imap_name, ityp_names, is_interior )
end
# Process ITYP Dependencies
doc.elements.each( 'ManifestData/ITYPDependency2' ) do |ityp_dependency|
ityp_name = ityp_dependency.attributes['itypName']
ityp_names = ityp_dependency.attributes['itypNames'].split( ';' )
is_interior = ityp_dependency.attributes['isInterior'] == 'True'
pack_file_metadata.add_ityp_dependencies( ityp_name, ityp_names, is_interior )
end
# Process Interior (bounds) Dependencies
doc.elements.each( 'ManifestData/Interior' ) do |interior|
interior_name = interior.attributes["name"]
bounds_names = []
interior.elements.each( "Bounds" ) do |interior_bound|
bounds_names << interior_bound.attributes["name"]
end
if bounds_names.count>0 then
pack_file_metadata.add_interior_dependencies( interior_name, bounds_names )
end
end
end
end
end
# Pack the asset binding information if it contains data.
if ( pack_file_metadata.has_data?() ) then
manifest_filename = "#{OS::Path::get_basename( file.dst )}_manifest.imf"
binding_filename = OS::Path::combine( @conf.temp, 'manifest', manifest_filename )
pack_file_metadata.save( binding_filename )
binding_filename_platform = Resourcing::convert_independent_filename_to_platform( binding_filename, file.content.target )
binding_filename_platform_rpf = Resourcing::convert_independent_filename_to_platform( '_manifest.imf', file.content.target )
# Convert to platform data.
tool = XGE::Tool::from_project_ragebuilder_local( @project, @branch, file.content.target, false )
script = OS::Path::combine(Globals::instance().toolslib, 'util', 'ragebuilder', 'convert_file.rbs' )
platform = RageUtils.rage_platform( file.content.target.platform )
command_line = "#{tool.path} #{script} #{tool.params} -platform #{platform} -src #{binding_filename} -dst #{binding_filename_platform}"
puts "CMD: #{command_line}"
system( command_line )
entry = {}
entry[:src] = binding_filename_platform
entry[:dst] = binding_filename_platform_rpf
file_list << entry
else
RageConverter::log().info("no data to put in manifest file.")
end
# Create the RPF.
result = true
if ( not ProjectUtil::data_rpf_create( file.dst, file_list, true, file.content.target ) ) then
RageConverter::log().error( "RPF construction failed; is #{file.dst} locked?" )
result = false
end
# Temporary data pass.
# NOTE(review): the block parameter |file| shadows the method's +file+
# argument for the duration of this loop.
if ( @c.temporary ) then
file_list.each do |file|
File::delete( file[:src] )
end
end
# Make platform pack files have the same modified date at the
# independent pack file.
File.utime(File.mtime(file.src),File.mtime(file.src),file.dst)
# FIXME(review): `filelist` below is undefined in this method (the list
# built above is `file_list`, whose elements are hashes rather than
# filenames), so this block raises NameError whenever @c.temporary is
# set. The cleanup loop above already deletes the converted sources;
# the original intent here needs confirming before it can be repaired.
if @c.temporary then
filelist.each do |filename|
inner_dst = Resourcing::convert_independent_filename_to_platform( OS::Path::combine( cache_img, filename ), file.content.target )
File.delete inner_dst if File.exists?(inner_dst)
end
end
duration = Time.now - start_time
RageConverter::log().info( "#{file.dst} complete; taking #{duration}s." )
puts "#{Time.now.strftime('%Y-%m-%d %H:%M:%S')}: #{file.dst} complete; taking #{duration}s."
result
end
#-------------------------------------------------------------------------
# Private Methods
#-------------------------------------------------------------------------
private
#
# Handle adding the HD texture dictionary outputs to the file list
# for RPF construction.
#
def add_hd_texture_dictionaries( inner_ind, inner_dst, rpf_target, target, file_list, pack_file_metadata )
	# DHM 2011/05/24 - Ragebuilder may now spit out multiple files
	# per independent asset; some special case code picks these up
	# until we get Ragebuilder to tell us what its done.
	#
	# One rule per independent asset type that can carry an HD texture
	# dictionary: [ independent suffix, HD filename tag, metadata binding
	# method on pack_file_metadata ].  The suffixes are mutually
	# exclusive, so at most one rule applies to any filename.
	rules = [
		[ '.idr.zip', 'hidr', :add_drawable_asset_binding ],
		[ '.ift.zip', 'hifr', :add_fragment_asset_binding ],
		[ '.itd.zip', 'hi',   :add_texture_dictionary_asset_binding ]
	]
	rule = rules.find { |r| inner_ind.ends_with( r[0] ) }
	return if ( rule.nil? )
	hd_tag = rule[1]
	binding_method = rule[2]
	inner_dst_dir = OS::Path::get_directory( inner_dst )
	inner_dst_basename = OS::Path::get_basename( inner_dst )
	inner_ind_basename = OS::Path::get_basename( inner_ind )
	txd_hi_ext = Resourcing::convert_independent_extension_to_platform(
		inner_ind, 'itd', target )
	txd_hi_filename = OS::Path::combine( inner_dst_dir, "#{inner_dst_basename}+#{hd_tag}.#{txd_hi_ext}" )
	# Secondary file entry; only added when Ragebuilder actually emitted
	# the HD texture dictionary alongside the primary asset.
	return unless ( File::exists?( txd_hi_filename ) )
	txd_hi_basename = OS::Path::get_basename( txd_hi_filename )
	entry2 = {}
	entry2[:src] = txd_hi_filename
	entry2[:dst] = OS::Path::combine( OS::Path::get_directory( rpf_target ), OS::Path::get_filename( txd_hi_filename ) )
	file_list << entry2
	# Record the independent-asset -> HD-dictionary association in the
	# pack manifest metadata (drawable / fragment / texture-dictionary
	# binding, depending on the matched rule).
	pack_file_metadata.send( binding_method, inner_ind_basename, txd_hi_basename )
end
#
# Run a process
#
def RageConverter::run_command( command_line )
	# Run +command_line+ via systemu, returning the process status.
	# If systemu ITSELF raises (not the command it is running), fall back
	# to Kernel#system and report success, since system() gives us no
	# usable status for the child process anyway.
	begin
		status, _stdout, _stderr = systemu( command_line )
	rescue StandardError => ex
		# Narrowed from 'rescue Exception': rescuing Exception would also
		# swallow Interrupt/SystemExit, turning a Ctrl-C into a silent
		# re-run of the command.
		puts "Exception: #{ex.message}"
		# Double quotes so the separator is a real newline; the previous
		# single-quoted '\n\r' joined frames with the literal characters.
		puts "\tStacktrace: #{ex.backtrace.join( "\n" )}"
		RageConverter::log().warn( "Systemu hit an exception, it will now run the command with System(). the exception was #{ex.message} the command was #{command_line}" )
		# Gracefully handle exceptions in systemu itself ( NOT WHAT IT IS RUNNING - don't be confused. )
		system( command_line )
		status = 0 # force it to be happy, system doesn't return a code for the process that run anyway.
	end
	status
end
#
# Determine whether we need to convert a piece of content from src
# (filename) to dst (filename).
#
def need_convert( content, src, dst )
# Returns true when +content+ must be (re)built from +src+ (independent
# file) into +dst+ (platform file).  Checks, in order: directory
# sources, missing sources, the global rebuild flag, dirty content,
# missing/stale destination, a newer converter tool, and stale inputs.
# Returns false only when every check passes.
# If it's a directory we will need to walk it's inputs
return true if ( File.directory?( src ) )
if ( not File.exists?(src) ) then
# RageConverter::log().warn( "Source file #{src} does not exist, skipping." )
# return false
# Missing source: report that conversion is required (the earlier
# skip behaviour above was deliberately disabled) -- presumably the
# source is expected to be generated later in the build.
return ( true )
end
# NOTE(review): the Xbox360 Ragebuilder binary's mtime is used as a
# proxy for the converter tool on ALL platforms -- assumes the platform
# tools are deployed together; confirm.
ragebuilder_timestamp = File.mtime(@tools['xbox360'].path)
# We need to rebuild if our global state says so, the content is marked
# as being dirty, destination file does not exist or the dst modified time
# is earlier than the source modified time.
return true if ( @c.rebuild )
return true if ( content.dirty? )
return true if ( not File::exists?( dst ) )
# Rebuild if the converter executable itself is newer than the output.
return true if ( ragebuilder_timestamp > File::mtime( dst ) )
return true if ( ( File::mtime( src ) <=> File::mtime( dst ) ) > 0 )
# Need to create this content if any of its inputs are going to be
# re-created; abstracted from next 'inputs' loop as that checks for
# the input file existing first which may not be the case.
content.inputs.each do |input|
return ( true ) if ( ConvertSystem::instance().need_convert?( input ) )
end
# Need to create this content file if any input is newer than the
# existing content file itself.
mtime = File::mtime( content.filename )
content.inputs.each do |input|
if ( input.is_a?( Content::File ) ) then
# If the input file doesn't exist then we need to build
# ourselves; we assume it will be there by then.
return ( true ) if ( not File::exists?( input.filename ) )
return ( true ) if ( ( File::mtime( input.filename ) <=> mtime ) > 0 )
elsif ( input.is_a?( Content::Group ) ) then
# DHM 2011/04/20: this additional Group parsing has
# been added for the ped pipeline in 3dsmax. It
# constructs Groups as input nodes to build up the
# IDD, ILD files etc. Not sure whether the
# 'inputs.inputs' recursion is a good idea!!
nodes = ( input.children + input.inputs )
nodes.each do |child|
next unless ( child.is_a?( Content::File ) )
mtime_child = File::mtime( child.filename )
return ( true ) if ( ( mtime_child <=> mtime ) > 0 )
end
end
end
# We will not be converting the content src to dst. We put a warning in
# our log about it as the user may have meant for it to happen.
RageConverter::log().warn( "No need to convert #{src} to #{dst}." )
RageConverter::log().debug( " Rebuild: #{@c.rebuild}, Dirty: #{content.dirty?}, mtime src<=>dst: #{(File.mtime(src) <=> File.mtime(dst))}." )
false
end
#
# Function to return the cache extraction directory for a source image
# filename.
#
def get_cache_dir( src )
	# Returns the cache extraction directory for source image +src+:
	# <cache_root>/resourcing/<subdir relative to export or processed>/<basename>.
	# Raises RuntimeError when +src+ is neither an export nor a processed
	# file for the current branch.
	cache_subdir = ''
	if ( @branch.is_export_file?( src ) ) then
		cache_subdir = OS::Path::get_directory( src ).sub( @branch.export, '' )
	elsif ( @branch.is_processed_file?( src ) ) then
		cache_subdir = OS::Path::get_directory( src ).sub( @branch.processed, '' )
	else
		# Was 'throw', which is catch/throw control flow, not error
		# signalling -- with no matching catch it would surface as an
		# uncaught-throw ArgumentError rather than this RuntimeError.
		raise RuntimeError::new( "Invalid export or processed file: #{src}." )
	end
	OS::Path::combine( @c.cache_root, 'resourcing', cache_subdir, OS::Path::get_basename( src ) )
end
end
end # Converters module
end # Resourcing module
end # Pipeline module
# %RS_TOOLSLIB%/pipeline/resourcing/converters/converter_rage.rb