#
# File:: Engine.rb
# Description:: Main autobuild engine component.
#
# Author:: David Muir
# Date:: 25 June 2008
#
#-----------------------------------------------------------------------------
# Uses
#-----------------------------------------------------------------------------
require "source/builder/reporting/report_ragebuilder"
require "source/builder/reporting/report_changelist"
require "source/builder/reporting/report_rebuild"
require "source/builder/reporting/report_writer"
require "source/builder/shell/console"
require "source/builder/shell/command"
require "source/builder/shell/command_queue"
require 'pipeline/config/projects'
require 'pipeline/content/content_core'
require 'pipeline/log/log'
require 'pipeline/os/file'
require 'pipeline/os/path'
require 'pipeline/os/start'
require 'pipeline/projectutil/data_content'
require 'pipeline/projectutil/data_convert'
require 'pipeline/projectutil/data_convert_map_dependencies'
require 'pipeline/projectutil/data_get_latest'
require 'pipeline/projectutil/tools_get_latest'
require 'pipeline/scm/monitor'
require 'pipeline/scm/perforce'
require 'pipeline/util/environment'
require 'pipeline/util/rexml_write_fix'
require 'pipeline/win32/stdin_nonblock_gets'
require 'pipeline/util/memory_profiler'
require 'log4r/outputter/fileoutputter'
include Pipeline
require 'socket'
require 'fileutils'
require 'pp'
require 'xml'

#-----------------------------------------------------------------------------
# Implementation
#-----------------------------------------------------------------------------
module AssetBuild

    #
    # Used for logging just a single build triggered by Cruise Control
    class EachBuildOutputter < Log4r::FileOutputter
        def initialize( _name, hash={} )
            super( _name, hash )
            @filename_base = OS::Path.get_basename( @filename )
            @filename_ext = OS::Path.get_extension( @filename )
        end

        def roll()
            @out.close()
            @out = File.new( @filename, "w" )
        end
    end

    #
    # == Description
    # The main asset building engine. This class handles all of the asset
    # build logic. There are utility methods (typically in the
    # Pipeline::ProjectUtil module) that actually handle most of the work.
    #
    # Ideally this is just glue between those components and our generic
    # Perforce SCM monitoring utility.
    #
    class Engine
        include Assetbuild::Builder::Shell::CommandQueue

        #---------------------------------------------------------------------
        # Constants
        #---------------------------------------------------------------------
        VERSION = 220
        ERROR_REGEXP = "\"^(.*)(Error\\s|Error\\s:|Error:\\s)(.*)$\""
        WARNING_REGEXP = "\"^(.*)(Warning:\\s)(.*)$\""

        # Regexs that match bundled information when cruise control sends commands to the engine.
        # * these should not match the names of modification files.
        # * the modifications list is stripped of these commands.
        REGEX_CC_REBUILD = /^cc_rebuild/
        REGEX_WILDCARD = /^wildcard=(.+)/
        REGEX_CC_PROJECTNAME = /^cc_project_name=(.+)/
        REGEX_REBUILD = /^rebuild$/

        MAX_ADDITIONAL_DESCRIPTIONS = 50

        # If this character is placed in the wildcard filespec of files to build,
        # then no recursion will be used when finding files.
        NO_RECURSE_CHARACTER = "<" # this character needs to be an invalid filename character.

        # To be derived from something...
this needs unhardcoded when I get time EVENT_FILENAME = "N:\\RSGEDI\\Builders\\CruiseControl\\live\\assetbuilder_#{ENV["RS_PROJECT"]}_$(branch)_hw\\events.xml" EVENT_FILENAME_USER = "N:\\RSGEDI\\Builders\\CruiseControl\\live\\assetbuilder_#{ENV["RS_PROJECT"]}_$(branch)_user\\events.xml" #--------------------------------------------------------------------- # Attributes #--------------------------------------------------------------------- attr_reader :project attr_reader :branch attr_reader :config attr_reader :running attr_reader :rebuild attr_reader :num_builds attr_reader :commands # Generic options Hash object containing the following option keys: # disabled: Bool (default: false) # checkin : Bool (default: true) # reports : Bool (default: output path exists status, for local testing) attr_reader :options #--------------------------------------------------------------------- # Public Methods #--------------------------------------------------------------------- def initialize( project, branch, config ) puts "Version: #{VERSION/100.0}" throw ArgumentError.new( 'Invalid project object (#{project.class}).' ) \ unless ( ( nil != project ) and ( project.is_a?( Project ) ) ) throw ArgumentError.new( 'Invalid branch object (#{branch.class}).' ) \ unless ( branch.is_a?( Branch ) ) @lock_problem_counter = 0 @build_lock = Mutex.new() @project = project @branch = branch @config = config @shell_log = Log.new( 'shell' ) @build_log = Log.new( 'build') # add an each build outputter to the root log. @each_build_outputter = EachBuildOutputter.new( get_main_log(), :filename => get_main_log_each_build() ) Pipeline::LogSystem::instance.rootlog.add( @each_build_outputter ) @running = true @options = { 'checkin' => true, 'reports' => ::File::directory?( config.report.path ), } @num_builds = 0 pipeline_config = Pipeline::Config.instance( ) if not pipeline_config.user.is_builder_server() @events_filename = EVENT_FILENAME_USER else @events_filename = EVENT_FILENAME end @branch.in_env do |env| @events_filename = env.subst( @events_filename ) end log_info "Using Event filename #{@events_filename}" env_init( ) @p4 = @project.scm( ) commands_init( ) init_queue( @commands, ENV['COMPUTERNAME'], @config.command_queue.port ) @shell = Assetbuild::Builder::Shell::Console.new( @commands, ENV['COMPUTERNAME'], @config.command_queue.port ) end # Helper method... # puts the msg and log it too. def log_info( msg, cc = false ) msg = "#{Time.now.strftime('%Y-%m-%d %H:%M:%S')} #{msg}" # make sure cc messages are always pumped out despite logging level fix for asset builder slowness # config = Pipeline::Config.instance( ) # log_level= @build_log.level # if cc # @build_log.level # msg = "INFO_MSG: #{msg}" # @build_log.level = Logger::INFO # end @build_log.info( msg ) puts "INFO_MSG: #{msg}" # DW - this should remain - otherwise we can't see twhat is going on when we have set the logging level to warning for optimising the speed fo the assetbuilder temporarily. 
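            # Note: the `cc` flag marks messages that Cruise Control should always see
            # regardless of logging level; the level-forcing logic that used it is
            # currently commented out below.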
# if cc # @build_log.level = log_level # end end def commands_init( ) # Initialise command Proc objects sync_proc = Proc.new do |arg| event_time = report_event_started("sync", arg, false, "-"); result = sync( arg ); report_event_finished(result, event_time); result; end view_proc = Proc.new do |arg| view_files( arg ); end rebuild_proc = Proc.new do |arg| rebuild_files( arg ); end reload_proc = Proc.new do reload( ); end set_proc = Proc.new do |arg, val| set_option( arg, val ); end get_proc = Proc.new do |arg| get_option( arg ); end config_proc = Proc.new do get_config( ); end queue_proc = Proc.new do get_queue( ); end status_proc = Proc.new do get_status( ); end exit_proc = Proc.new do exit( ); end pause_proc = Proc.new do ; end resume_proc = Proc.new do ; end build_proc = Proc.new do |arg| build( arg ); end help_proc = Proc.new do help_func(); end threads_proc = Proc.new do threads_func(); end close_logfiles_proc = Proc.new do close_logfiles(); end reopen_logfiles_proc = Proc.new do reopen_logfiles(); end print_class_counts_proc = Proc.new do print_class_counts( ); end toggle_mem_prof_proc = Proc.new do toggle_mem_prof(); end print_mem_proc = Proc.new do print_mem(); end purge_cache_proc = Proc.new do event_time = report_event_started("purge_cache", "", false, "-"); result = purge_cache(); report_event_finished(result, event_time); result; end @commands = [ Assetbuild::Builder::Shell::Command.new( 'sync', Assetbuild::Builder::Shell::Command::NO_SHORTCUTS, 'Sync Perforce path.', [ Assetbuild::Builder::Shell::Param.new( 'path', String ) ], sync_proc), Assetbuild::Builder::Shell::Command.new( 'view', Assetbuild::Builder::Shell::Command::NO_SHORTCUTS, 'View assets from wildcard or changelist ID.', [ Assetbuild::Builder::Shell::Param.new( 'wildcard/changelist', String ) ], view_proc ), Assetbuild::Builder::Shell::Command.new( 'rebuild', Assetbuild::Builder::Shell::Command::NO_SHORTCUTS, 'Rebuild asset with wildcard or changelist ID.', [ Assetbuild::Builder::Shell::Param.new( 'wildcard/changelist', String ) ], rebuild_proc ), Assetbuild::Builder::Shell::Command.new( 'reload', Assetbuild::Builder::Shell::Command::NO_SHORTCUTS, 'Reload Asset Builder configuration file.', Assetbuild::Builder::Shell::Param::NONE, reload_proc ), Assetbuild::Builder::Shell::Command.new( 'pause', Assetbuild::Builder::Shell::Command::NO_SHORTCUTS, 'Pause Perforce monitor.', Assetbuild::Builder::Shell::Param::NONE, pause_proc ), Assetbuild::Builder::Shell::Command.new( 'resume', Assetbuild::Builder::Shell::Command::NO_SHORTCUTS, 'Resume Perforce monitor.', Assetbuild::Builder::Shell::Param::NONE, resume_proc ), Assetbuild::Builder::Shell::Command.new( 'set', Assetbuild::Builder::Shell::Command::NO_SHORTCUTS, 'Set Asset Builder variable.', [ Assetbuild::Builder::Shell::Param.new( 'option', String ), Assetbuild::Builder::Shell::Param.new( 'value', String ) ], set_proc ), Assetbuild::Builder::Shell::Command.new( 'get', Assetbuild::Builder::Shell::Command::NO_SHORTCUTS, 'Get Asset Builder variable.', [ Assetbuild::Builder::Shell::Param.new( 'option', String ) ], get_proc ), Assetbuild::Builder::Shell::Command.new( 'config', [ 'c' ], 'Display Asset Builder configuration information.', Assetbuild::Builder::Shell::Param::NONE, config_proc ), Assetbuild::Builder::Shell::Command.new( 'queue', [ 'qu' ], 'Display Asset Builder command queue information.', Assetbuild::Builder::Shell::Param::NONE, queue_proc ), Assetbuild::Builder::Shell::Command.new( 'status', [ 's' ], 'Display Asset Builder status information.', 
Assetbuild::Builder::Shell::Param::NONE, status_proc ), Assetbuild::Builder::Shell::Command.new( 'build', Assetbuild::Builder::Shell::Command::NO_SHORTCUTS, 'Build independent assets listed with p4 filespec.', [ Assetbuild::Builder::Shell::Param.new( 'files', String ) ], build_proc ), Assetbuild::Builder::Shell::Command.new( 'exit', [ 'quit', 'q' ], 'Exit Asset Builder', Assetbuild::Builder::Shell::Param::NONE, exit_proc ), Assetbuild::Builder::Shell::Command.new( 'help', Assetbuild::Builder::Shell::Command::NO_SHORTCUTS, 'Display help', Assetbuild::Builder::Shell::Param::NONE, help_proc ), Assetbuild::Builder::Shell::Command.new( 'close_logfiles', Assetbuild::Builder::Shell::Command::NO_SHORTCUTS, 'Closes log files', Assetbuild::Builder::Shell::Param::NONE, close_logfiles_proc ), Assetbuild::Builder::Shell::Command.new( 'reopen_logfiles', Assetbuild::Builder::Shell::Command::NO_SHORTCUTS, 'Reopens log files', Assetbuild::Builder::Shell::Param::NONE, reopen_logfiles_proc ), Assetbuild::Builder::Shell::Command.new( 'mem_print_class_count', [ 'mem', 'm' ], 'Debug helper for memory leaks.', Assetbuild::Builder::Shell::Param::NONE, print_class_counts_proc ), Assetbuild::Builder::Shell::Command.new( 'mem_log', Assetbuild::Builder::Shell::Command::NO_SHORTCUTS, 'Toggle leak tracking.', Assetbuild::Builder::Shell::Param::NONE, toggle_mem_prof_proc ), Assetbuild::Builder::Shell::Command.new( 'mem_print', Assetbuild::Builder::Shell::Command::NO_SHORTCUTS, 'Display memory used.', Assetbuild::Builder::Shell::Param::NONE, print_mem_proc ), Assetbuild::Builder::Shell::Command.new( 'threads', Assetbuild::Builder::Shell::Command::NO_SHORTCUTS, 'Display threads', Assetbuild::Builder::Shell::Param::NONE, threads_proc ), Assetbuild::Builder::Shell::Command.new( 'purge_cache', Assetbuild::Builder::Shell::Command::NO_SHORTCUTS, 'Purge Cache', Assetbuild::Builder::Shell::Param::NONE, purge_cache_proc ) ] end # # Toggle mem profiling ( leak tracking ) # def toggle_mem_prof() if ( @mem_prof_started ) @mem_prof_started = false puts "stopping leak tracking" Pipeline::Util::MemoryProfiler::stop( ) else @mem_prof_started = true puts "starting leak tracking" Pipeline::Util::MemoryProfiler::start( ) end Assetbuild::Builder::Shell::CommandResultCompleted.new( "completed toggle_mem_prof" ) end # # Print memory # def print_mem() Pipeline::Util::MemoryProfiler::start( ) if @mem_prof_started Pipeline::Util::MemoryProfiler::snapshot( ) Pipeline::Util::MemoryProfiler::stop( ) if @mem_prof_started Assetbuild::Builder::Shell::CommandResultCompleted.new( "completed print_mem" ) end # # purge cache files # def purge_cache() begin #convert = Pipeline::ConvertSystem::instance() #convert.setup( @project ) #puts "Purging #{convert.cache_root}" #FileUtils.rm_f(convert.cache_root) puts "Purging cache..." # ideally I should derive the cache directory form the convert system - but it doesn;t work - no time to investigate cache_folder = OS::Path.combine(ENV['RS_PROJROOT'],"cache") puts "Purging #{cache_folder}..." 
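            # FileUtils.rm_rf removes the directory tree recursively and ignores a missing
            # path, so an already-empty cache is not treated as an error here.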
FileUtils.rm_rf(cache_folder) Assetbuild::Builder::Shell::CommandResultCompleted.new( "Completed Purge Cache in #{cache_folder}" ) rescue P4Exception => ex puts ex.message result = CommandResultError.new( "Purge Cache failed #{ex.message}" ) end end # # Print class counts # def print_class_counts( ) Pipeline::Util::MemoryProfiler::start( ) if @mem_prof_started result = Pipeline::Util::MemoryProfiler::snapshot( :count ) Pipeline::Util::MemoryProfiler::stop( ) if @mem_prof_started Assetbuild::Builder::Shell::CommandResultCompleted.new( "completed print_class_counts" ) end # # Shell sync command handler. # # This command can do a Perforce sync on a particular known-token # (tools, common) or a local file/directory path. # def sync( param ) message = '' if ( :tools_config == param or :tools_config.to_s == param ) then message = "\nSyncing #{@project.uiname} tools config...\n" puts message tools = ProjectUtil::tools_config_get_latest( ) do |client_file, synced, errors| if ( synced ) then message << "\tSynced #{client_file}\n" else message << "Failed to sync #{client_file}\n" puts "\t#{message}" errors.each do |error| @build_log.error( error ); end end end message << "#{tools.size} files synced.\n" elsif ( :tools_bin == param or :tools_bin.to_s == param ) then message = "\nSyncing #{@project.uiname} tools bin...\n" puts message tools = ProjectUtil::tools_bin_get_latest( ) do |client_file, synced, errors| if ( synced ) then message << "\tSynced #{client_file}\n" else message << "Failed to sync #{client_file}\n" puts "\t#{message}" errors.each do |error| @build_log.error( error ); end end end message << "#{tools.size} files synced.\n" elsif ( :tools == param or :tools.to_s == param ) then message = "\nSyncing #{@project.uiname} tools...\n" puts message tools = ProjectUtil::tools_get_latest( ) do |client_file, synced, errors| if ( synced ) then message << "\tSynced #{client_file}\n" else message << "Failed to sync #{client_file}\n" puts "\t#{message}" errors.each do |error| @build_log.error( error ); end end end message << "#{tools.size} files synced.\n" elsif ( :shaders == param or :shaders.to_s == param ) then message = "\nSyncing #{@project.uiname} shaders...\n" puts message shaders = ProjectUtil::data_get_latest_build_shaders( @project, @branch.name ) do |client_file, synced, errors| if ( synced ) then message << "Synced #{client_file}\n" else message << "Failed to sync #{client_file}\n" errors.each do |error| @build_log.error( error ); end end end message << "#{shaders.size} files synced." 
elsif ( :tools_lib_util_ragebuilder == param or :tools_lib_util_ragebuilder.to_s == param ) then message = "\nSyncing #{@project.uiname} tools/lib/util/ragebuilder...\n" puts message tools = ProjectUtil::tools_lib_util_ragebuilder_get_latest( ) do |client_file, synced, errors| if ( synced ) then message << "\tSynced #{client_file}\n" else message << "Failed to sync #{client_file}\n" puts "\t#{message}" errors.each do |error| @build_log.error( error ); end end end message << "#{tools.size} files synced.\n" elsif ( :platform_data == param or :platform_data.to_s == param ) then message = "\nSyncing #{@project.uiname} platform data...\n" puts message plaform_data = ProjectUtil::data_get_latest_platform_build_labelled( @project, @branch.name ) do |client_file, synced, errors| if ( synced ) then message << "\tSynced #{client_file}\n" else message << "Failed to sync #{client_file}\n" puts "\t#{message}" errors.each do |error| @build_log.error( error ); end end end message << "#{plaform_data.size} files synced.\n" elsif ( :common == param or :common.to_s == param ) then message = "\nSyncing #{@project.uiname} common...\n" puts message shaders = ProjectUtil::data_get_latest_build_common( @project, @branch.name ) do |client_file, synced, errors| if ( synced ) then message << "Synced #{client_file}\n" else message << "Failed to sync #{client_file}\n" errors.each do |error| @build_log.error( error ); end end end message << "#{shaders.size} files synced." elsif ( :assets == param or :assets.to_s == param ) then message = "\nSyncing #{@project.uiname} assets...\n" puts message assets = ProjectUtil::data_get_latest_assets( @project, @branch.name ) do |client_file, synced, errors| if ( synced ) then message << "\tSynced #{client_file}\n" else message << "Failed to sync #{client_file}\n" puts "\t#{message}" errors.each do |error| @build_log.error( error ); end end end message << "#{assets.size} files synced.\n" elsif ( :assets_metadata == param or :assets_metadata.to_s == param ) then message = "\nSyncing #{@project.uiname} assets_metadata...\n" puts message assets_metadata = ProjectUtil::data_get_latest_assets_metadata( @project, @branch.name ) do |client_file, synced, errors| if ( synced ) then message << "\tSynced #{client_file}\n" else message << "Failed to sync #{client_file}\n" puts "\t#{message}" errors.each do |error| @build_log.error( error ); end end end message << "#{assets_metadata.size} files synced.\n" elsif ( :assets_maps_parenttxds == param or :assets_maps_parenttxds.to_s == param ) then message = "\nSyncing #{@project.uiname} assets_maps_parenttxds...\n" puts message assets_maps_parenttxds = ProjectUtil::data_get_latest_assets_maps_parenttxds( @project, @branch.name ) do |client_file, synced, errors| if ( synced ) then message << "\tSynced #{client_file}\n" else message << "Failed to sync #{client_file}\n" puts "\t#{message}" errors.each do |error| @build_log.error( error ); end end end message << "#{assets_maps_parenttxds.size} files synced.\n" elsif ( :assets_processed == param or :assets_processed.to_s == param ) then message = "\nSyncing #{@project.uiname} assets_processed...\n" puts message assets_processed = ProjectUtil::data_get_latest_assets_processed( @project, @branch.name ) do |client_file, synced, errors| if ( synced ) then message << "\tSynced #{client_file}\n" else message << "Failed to sync #{client_file}\n" puts "\t#{message}" errors.each do |error| @build_log.error( error ); end end end message << "#{assets_processed.size} files synced.\n" else # Arbitary Perforce path. 
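                # A minimal sketch of what this currently unimplemented branch might do,
                # assuming the Perforce wrapper exposes a plain sync passthrough such as
                # run_sync (an assumption - illustrative only, not the current behaviour):
                #
                #   @p4.connect() unless ( @p4.connected?() )
                #   synced = @p4.run_sync( param )
                #   message = "Synced #{param}: #{synced.size} result records."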
                throw RuntimeError.new( "Not implemented fetching normal Perforce paths (yet!)" )
            end

            Assetbuild::Builder::Shell::CommandResultCompleted.new( message )
        end

        #
        #
        #
        def set_option( opt, val )
            # Fail if it's not a recognised option.
            return CommandResultError.new( "Option #{opt} not known." ) \
                unless ( @options.has_key?( opt ) )

            # Set option, attempting to coerce into the current option type,
            # falling back to String if required.
            case @options[opt]
            when Integer
                @options[opt] = val.to_i
            when TrueClass, FalseClass
                @options[opt] = ( 'true' == val.to_s.strip.downcase )
            else
                @options[opt] = val.to_s
            end
            Assetbuild::Builder::Shell::CommandResultCompleted.new( 'ok' )
        end

        #
        #
        #
        def get_option( opt )
            # Fail if it's not a recognised option.
            return CommandResultError.new( "Option #{opt} not known." ) \
                unless ( @options.has_key?( opt ) )

            # Get option; returning as String.
            Assetbuild::Builder::Shell::CommandResultCompleted.new( @options[opt].to_s )
        end

        #
        # Handles the special mangled wildcard format for rebuilding non-recursively.
        # Really this should be a new command, but since that would take a fair bit of
        # work and testing the 'easy' option has been taken; the design of the engine
        # doesn't lend itself to this too easily.
        #
        def self.get_files( wildcard )
            if ( wildcard.include?( NO_RECURSE_CHARACTER ) )
                OS::FindEx::find_files( wildcard.gsub( NO_RECURSE_CHARACTER, "" ) )
            else
                OS::FindEx::find_files_recurse( wildcard )
            end
        end

        #
        # View independent data files from changelist or wildcard string.
        #
        def view_files( wildcard )
            message = ''
            if ( wildcard =~ /^([0-9]+)$/ ) then
                cl = $1
                message << "Changelist #{cl} files:\n"
                begin
                    export_files = []
                    @p4.connect() unless ( @p4.connected?() )
                    local_files = @p4.files_from_changelist( cl )
                    local_files.each do |filename|
                        local_filename = OS::Path::normalise( @p4.depot2local( filename ) )
                        next unless ( @project.branches[@branch.name].is_export_file?( local_filename ) )
                        export_files << local_filename
                    end
                    export_files.each do |filename|
                        message << "\t#{filename}\n"
                        puts filename
                    end
                    message << "End.\n\n"
                    return CommandResultCompleted.new( message )
                rescue P4Exception => ex
                    Assetbuild::Builder::Shell::CommandResultError.new( "Changelist #{cl} was not found." )
                end
            else
                export = @env.subst( @project.branches[@branch.name].export )
                ind_wildcard = @env.subst( OS::Path::combine( export, wildcard ) )
                files = Engine::get_files( ind_wildcard )
                message = "Export files wildcard: #{ind_wildcard}, #{files.size} files found."
                files.each do |filename|
                    message << "\t#{filename}\n"
                end
                message << "End.\n\n"
                return Assetbuild::Builder::Shell::CommandResultCompleted.new( message )
            end
        end

        #
        # Helper method to prettify a rebuild duration for events.
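        # Worked examples of the decomposition below:
        #   prettify_rebuild_duration( 3725 )  #=> " 1 hrs 2 mins 5 secs"
        #   prettify_rebuild_duration( 90 )    #=> " 1 mins 30 secs"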
# def prettify_rebuild_duration(build_duration) difference = build_duration seconds = difference % 60 difference = (difference - seconds) / 60 minutes = difference % 60 difference = (difference - minutes) / 60 hours = difference % 24 difference = (difference - hours) / 24 days = difference % 7 weeks = (difference - days) / 7 build_duration = "" build_duration += " #{weeks.to_i} weeks" if weeks > 0 build_duration += " #{days.to_i} days" if days > 0 build_duration += " #{hours.to_i} hrs" if hours > 0 build_duration += " #{minutes.to_i} mins" if minutes > 0 build_duration += " #{seconds.to_i} secs" if seconds > 0 build_duration end EVENT_KEYS = [ "status", "build_date", "sort_date", "build_duration", "project", "description", "cc_project_name" ] # # Publish the event that has started # def report_event_started(name, wildcard, cc_rebuild = false, cc_project_name = "") begin log_info "=== Event #{name} started ===" time = Time.now event_time_formatted = time.strftime("%H:%M:%S %d-%m") sort_date = time.strftime("%Y-%m-%d %H:%M:%S") xmldoc = LibXML::XML::Document::file( @events_filename ) if (xmldoc) events = xmldoc.find_first("//events") if (not events) events = XML::Node.new( 'events' ) xmldoc.root << events end event_node = XML::Node.new( 'event' ) EVENT_KEYS.each { |key| event_node.attributes[key] = "-" } event_node.attributes["project"] = name if (name == "rebuild") event_node.attributes["project"] = cc_rebuild ? "CC Rebuild" : "Manual Rebuild" end event_node.attributes["description"] = "'#{name} #{wildcard}' " event_node.attributes["status"] = "Running" event_node.attributes["build_date"] = event_time_formatted event_node.attributes["sort_date"] = sort_date event_node.attributes["cc_project_name"] = "#{ENV["COMPUTERNAME"]} #{cc_project_name}" events << event_node xmldoc.save( @events_filename, :indent => true ) end rescue Exception => ex puts "Exception during report_event_started: #{ex.message}" puts "Call stack:" puts ex.backtrace.join( "\n\t" ) end sort_date end # # Publish the event that a rebuild has finished # - it will search the xmldoc for the event of the same time # - if it can be found then this event will be finalised. # - stale events are cleaned up. # def report_event_finished(result, event_time) begin log_info "=== Event finished ===" time = Time.now event_end_time_formatted = time.strftime("%H:%M:%S %d-%m") xmldoc = LibXML::XML::Document::file( @events_filename ) if (xmldoc) events = xmldoc.find_first("//events") if (events) event_nodes = events.find("event") if (event_nodes) event_nodes.each do |event| event_hash = {} EVENT_KEYS.each { |key| event_hash[key] = event.attributes[key].nil? ? "" : event.attributes[key] } if (event_hash["sort_date"]==event_time) event_hash["status"] = result.to_s.downcase event_hash["status"] = "Success" if (event_hash["status"].include?("completed") ) event_hash["status"] = "Failure" if (event_hash["status"].include?("error") ) event.attributes["build_completed"] = event_end_time_formatted event_hash["build_duration"] = (Time.parse(event_end_time_formatted)-Time.parse(event_hash["build_date"])) event_hash["build_duration"] = prettify_rebuild_duration(event_hash["build_duration"]) #prettify the duration else # Only one event can be running at any time - fix up stale events. ( should only happen if engine prematurely exits ) event_hash["status"] = "Stale - was running" if (event_hash["status"].downcase=="running") end EVENT_KEYS.each { |key| event.attributes[key] = event_hash[key].nil? ? 
"" : event_hash[key] } end end end xmldoc.save( @events_filename, :indent => true) end rescue Exception => ex puts "Exception during report_event_finished: #{ex.message}" puts "Call stack:" puts ex.backtrace.join( "\n\t" ) end end # # Rebuild independent data files from changelist or wildcard string. # def rebuild_files( wildcard ) result = nil @build_lock.synchronize do event_time = report_event_started("rebuild", wildcard, false, "-") if ( wildcard =~ /^([0-9]+)$/ ) then cl = $1 #enqueue_command_result( Assetbuild::Builder::Shell::CommandResultProgress.new( "Changelist #{cl} rebuild..." ) ) begin export_files = [] @p4.connect() unless ( @p4.connected?() ) local_files = @p4.files_from_changelist( cl ) local_files.each do |filename| local_filename = OS::Path::normalise( @p4.depot2local( filename ) ) next unless ( @project.branches[@branch.name].is_export_file?( local_filename ) ) export_files << local_filename end #enqueue_command_result( Assetbuild::Builder::Shell::CommandResultProgress.new( "Rebuilding assets and checking-in..." ) ) rebuild_data( export_files ) result = CommandResultCompleted.new( "Rebuild of changelist #{cl} complete." ) rescue P4Exception => ex result = CommandResultError.new( "Changelist #{cl} was not found." ) end else export = @env.subst( @project.branches[@branch.name].export ) ind_wildcard = @env.subst( OS::Path::combine( export, wildcard ) ) files = Engine::get_files( ind_wildcard ) message = "Export files wildcard: #{ind_wildcard}, #{files.size} files found." #enqueue_command_result( Assetbuild::Builder::Shell::CommandResultProgress.new( message ) ) # By creating a new thread we shall hopefully # maintain the shell's interactive ability. #enqueue_command_result( Assetbuild::Builder::Shell::CommandResultProgress.new( "Rebuilding assets and checking-in..." ) ) rebuild_data( files ) result = Assetbuild::Builder::Shell::CommandResultCompleted.new( "Rebuild complete." ) end report_event_finished(result, event_time) end return result end # # # def reload( ) @build_lock.synchronize do @config.reload( ) end Assetbuild::Builder::Shell::CommandResultCompleted.new( "Configuration XML reloaded." ) end # # # def get_config( ) Assetbuild::Builder::Shell::CommandResultCompleted.new( @config.pretty_string( 1, ' ' ) ) end # # # def get_queue( ) num_items_in_q = 0 @shell.queue_walk do |queued_command| Assetbuild::Builder::Shell::CommandResultProgress.new( queued_command.to_s ) num_items_in_q += 1 end return Assetbuild::Builder::Shell::CommandResultCompleted.new( "Queue peek complete. \##{num_items_in_q} items." ) end # # # def get_status( ) c = Pipeline::Config.instance() message = "Project: #{@project.uiname}\n" message << "Branch: #{@branch.name}\n" message << "Build lock: #{@build_lock.locked? ? 'locked' : 'unlocked'}\n" message << "P4 Local: #{@p4.connected?() ? 'connected' : 'not connected'}\n" message << "XGE: #{c.use_xge ? 'enabled' : 'disabled'}\n" message << "Checkin: #{@options['checkin'] ? 'enabled' : 'disabled' }\n" message << "Targets:\n" @project.branches[@branch.name].targets.each_pair do |name, tgt| message << "\tTarget: #{name} #{tgt.enabled}\n" end Assetbuild::Builder::Shell::CommandResultCompleted.new( message ) end # # Common help function. 
# def help_func( ) message = "Shell help and registered commands:\n" @commands.each do |command| message << "\t#{command.name}" command.shortcuts.each do |s| message << ",#{s}" end message << " " command.parameters.each do |p| message << "<#{p.name}> " end message << "\t\t#{command.help}\n" end Assetbuild::Builder::Shell::CommandResultCompleted.new( message ) end # # Common threads view function. # def threads_func( ) message = '' Thread.list.each do |thr| message += "\t#{thr.inspect}: #{thr[:name]}\n" end Assetbuild::Builder::Shell::CommandResultCompleted.new( message ) end # # Get logfile function. # def get_logfile_func() Assetbuild::Builder::Shell::CommandResultCompleted.new( get_xge_log() ) end # # Build a string of modifications using p4 filespecs or local paths. # def build( modifications_string ) throw ArgumentError.new( 'Invalid modifications object.' ) \ unless ( ( nil != modifications_string ) and ( modifications_string.is_a?( String ) ) ) result = nil # Protect from multiple simultaneous builds by using a mutex lock. log_info "Acquiring build lock" @build_lock.synchronize do log_info "Acquired build lock" @each_build_outputter.roll() clear_log_files() logfiles_dst = [] @num_builds += 1 log_info "============ Started Build \##{@num_builds} @ #{Time.now} =============" modifications = modifications_string.split # strip out the wildcard info ( for event reporting ) and rebuild flag # DW : Not ideal however, the main Cruise Control CI process passes p4 paths into this. # In order that I can differentiate what a CC rebuild is and a CC CI is without adding extra command obsfucation # then this can be inferred whether there are soley p4 paths passed here. # The rebuild flag USED to differentiate these properly, but since all builds became # rebuilds due to the TCS deps I can no longer use this - and to retrospectively define some 'true rebuild' flag is just going to confuse people. wildcard = "" rebuild = false is_a_cc_rebuild = false cc_project_name = "" if ( modifications.length > 0) modifications.delete_if do |mod| # check if a cruise control rebuild if (mod=~REGEX_CC_REBUILD) is_a_cc_rebuild = true true elsif (mod=~REGEX_WILDCARD) # check if we are telling the engine what cruise control wildcard was used wildcard = $1 # The special syntax for recursion needs to be presented nicely. if (wildcard.include?(NO_RECURSE_CHARACTER)) wildcard = wildcard.gsub(NO_RECURSE_CHARACTER,"") wildcard += " (non-recursive)" else wildcard += " (recursive)" end true elsif (mod=~REGEX_CC_PROJECTNAME) cc_project_name = $1 true elsif (mod=~REGEX_REBUILD) # check a rebuild of the assets is to be done. rebuild = true true end end end log_info "=== is_a_cc_rebuild #{is_a_cc_rebuild}" event_time = report_event_started("rebuild", wildcard, is_a_cc_rebuild, cc_project_name) if (is_a_cc_rebuild) log_info "BUILD requested #{modifications.length} modifications : #{modifications_string}" # If we lose our Perforce connection, server down for # example, attempt to reconnect. 
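            # Illustrative example of the token protocol stripped above (all values are
            # hypothetical). Given a modifications_string of:
            #   "cc_rebuild wildcard=//depot/export/maps/...< cc_project_name=maps_ci //depot/export/maps/foo.xml"
            # the loop sets is_a_cc_rebuild = true, wildcard = "//depot/export/maps/... (non-recursive)",
            # cc_project_name = "maps_ci", and leaves modifications = [ "//depot/export/maps/foo.xml" ].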
@p4.connect() unless ( @p4.connected?() ) logfile_dst = nil begin build_start = Time.now sync( :tools_config ) # contains anim related compression stuff sync( :tools_lib_util_ragebuilder ) # contains convert.rbs sync( :tools_bin ) # contains stuff like animcombine.exe sync( :common ) # contains shaders for asset conversion pipeline_config = Pipeline::Config.instance( ) if pipeline_config.user.is_builder_server() # do not sync to platform data unless a builder server - since we do not submit CLs on the 'user' machines - across CC rebuilds data would simply be lost. sync( :platform_data ) # synced in order that users that submit platform versions along with their independent files don;t then prevent a checkin of the platform data since we would not be at the head revision and would need to resolve. end #sync( :assets ) # ragebuilder has deps on this folder DW - 08-07-2010 sync( :assets_metadata ) # ragebuilder has deps on this folder - UPDATED TO BE JUST THIS FOLDER 10/03/11 sync( :assets_maps_parenttxds ) # DW UPDATED 18-11-11 sync( :assets_processed ) # DW UPDATED 20-12-11 # Main Build begin files, deleted_files = [], [] fstat_results = @p4.run_fstat_escaped( modifications ) fstat_results.compact! # paranoia highest_change_number = -1 additional_description = nil max_additional_descriptions = MAX_ADDITIONAL_DESCRIPTIONS fstat_results.each do |fstat| # DW - these are not actually the 'headAction' and 'headChange' since the fstats was a query of revision specifying filespecs. filename = fstat['clientFile'] depot_filename = fstat['depotFile'] action = fstat['headAction'] change_number = fstat['headChange'].to_i revision = fstat['headRev'].to_i if (revision<=0) $stderr.puts("Revision of #{filename} is bad! ( #{revision} ) ") else highest_change_number = change_number if change_number > highest_change_number if ( 0 == 'delete'.casecmp( action ) ) log_info "deleted #{filename}" deleted_files << OS::Path::normalise( filename ) else log_info "add or edit #{filename}" files << OS::Path::normalise( filename ) end if ( max_additional_descriptions > 0 ) additional_description = "Independent files built (truncates at #{MAX_ADDITIONAL_DESCRIPTIONS}):\n" if additional_description.nil? additional_description += "#{change_number} #{action} #{depot_filename}\##{revision}\n" if rebuild additional_description += "#{action} #{depot_filename}\##{revision}\n" unless rebuild max_additional_descriptions -= 1 end end end log_info "Getting Changelist for change #{highest_change_number}" changelist = @p4.run_describe( '-s', highest_change_number.to_s ).shift if highest_change_number >= 0 if (changelist.nil? and not rebuild) $stderr.puts "changelist is nil for changelist #{highest_change_number.to_s} !!!!" else changelist = {} if ( rebuild ) log_info "Converting" do_rebuild = rebuild do_rebuild = true # DW - hack for rebuild for now... its a hack because of the ocde path rebuild or build takes - we don't want a rebuild to sync to files so we force it to rebuild at the last minute... log_info("******* - THIS IS A REBUILD - DUE TO TCS DEPENDENCIES - *******", true) if do_rebuild convert_data( nil, files, deleted_files, do_rebuild, changelist, true, additional_description ) log_info "Converted" end rescue Exception => ex log_exception( ex, "Exception during asset build:" ) end # Log time taken and finalise reports. 
log_time_msg( Time.now ) rescue Exception => ex log_exception( ex, "build() exception:" ) end # Force garbage collection GC::start() # Move the logfile before the build lock is released to ensure no log file ever gets overwritten. log_info "move log files" move_log_files( logfiles_dst ) log_info "============ Completed Build \##{@num_builds} @ #{Time.now}=============" @each_build_outputter.roll() result = Assetbuild::Builder::Shell::CommandResultCompleted.new( "Build completed. Logfiles : #{logfiles_dst.join(' ')}" ) if (is_a_cc_rebuild) report_event_finished(result, event_time) end end return result end # # Move the logfile before the build lock is released to ensure no log file ever gets overwritten. def move_log_files( logfiles_dst ) logfiles_src = [] begin puts "adding log files" logfiles_src << get_xge_log() logfiles_src << get_main_log_each_build() puts "added #{logfiles_src.length} log files" # logfiles must be closed ( temporarily ) before we can read em. close_logfiles() logfiles_src.each do |logfile_src| if (File.exist?(logfile_src)) logfile_dst = logfile_src.gsub(".log",".cruisecontrol.log") # #{now.strftime('%Y-%m-%d.%H-%M-%S')}.log logfiles_dst << logfile_dst # DW - I put this in because Fileutils.mv kept hanging, so did FileUtils.cp. # eventually it seems that a file.rename is robust, however the sleep may in fact be unnecessary. Kernel.sleep(5) puts "Copying #{logfile_src} to #{logfile_dst}" FileUtils::Verbose.copy(logfile_src, logfile_dst) puts "Log File : #{logfile_dst}" else puts "Log File : empty" end end reopen_logfiles() rescue Exception => ex puts "Unhandled exception: #{ex.message}" puts "Call stack:" puts ex.backtrace.join( "\n\t" ) log_exception( ex, "Monitor exception:" ) end end # Reopen logfiles # def reopen_logfiles() @shell_log.reopen_file() @build_log.reopen_file() Pipeline::LogSystem::instance().rootlog.reopen_file() Assetbuild::Builder::Shell::CommandResultCompleted.new( "Logfiles reopened.") end # Closes logfiles # def close_logfiles() log_info "close_logfiles" @shell_log.close_file() @build_log.close_file() Pipeline::LogSystem::instance().rootlog.close_file() Assetbuild::Builder::Shell::CommandResultCompleted.new( "Logfiles closed.") end # Trys to get an exclusive lock on a file # if it can't get it an idx is incremented # thereafter the filename will not be used again. # def get_lock( filename ) ok = true if File.exist? filename File.open(filename, 'r+') do |f| # get exclusive lock if f.flock File::LOCK_EX | File::LOCK_NB # release the lock f.flock File::LOCK_UN else @lock_problem_counter = @lock_problem_counter + 1 # ensures we dont use the same log file again log_error "file was not able to be exclusively locked #{filename}" ok = false end end end ok end # Obliterate logfiles - don;t wabnt stale log file data coming through. # - Not elegant but does the job. # logfiles are a total complete and utter nightmare! - TODO: review logfiles and how they work and come up with something better! def clear_log_files() begin if (not get_lock(get_xge_log())) return end if (not get_lock(get_main_log_each_build())) return end puts "deleting log files #{get_xge_log()} #{get_main_log_each_build()}" close_logfiles() if File.exist? get_xge_log() puts "delete #{get_xge_log()}" File.delete(get_xge_log()) puts "new #{get_xge_log()}" File.new(get_xge_log(), "w") end if File.exist? 
get_main_log_each_build() puts "delete #{get_main_log_each_build()}" File.delete(get_main_log_each_build()) puts "new #{get_main_log_each_build()}" File.new(get_main_log_each_build(), "w") end reopen_logfiles() rescue Exception => ex puts "Exception during log delete: #{ex.message}" puts "Call stack:" puts ex.backtrace.join( "\n\t" ) end end # # Start build engine, spawning a separate thread for the build loop. # This allows the interpreter to kill the thread, and restart the # build loop should the config change or a build stall. # def start( ) # Start up our command processing. start_queue_processing( ) # Start up our local Console shell. @shell.start( ) end # # Shutdown our AutoBuild engine. # def exit( ) puts "Shutting down..." @shell.stop( ) @running = false shutdown( ) end # # Temporary measure to get the logifle. # def get_xge_log_dir( ) xge_folder = XGE::get_temp_dir( @project.name, @branch.name ) xge_packet_folder = OS::Path::combine( xge_folder, 'convert' ) end def get_xge_log_filename( ) counter = (@lock_problem_counter==0) ? "" : @lock_problem_counter.to_s 'convert#{counter}.log' end def get_xge_log( ) OS::Path.combine( get_xge_log_dir( ), get_xge_log_filename( ) ) end def get_main_log_filename( ) counter = (@lock_problem_counter==0) ? "" : @lock_problem_counter.to_s "assetbuilder#{counter}.log" end # DW - TODO : these directories can be derived better; rootLog = Pipeline::LogSystem::instance.rootlog def get_main_log_dir ( ) OS::Path::combine(Pipeline::Config::instance().toolsroot, "logs") end def get_main_log( ) OS::Path.combine( get_main_log_dir( ), get_main_log_filename( ) ) end def get_main_log_each_build( ) counter = (@lock_problem_counter==0) ? "" : @lock_problem_counter.to_s "#{get_main_log( )}.each_build#{counter}.log" end #--------------------------------------------------------------------- # Private Methods #--------------------------------------------------------------------- private # # Asset Builder environment initialisation. # def env_init( ) @env = Environment.new() @project.branches[@branch.name].fill_env( @env ) end #--------------------------------------------------------------------- # Build Loop Utility Methods #--------------------------------------------------------------------- # # Rebuild data function. The content parameter may be nil, a single content node # or an Array of content nodes. # def rebuild_data( files ) throw ArgumentError.new( "Invalid filenames Array (#{files.class})." ) \ unless ( files.is_a?( Array ) ) begin rebuild_start = Time.now msg = "Manual data rebuild:" files.each do |filename| msg += "\n#{filename}"; end @build_log.info( msg ) report = ReportRebuildRagebuilder.new( @project, @branch.name, msg, files ) # We don't fetch latest shaders for asset conversion as this # may be required to be done manually. # Rebuild our content convert_data( report, files, [], true, {}, true ) # Log time taken and finalise reports. log_time_msg( rebuild_start ) report_finalise( report ) # Force garbage collection GC::start() rescue Exception => ex log_exception( ex, "Rebuild data exception:" ) end end # # Given a list of platform files that are new ( files) and are requiring deleted ( deleted_files ) # return a list of platform added, edited and deleted files that truly represents what action # is required on these files. To clarify a file that is wished to be deleted might already be delted, so no action is required. # A file that is new could be either edited or added so determine which. 
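        # Illustrative classification, driven by each platform file's fstat head action and
        # head revision (cases are hypothetical):
        #   requested file,   headAction 'delete' or headRev <= 0 -> platform_added_files   (p4 add)
        #   requested file,   otherwise                           -> platform_edited_files  (p4 edit)
        #   requested delete, headAction 'delete' or headRev <= 0 -> no action (already gone)
        #   requested delete, otherwise                           -> platform_deleted_files (p4 delete)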
def get_added_edited_deleted_platform_files( files, deleted_files ) platform_added_files, platform_edited_files, platform_deleted_files = [], [], [] #------------------------------------------------------ # Get possible Added or Edited files if (files and files.length > 0) fstat_results = @p4.run_fstat_noncompact( files ) if fstat_results fstat_results.each do |fstat| # DW - these are not actually the 'headAction' and 'headChange' since the fstats was a query of revision specifying filespecs. filename = fstat['clientFile'] action = fstat['headAction'] revision = fstat['headRev'].to_i if ( 0 == 'delete'.casecmp( action ) or revision <= 0 ) platform_added_files << OS::Path::normalise( filename ) else platform_edited_files << OS::Path::normalise( filename ) end end end end #------------------------------------------------------ # Get possible Deleted Files if (deleted_files and deleted_files.length > 0) fstat_results = @p4.run_fstat_noncompact( deleted_files ) if fstat_results fstat_results.each do |fstat| # DW - these are not actually the 'headAction' and 'headChange' since the fstats was a query of revision specifying filespecs. filename = fstat['clientFile'] action = fstat['headAction'] revision = fstat['headRev'].to_i if ( 0 == 'delete'.casecmp( action ) or revision <= 0 ) # already deleted or doesn't exist - no action required. else platform_deleted_files << OS::Path::normalise( filename ) # the file we want to delete exists, so just delete it. end end end end # This output will be put into report - do not remove. platform_added_files.each { |file| log_info ":Platform Added File #{file}" } platform_edited_files.each { |file| log_info ":Platform Edited File #{file}" } platform_deleted_files.each { |file| log_info ":Platform Deleted File #{file}" } return platform_added_files, platform_edited_files, platform_deleted_files end # # Return three arrays of filenames that are all outputs. The output # arrays are in: added, edited, deleted respectively. # def get_output_files( export_files, export_deleted_files ) begin raise ArgumentError.new( "Invalid export_files Array specified (#{export_files.class})." ) \ unless ( export_files.is_a?( Array ) ) raise ArgumentError.new( "Invalid export_deleted_files Array specified (#{export_deleted_files.class})." ) \ unless ( export_deleted_files.is_a?( Array ) ) files_content = ProjectUtil::data_content_for_files( @project, export_files ) deleted_files_content = ProjectUtil::data_content_for_files( @project, export_deleted_files ) puts "EXPORT FILES: " puts export_files.join("\n") # Determine our content-tree defined outputs; this is typically # for the processed files that will need to be checked out etc. # We also use this array to determine our platform files. output_files = [] # we want to exclude certain files from the conversion files_to_exclude = [] if ( files_content.nil? or 0 == files_content.size ) then # This case will likely not be hit, as data_content_for_files returns nil entries # for files not in the content tree (see special case below). puts "CONTENT NIL" output_files += ProjectUtil::data_convert_platform_filenames( @project, @branch, export_files ) else puts "ELSE" # here we need to get any dependencies before we continue - JWR files_content = get_content_with_dependencies( files_content ) files_content.each_with_index do |content, index| if ( content.nil? ) then # This case handles files that do not appear in our content tree. 
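                        # For these, the export filename is mapped straight to its platform
                        # filenames via data_convert_platform_filenames below.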
puts "\tFILE: #{export_files[index]}" output_files += ProjectUtil::data_convert_platform_filenames( @project, @branch, export_files[index] ) else # JWR - B* 692193 - prevent scene xml files making it into the platform data if content.is_a?( Pipeline::Content::MapSceneXml ) then files_to_exclude << export_files[index] else output_files += ProjectUtil::data_content_get_output_files( @project, @branch, content, true ) end end end end puts "OUTPUT FILES: " puts output_files.join("\n") output_files.uniq! # Determine the platform defined outputs; as we did before. deleted_files = ProjectUtil::data_convert_platform_filenames( @project, @branch, export_deleted_files ) platform_added_files, platform_edited_files, platform_deleted_files = get_added_edited_deleted_platform_files( output_files, deleted_files ) return platform_added_files, platform_edited_files, platform_deleted_files, files_to_exclude rescue Exception => ex puts "UNHANDLE EX: #{ex.message}" puts ex.backtrace.join( "\n" ) end end def get_content_with_dependencies( input_nodes ) dependency_nodes = [] #get dependencies for the input_filenames input_nodes.each do |input_node| if( input_node != nil and (input_node.is_a?( Pipeline::Content::MapZip )) ) then ProjectUtil::get_lod_dependency_nodes( input_node, dependency_nodes ) end end #combine and remove duplicates - do not use uniq! dependency_nodes.delete_if do |node| node.nil? or input_nodes.include?(node) end output_nodes = input_nodes + dependency_nodes output_nodes end # # Get the modified times of files storing in a preexisting hash # against a key that describes an event on that file. Used for pre/post convert times. # def get_modified_times_of_files(modified_times, files, key) log_info("get modified times of files") files.each do |filename| filename = OS::Path::normalise(filename) modified_times[filename] = { key => File.mtime(filename) } if File.exist?(filename) end end # # Build data function. Checks out the files and does a data build. # def convert_data( report, files, deleted_files, rebuild = false, changelist = {}, revert = false, additional_description = nil ) raise ArgumentError.new( "Invalid report specified (#{report.class})." ) \ unless ( report.nil? or report.is_a?( Pipeline::Builder::ReportBase ) ) raise ArgumentError.new( "Invalid files Array specified (#{files.class})." ) \ unless ( files.is_a?( Array ) ) raise ArgumentError.new( "Invalid deleted_files Array specified (#{deleted_files.class})." ) \ unless ( deleted_files.is_a?( Array ) ) begin pipeline_config = Pipeline::Config.instance( ) project = pipeline_config.project project.load_content( @branch.name, true ) if not pipeline_config.user.is_builder_server() log_info "******* - Current user is not a builder_server(), so no changelist will be created and no platform data will be committed - ******* " end env = Environment.new() @project.branches[@branch.name].fill_env( env ) time_format = "%m/%d/%Y %H:%M:%S" time_of_change = changelist.has_key?( 'time' ) ? Time.at(changelist['time'].to_i).strftime(time_format) : "Unknown date/time" desc = "" desc = changelist['desc'] unless changelist.empty? 
desc = desc.gsub(/Error/i, "E rror") if desc desc = desc.gsub(/Warning/i, "W arning") if desc comment = "#{@project.uiname} Asset Builder platform asset build [ processed at #{Time.now.strftime(time_format)} ]\n" + \ "Branch: #{@branch.name}\n" + \ "Built because of changelist: #{changelist['change']} by #{changelist['user']}@#{changelist['client']} on #{time_of_change}.\n\n" + \ "Changelist description:\n#{desc}\n" \ unless ( changelist.empty? ) comment = "#{@project.uiname} Asset Builder platform asset build #{Time.now.strftime(time_format)}.\n" + "Branch: #{@branch.name}\n" + \ "No associated changelist." if ( changelist.empty? ) comment += "\n#{additional_description}" if ( additional_description ) # Checkout platform files. @p4.connect( ) unless ( @p4.connected? ) change_id = @p4.create_changelist( comment ) if pipeline_config.user.is_builder_server() log_info "Changelist #{change_id} is created : #{comment}" if pipeline_config.user.is_builder_server() platform_added_files, platform_edited_files, platform_deleted_files, files_to_exclude = [] begin # DW - work out which platform files require added, edited and deleted. # since the actions on the ind files don't necessitate that same action on the platform files # because the platform files CAN be in any state : eg. ind file is edited but plat file is deleted, ind file is deleted but plat file already deleted. platform_added_files, platform_edited_files, platform_deleted_files, files_to_exclude = get_output_files( files, deleted_files ) puts "PLATFORM ADD:" platform_added_files.each do |filename| puts "\t#{filename}" end puts "PLATFORM EDIT:" platform_edited_files.each do |filename| puts "\t#{filename}" end puts "PLATFORM DEL:" platform_deleted_files.each do |filename| puts "\t#{filename}" end puts "EXCLUDE:" files_to_exclude.each do |filename| puts "\t#{filename}" file_to_exclude_index = files.find_index(filename) if (file_to_exclude_index != nil) then files.delete_at(file_to_exclude_index) puts "\twas removed" end end rescue Exception => ex puts "Ex: #{ex.message}" puts ex.backtrace.join("\n") end # DHM TESTING -- DONT CHECKOUT FILES UNTIL THEY ARE BUILT. # HOPEFULLY FIX THE FILETYPE ISSUES log_info "Checking out edited platform files (#{platform_edited_files.length})" if pipeline_config.user.is_builder_server() checkout_platform_files( change_id, [], platform_edited_files ) if pipeline_config.user.is_builder_server() # get the modified time of files before convert modified_times = {} get_modified_times_of_files(modified_times, platform_edited_files, "preconvert") # Do actual platform data conversion. 
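            # data_convert_file yields each converted content node together with a success
            # flag; failures are logged below but the file is deliberately not reverted
            # (the revert call is left commented out).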
ProjectUtil::data_convert_file( files, rebuild ) do |node, success| next unless ( node.is_a?( Pipeline::Content::File ) ) if ( success ) then message = "Converted: #{node.filename}" log_info message else message = "Failed to convert, (however revert is disabled): #{node.filename}" log_info(message, true) @build_log.error( message ) ## @p4.run_revert( node.filename ) end end if ( files.size > 0 ) # get the modified time of files after convert get_modified_times_of_files(modified_times, platform_edited_files, "postconvert") if pipeline_config.user.is_builder_server() log_info "Checking out added platform files (#{platform_added_files.length})" if pipeline_config.user.is_builder_server() checkout_platform_files( change_id, platform_added_files, [] ) if pipeline_config.user.is_builder_server() log_info "Delete platform files #{platform_deleted_files.length}" if pipeline_config.user.is_builder_server() delete_platform_files( report, change_id, platform_deleted_files ) if pipeline_config.user.is_builder_server() # Checkout/add files again so that we can add new files. This is # in case the content configuration is updated with new files. log_info "checkin revert files" if pipeline_config.user.is_builder_server() checkin_revert_files( report, change_id, modified_times, changelist, revert ) if pipeline_config.user.is_builder_server() # Parse our converter output to append it to our report. if report log_info "Parse Report" parse_report( report ) end @p4.disconnect() rescue Exception => ex log_exception( ex, "convert_data exception:" ) end end # # Parse our converter output to append it to our report. def parse_report( report ) raise ArgumentError.new( "Invalid report specified (#{report.class})." ) \ unless ( report.is_a?( ReportRebuildRagebuilder ) ) log_info "Parsing convertor output" # Now use the C# parser since ruby would run out of memory # and be too slow... begin # just pass the errors on... cmd = OS::Path.combine(Pipeline::Config::instance.toolsbin, "errorparser.exe") log_info "running cmd #{cmd}" output, errors, warnings = Resourcing::ConvertSystem::instance().get_converter_output( cmd, ERROR_REGEXP, WARNING_REGEXP ) log_info "Setting report errors and warnings" report.set_output_errors_warnings( output, errors, warnings ) rescue Exception => ex log_exception( ex, "Exception during error parsing:" ) end end # # Write out report data. # def report_finalise( report, changelist = {}, max_id = -1, subject = nil ) # Ensure our output directory exists if ( not File.directory?( @config.report.path ) ) then FileUtils.mkdir_p( @config.report.path ) end user = ( changelist.empty? ? Socket.gethostname() : changelist['user'] ) id = ( changelist.empty? ? 'rebuild' : changelist['change'] ) now = Time.now report_date = now.strftime( '%Y-%m-%d' ) report_filename = '' if ( changelist.empty? 
) then report_filename = "rebuild_#{now.strftime('%Y-%m-%d %H-%M-%S')}_#{project.name}_report.xml".gsub( ' ', '_' ) else report_filename = "#{id}_#{project.name}_report.xml".gsub( ' ', '_' ) end report_path = OS::Path.combine( @config.report.path, report_date, report_filename ) report_path = report_path.gsub("assetbuilder","assetbuilder_hw") report_url = @config.report.webserver + '/' + OS::Path::combine( report_date, OS::Path::replace_ext( report_filename, 'html' ) ) report.save( report_path, true, report_url ) # Run XSLT and write as HTML xslt_report = Pipeline::Builder::XSLTReportWriter.new( report_path ) html_filename = OS::Path.replace_ext( report_path, 'html' ) xslt_report.write( @config.report.xslt_html, html_filename ) # Send email report email_report_html = Pipeline::Builder::HTMLEmailReportWriter.new( report_path ) if ( report.error_count > 0 ) then subject = "#{@project.uiname} #{@branch.name}: #{id}: *** ERRORS *** [#{user}|#{id}/#{max_id}]" if subject.nil? else subject = "#{@project.uiname} #{@branch.name}: #{id}: OK [#{user}|#{id}/#{max_id}]" if subject.nil? end email_report_html.write( @config.report.email.xslt, subject, @config.report.email.addresses[:from], @config.report.email.addresses[:list], @config.report.email.addresses[:maintainer], @config.report.email.aliases[:from], @config.report.email.aliases[:list], @config.report.email.aliases[:maintainer] ) puts "HTML report saved to #{html_filename}" report.close() end # # Log and print the generic synced to changelist message. # def log_sync_msg( monitor, changelist, skipped ) message = "Synced #{monitor.root_folder}@#{changelist['change']}, skipped: #{skipped}" log_info message end # # Log an exception. # def log_exception( ex, prefix = 'Error : Unhandled exception:' ) message = "#{prefix} #{ex.message}" enqueue_command_result( Assetbuild::Builder::Shell::CommandResultProgress.new( message ) ) @build_log.error( message ) ex.backtrace.each do |m| puts m @build_log.error( "Error : #{m}" ) end end # # Log and print the build done with time message. # def log_time_msg( build_start ) build_time = ( Time.now - build_start ) message = "Build done: #{build_time} seconds" #enqueue_command_result( Assetbuild::Builder::Shell::CommandResultProgress.new( message ) ) @build_log.info( message ) end # # Checkout platform files for our project given the array of files # to checkout (platform files, Array). # def checkout_platform_files( change_id, add_files, edit_files, lock = false ) throw ArgumentError.new( "Invalid files list (#{add_files.class})." ) \ unless ( add_files.is_a?( Array ) ) throw ArgumentError.new( "Invalid files list (#{edit_files.class})." ) \ unless ( edit_files.is_a?( Array ) ) begin log_info "Checking out files (#{add_files.size}) for add & convert..." add_files.each do |filename| if (not File.file?(filename) ) log_info "Error: The file #{filename} does not exist yet we are about to add it. This is called AFTER conversion." next end log_info "Add: #{filename} CL #{change_id.to_s}" @p4.run_edit_or_add( '-c', change_id.to_s, filename ) log_info "Added: #{filename} in CL #{change_id.to_s}" # Get basetype. fstat = @p4.run_fstat_escaped( filename ).shift log_info "fstat succeeded for: #{filename}" basetype, modifiers = fstat['type'].split( '+' ) puts "Basetype: #{basetype}" puts "Modifiers: #{modifiers}" @p4.run_reopen( '-t', "#{basetype}+S16w", filename ) end @build_log.info( "Checking out files (#{edit_files.size}) for edit & convert..." 
) edit_files.each do |filename| #if (not File.file?(filename)) # log_info "Error: The file #{filename} does not exist to checkout for edit." # next #end log_info "Checkout: #{filename} CL #{change_id.to_s}" @p4.run_edit( filename ) # Removing "-t +S10w" so we can maintain the modifiers. @p4.run_reopen( '-c', change_id.to_s, filename ) end rescue P4Exception => ex log_exception( ex, "Error : Perforce exception during checkout:" ) end end # # Delete platform files for our project. # def delete_platform_files( report, change_id, files ) throw ArgumentError.new( "Invalid report specified (#{report.class})." ) \ unless ( report.nil? or report.is_a?( Pipeline::Builder::ReportBase ) ) throw ArgumentError.new( "Invalid files list (#{files.class})." ) \ unless ( files.is_a?( Array ) ) begin log_info "Deleting files (#{files.size})..." files.each do |filename| log_info "Deleting: #{filename} CL #{change_id.to_s}" @p4.run_delete( '-c', change_id.to_s, filename ) end rescue P4Exception => ex log_exception( ex, "Perforce exception during delete:" ) end end # # Checkin currently opened files. # def checkin_revert_files( report, change_id, modified_times, changelist = {}, revert = true ) log_info "Checkin is enabled" if @options['checkin'] log_info "reverting!" if revert return unless @options['checkin'] throw ArgumentError.new( "Invalid report specified (#{report.class})." ) \ unless ( report.nil? or report.is_a?( Pipeline::Builder::ReportBase ) ) begin revert_results = @p4.run_revert( '-a', '-c', change_id.to_s ) if revert log_info( "Reverting unchanged #{revert_results.length} files...", true ) if revert if ( ( not revert_results.nil? ) ) then report.additional << 'The following files were reverted:' \ unless ( report.nil? or revert_results.empty? ) revert_results.each do |revert| next unless ( revert.is_a?( Hash ) ) report.additional << revert['clientFile'] unless report.nil? # check the modified time - it might be worth issuing a warning that this file was not modified. filename = OS::Path::normalise(revert['clientFile']) log_info "EngineMesssage: *** REVERTED (unchanged) #{filename} ***" if ( modified_times and modified_times[filename] and modified_times[filename]["preconvert"] == modified_times[filename]["postconvert"] ) @build_log.warn("Warning: Revert unchanged #{filename} : Contents AND Timestamp unchanged. File did not convert?") end end end # Submit will raise an exception if there are no files to submit # so we detect here if we need to submit. files = @p4.run_opened( '-c', change_id.to_s, '-C', @p4.client ) if ( files.size > 0 ) then log_info "Check in (#{files.size}) files currently in CL #{change_id.to_s}...( *** which may not be the final CL *** )" begin # Attempt a submit, if a P4Exception is raised then we # assume its because we have unresolved files. #@p4.run_unlock( '-c', change_id.to_s ) submit_result = @p4.run_submit( '-c', change_id.to_s ) submit_result.each do |sr| log_info "Checking in files (#{files.size}) in CL #{sr['submittedChange'].to_s}..." if sr.has_key?('submittedChange') end rescue P4Exception => ex log_info "Error : Checking in files hit a P4Exception - are the files locked? or are they needing resolved?" begin # We assume that the submit failed because files need # to be resolved so lets try that, accepting ours. log_info "Resolving" resolve_results = @p4.run_resolve( '-ay' ) if ( ( not resolve_results.nil? ) ) then report.additional << 'The following files were resolved:' \ unless ( report.nil? or resolve_results.empty? 
                                    )
                                resolve_results.each do |resolve|
                                    next unless ( resolve.is_a?( Hash ) )
                                    report.additional << resolve['clientFile'] unless report.nil?
                                end
                            end
                            #@p4.run_unlock( '-c', change_id.to_s )
                            log_info "Submitting (post resolve) - it still may not work if, for example, the file was locked."
                            @p4.run_submit( '-c', change_id.to_s )
                        rescue P4Exception => ex
                            log_info "Error : Dealing with the failed submit or resolve has resulted in further errors - was the target file locked?"
                        end
                    end
                else
                    # Remove empty changelist.
                    #@p4.run_unlock( '-c', change_id.to_s )
                    @p4.delete_changelist( change_id )
                    log_info "No files to checkin. CL #{change_id.to_s} is now deleted."
                    @build_log.debug( "No files to checkin." )
                end
            rescue P4Exception => ex
                log_exception( ex, "Error: Perforce exception during revert/checkin:" )
            end
        end

    end

end # AssetBuild module

# End of engine.rb
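# Example usage (an illustrative sketch only; assumes the project, branch and builder
# configuration objects are provided by the asset-builder bootstrap script, and the
# branch name 'dev' is hypothetical):
#
#   pipeline_config = Pipeline::Config.instance()
#   project         = pipeline_config.project
#   branch          = project.branches[ 'dev' ]
#   engine          = AssetBuild::Engine.new( project, branch, builder_config )
#   engine.start()   # starts command-queue processing and the interactive console shell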