# NOTE(review): the lines below were repository web-UI listing residue, not Ruby
# source; they are preserved here as a comment so the file parses.
#   gtav-src/tools_ng/lib/util/codetest_util.rb
#   2025-09-29 00:52:08 +02:00 — 909 lines, 34 KiB, Ruby, executable file
#
# File:: codetest_util.rb
# Description:: Multipurpose script for use with codetest stats.
# - Publishes binaries and modifications.xml in perforce - for use by codebuilder.
# - Also is used to copy these published files.
# - Also is used to sync data
# - Also handles aggregating modifications.
# - Also checks modifications to check if we skip a build.
# - Also checks executable to see how far it is behind the head.
# - this should really be split into several files - but I hate to maintain too many, hence a helper script for all related.
#
# Author:: Derek Ward <derek.ward@rockstarnorth.com>
# Date:: 23rd August 2011
#
# Passed in :- see OPTIONS ...
# Passed out :- stderr contains all errors
# stdout for all other output.
# Returns :- returns non zero upon detecting any errors
#-----------------------------------------------------------------------------
# Uses / Requires
#-----------------------------------------------------------------------------
require 'pipeline/config/projects'
require 'pipeline/os/getopt'
require 'pipeline/os/file'
require 'systemu'
require 'rexml/document'
require 'fileutils'
include Pipeline
#-----------------------------------------------------------------------------
# Constants
#-----------------------------------------------------------------------------
# Command-line options understood by this script (parsed via OS::Getopt).
OPTIONS = [
  [ "--help", "-h", OS::Getopt::BOOLEAN, "display usage information." ],
  [ '--filespec', '-f', OS::Getopt::REQUIRED, 'the filespec of the files to publish' ],
  [ '--publish_folder', '-pf', OS::Getopt::REQUIRED, 'the folder the files are published to' ],
  [ '--enable_checkin', '-c', OS::Getopt::OPTIONAL, 'enable checkin - default is off' ],
  [ '--copy_dir', '-cd', OS::Getopt::OPTIONAL, 'the directory to copy everything to' ],
  [ '--sync_data', '-sd', OS::Getopt::BOOLEAN, 'sync to the data of the modifications file source code commits' ],
  [ '--build_shaders', '-sh', OS::Getopt::BOOLEAN, 'sync to the shaders and build them' ],
  [ '--aggregate', '-a', OS::Getopt::BOOLEAN, 'aggregate modifications that were skipped to the current revision we are on' ],
  [ '--check_skip', '-cs', OS::Getopt::OPTIONAL, 'the number of tests from the head in which we would want to skip to the head' ],
  [ '--check_exe', '-ce', OS::Getopt::BOOLEAN, 'tests the exe to see if it was in the CLs processed otherwise we have nothing to test' ],
  [ '--check_capture', '-cc', OS::Getopt::BOOLEAN, 'tests the capture to see if it was in the CLs processed otherwise we have nothing to do' ],
  [ '--modifications_file','-m', OS::Getopt::OPTIONAL, 'the modifications file' ],
  [ '--exe', '-e', OS::Getopt::OPTIONAL, 'the filename of the executable' ],
  [ '--capture_file', '-cf', OS::Getopt::OPTIONAL, 'the filename of the capture file' ],
]
# Message prefixes consumed by the build-log colouriser.
INFO = "[colourise=black]INFO_MSG: "
INFO_BLUE = "[colourise=blue]INFO_MSG: "
MSG_PREFIX_EMAIL = "[colourise=blue]INFO_EMA: "
MSG_PREFIX_WEB = "[colourise=blue]INFO_WEB: "
INFO_GREEN = "[colourise=green]INFO_MSG: "
INFO_GREY = "[colourise=grey]INFO_MSG: "
INFO_ORANGE = "[colourise=orange]INFO_MSG: "
MSG_PREFIX = "#{INFO_GREEN} CodeTestUtil:"
MSG_PREFIX_PERSIST = "#{INFO_GREEN} CodeTestUtil:"
MSG_PREFIX_COMMANDLINE = "#{INFO_GREY}"
PUBLISH_FOLDER = "cruisecontrol_builds/codebuilder" # the folder for publishing the files to
REVISION_LIMIT = 32 # the number of revisions to store in p4
# Extensions of files that will NOT be published - an exclusion list so that new
# filetypes make it through the publish step as they are created.
INVALID_EXTENTIONS = ["pdb","xdb","idb","ib_pdb_index"].freeze
# Hardcoded build configurations that are permitted to run this script.
VALID_BUILDS = ["psn_beta","psn_bankrelease","psn_release"].freeze
MODIFICATIONS_FILE = "modifications.xml"
AGGREGATE_MODIFICATIONS_FILE = "aggregate_modifications.xml"
LAST_MODIFICATIONS_FILE_REVISION_FILENAME = "last_build.txt"
# Comment stamped onto the automatically-created publish CLs; also used to
# recognise (and skip) those binary-publish CLs when reading modifications back.
PUBLISH_CL_COMMENT = "Automatically created by codetest_util.rb."
# FYI - the modifications.xml 'schema'
#
#<!-- Start of the group of modifications (even if just one). -->
#<ArrayOfModification xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
# <!-- Start of one modification. -->
# <Modification>
# <!-- The change number. -->
# <ChangeNumber>... value ...</ChangeNumber>
# <!-- The comment. -->
# <Comment>... value ...</Comment>
# <!-- The user's email address. -->
# <EmailAddress>... value ...</EmailAddress>
# <!-- The affected file name. -->
# <FileName>... value ...</FileName>
# <!-- The affect file's folder name. -->
# <FolderName>... value ...</FolderName>
# <!-- The change timestamp, in yyyy-mm-ddThh:mm:ss.nnnn-hhmm format -->
# <ModifiedTime>... value ...</ModifiedTime>
# <!-- The operation type. -->
# <Type>... value ...</Type>
# <!-- The user name. -->
# <UserName>... value ...</UserName>
# <!-- The related URL. -->
# <Url>... value ...</Url>
# <!-- The file version. -->
# <Version>... value ...</Version>
# <!-- End of modification. -->
# </Modification>
# <!-- End of the group of modifications. -->
#</ArrayOfModification>
#Pasted from <http://confluence.public.thoughtworks.org/display/CCNET/Modification+Writer+Task>
#=============================================================================================
#
# Copys dir published files to execution build folder
#
class CopyDir
  @@log = nil

  # Lazily-created logger shared by all CopyDir instances.
  def CopyDir.log
    @@log ||= Log.new( 'copy_dir' )
  end

  def initialize()
  end

  #---------------------------------------------------------------------------------------------------------
  #
  # Copy every file from the previously-synced publish folder into the directory
  # the build will actually be executed from.
  #
  # p4             - unused here; kept for interface parity with the other helpers.
  # publish_folder - folder holding the published binaries.
  # copy_dir       - destination directory for the runnable build.
  #
  def copy_dir( p4, publish_folder, copy_dir )
    wildcard = OS::Path::combine( publish_folder, "*.*" )
    published = OS::FindEx.find_files( wildcard )
    CopyDir::log.info "#{MSG_PREFIX}Copying #{published.length} files"
    published.each do |source_file|
      CopyDir::log.info "#{MSG_PREFIX} Copy #{source_file} -> #{copy_dir}"
      FileUtils.cp source_file, copy_dir
    end
    CopyDir::log.info "#{MSG_PREFIX} Copied published build to the build directory, ready for use."
  end
end # end class CopyDir
#=============================================================================================
#
# Syncs data to time of source code commits
#
class SyncData
  @@log = nil

  # Lazily-created logger shared by all SyncData instances.
  def SyncData.log
    @@log = Log.new( 'sync_data' ) if @@log == nil
    @@log
  end

  def initialize()
  end

  #---------------------------------------------------------------------------------------------------------
  #
  # Sync the build data (and optionally the shader source) to the changelist of the
  # most recent *source code* commit recorded in the published modifications.xml.
  #
  # p4             - connected SCM::Perforce instance.
  # publish_folder - folder containing the published MODIFICATIONS_FILE.
  # build_shaders  - when truthy, also sync the shader source trees to the same CL.
  #
  def sync_data( p4, publish_folder, build_shaders )
    # Read the modifications file to learn the CLs of the recent commits.
    # CLs carrying PUBLISH_CL_COMMENT are the binary checkins created by this very
    # script and must be ignored - we want the data of the original source commits.
    mods_filename = OS::Path::combine(publish_folder,MODIFICATIONS_FILE)
    doc = REXML::Document.new(File.new(mods_filename))
    mods = doc.elements.to_a( "//ArrayOfModification/Modification" )
    changes = []
    mods.each do |mod|
      change_num = mod.elements["ChangeNumber"]
      modification_is_publish_of_binaries = false
      if ( mod.elements["Comment"] and
           mod.elements["Comment"].text and
           mod.elements["Comment"].text.include?PUBLISH_CL_COMMENT)
        modification_is_publish_of_binaries = true
        SyncData::log.info "#{MSG_PREFIX} Ignoring CL #{mod.elements['ChangeNumber'].text} it was the binary checkin and should not be used to sync data to."
      end
      changes << change_num unless modification_is_publish_of_binaries
    end
    SyncData::log.info "#{MSG_PREFIX_PERSIST} read #{changes.uniq.length} changes in order to derive what we synced to."

    # Collect the numeric CLs read from the XML.
    change_numbers = []
    changes.each do |change|
      SyncData::log.debug "#{MSG_PREFIX} #{change.text}"
      change_numbers << change.text.to_i
    end
    # BUGFIX: this previously used sort[0], which is the OLDEST change, while the
    # variable name and log message claim the latest; use max to really get the
    # newest source CL to sync data against.
    newest_change_number = change_numbers.max
    SyncData::log.info "#{MSG_PREFIX_PERSIST} #{newest_change_number} is the latest CL of all changes read."
    #---------------------------------------------------------------------------------------
    build_folder = ENV["RS_BUILDBRANCH"]
    # BUGFIX: the logged command and the executed command previously differed
    # ('...' vs '/...'); build the spec once and use it for both.
    sync_cmd = "#{build_folder}/...@#{newest_change_number}"
    SyncData::log.info "#{MSG_PREFIX_PERSIST} Syncing to Data : p4 sync #{sync_cmd}"
    out = p4.run_sync(sync_cmd)
    SyncData::log.info "#{MSG_PREFIX_PERSIST} The sync of data synced : #{out.length} modifications."
    out.each { |o| SyncData::log.info " Synced : #{MSG_PREFIX_PERSIST} #{o["depotFile"]}\##{o['rev']}" }
    if (build_shaders)
      #---------------------------------------------------------------------------------------
      # shaders : treated like data : but are code really.
      rage_shader_lib = ENV["RAGE_DIR"] + "\\base\\src"
      rage_shader_lib = p4.local2depot( rage_shader_lib )
      sync_cmd = "#{rage_shader_lib}/...@#{newest_change_number}"
      SyncData::log.info "#{MSG_PREFIX_PERSIST} Syncing to Shaders : p4 sync #{sync_cmd}"
      out = p4.run_sync(sync_cmd)
      SyncData::log.info "#{MSG_PREFIX_PERSIST} The sync of data synced : #{out.length} modifications."
      out.each { |o| SyncData::log.info " Synced : #{MSG_PREFIX_PERSIST} #{o["depotFile"]}\##{o['rev']}" }
      #---------------------------------------------------------------------------------------
      shader_src = ENV["RS_CODEBRANCH"] + "\\game"
      shader_src = p4.local2depot( shader_src )
      sync_cmd = "#{shader_src}/...@#{newest_change_number}"
      SyncData::log.info "#{MSG_PREFIX_PERSIST} Syncing to Shaders : p4 sync #{sync_cmd}"
      out = p4.run_sync(sync_cmd)
      SyncData::log.info "#{MSG_PREFIX_PERSIST} The sync of data synced : #{out.length} modifications."
      out.each { |o| SyncData::log.info " Synced : #{MSG_PREFIX_PERSIST} #{o["depotFile"]}\##{o['rev']}" }
      #----------------------------------------------------------------------------------------
      # now build shaders
      # cmd = ENV["RS_CODEBRANCH"] + "\\game\\shader_source\\VS_Project\\batch\\rsm_build_psn.bat"
      # SyncData::log.info "#{MSG_PREFIX_PERSIST} (rebuild shaders) Running #{cmd}"
      # status, stdout, stderr = systemu(cmd)
      # ignore any errors for now... this isnt a shader compiler!
    end
  end
end # end class SyncData
#=============================================================================================
#
# Publishes binaries in p4
#
class PublishFiles
  @@log = nil

  # Lazily-created logger shared by all PublishFiles instances.
  # BUGFIX: the channel was previously named 'sync_data' (copy/paste from the
  # SyncData class); it now gets its own 'publish_files' channel.
  def PublishFiles.log
    @@log = Log.new( 'publish_files' ) if @@log == nil
    @@log
  end

  def initialize()
  end

  #---------------------------------------------------------------------------------------------------------
  #
  # Control logic to publish binaries in p4: find files matching the filespec,
  # open a changelist, copy/edit/add the files in the publish folder, revert
  # anything unchanged, then (optionally) submit.
  #
  # p4             - connected SCM::Perforce instance.
  # publish_folder - destination folder under source control.
  # filespec       - wildcard of local files to publish.
  # enable_checkin - when falsy, the CL is left pending rather than submitted.
  #
  def publish_files(p4, publish_folder, filespec, enable_checkin)
    PublishFiles.log.info"#{MSG_PREFIX_PERSIST} This build will submit its modifications.xml and executables into #{publish_folder}. ( build will take longer )"
    PublishFiles.log.info"#{MSG_PREFIX} publish_folder = #{publish_folder}"
    PublishFiles.log.info"#{MSG_PREFIX} filespec = #{filespec}"
    PublishFiles.log.info"#{MSG_PREFIX} enable_checkin = #{enable_checkin}"
    # ---- find files ----
    files = find_files(filespec)
    # ---- create a p4 CL ----
    change_id = create_cl(p4)
    # ---- copy files to publish folder ----
    copy_files_to_publish_folder(files, p4, publish_folder, change_id)
    # ---- revert unchanged files ----
    files_in_cl = revert_unchanged(p4, change_id)
    # ---- checkin ----
    submit(p4, files_in_cl, change_id, enable_checkin)
  end

  #---------------------------------------------------------------------------------------------------------
  #
  # Create a new pending changelist; raises if p4 did not return an id.
  #
  def create_cl(p4)
    PublishFiles.log.info"#{MSG_PREFIX} Create p4 CL"
    change_id = p4.create_changelist( "Automatically created by codetest_util.rb\n" )
    raise Exception if change_id.nil?
    PublishFiles.log.info"#{MSG_PREFIX} Created CL #{change_id}"
    change_id
  end

  #---------------------------------------------------------------------------------------------------------
  #
  # Copy each found file into the publish folder, opening it for edit (existing)
  # or add (new) in the given changelist.
  #
  def copy_files_to_publish_folder(files, p4, publish_folder, change_id)
    FileUtils::mkdir_p(publish_folder)
    PublishFiles.log.info"#{MSG_PREFIX} Copy files to publish folder and edit or add to CL"
    files.each do |file|
      src = file
      dst = publish_folder
      filename_dst = OS::Path::combine(publish_folder, OS::Path::get_filename(src))
      puts "#{MSG_PREFIX}sync and edit #{filename_dst}"
      # Sync first so the edit below is against the head revision.
      p4.run_sync(filename_dst)
      p4.run_edit( '-c', change_id.to_s, filename_dst ) if File.exist?(filename_dst)
      puts "#{MSG_PREFIX}Copying #{src} -> #{dst}"
      FileUtils.cp src, dst
      puts "#{MSG_PREFIX} Add or Edit #{filename_dst}"
      p4.run_edit_or_add( '-c', change_id.to_s, filename_dst )
      # Query the current filetype; the result is currently unused (kept for parity
      # with the original flow - the reopen below only moves the file into the CL).
      fstat = p4.run_fstat( filename_dst ).shift
      _basetype, _modifiers = fstat['type'].split( '+' )
      p4.run_reopen( '-c', change_id.to_s, filename_dst )
    end
  end

  #---------------------------------------------------------------------------------------------------------
  #
  # Revert unchanged files in the changelist and return the files still opened.
  #
  def revert_unchanged(p4, change_id)
    PublishFiles.log.info"#{MSG_PREFIX}Reverting unchanged files #{change_id}"
    p4.run_revert( '-a', '-c', change_id.to_s, '//...')
    files_in_cl = p4.run_opened( '-c', change_id.to_s )
    raise Exception if files_in_cl.nil?
    PublishFiles.log.info"#{MSG_PREFIX}There are #{files_in_cl.size} files to submit in CL #{change_id}"
    files_in_cl.each do |file|
      PublishFiles.log.info"#{MSG_PREFIX}#{file['depotFile']} has been updated and will be submitted."
    end
    files_in_cl
  end

  #---------------------------------------------------------------------------------------------------------
  #
  # Submit the changelist when checkin is enabled; delete it when it is empty.
  # When checkin is disabled the CL is deliberately left pending.
  #
  def submit(p4, files_in_cl, change_id, enable_checkin )
    if ( enable_checkin )
      if ( files_in_cl.size > 0 )
        PublishFiles.log.info"#{MSG_PREFIX}Submitting file currently in #{change_id}"
        submit_result = p4.run_submit( '-c', change_id.to_s )
        PublishFiles.log.info"#{MSG_PREFIX} Submit result : #{submit_result.to_s}"
        # revert files in case they became locked
        files_in_cl.each do |file_in_cl|
          p4.run_revert(file_in_cl)
          fstat = p4.run_fstat( file_in_cl ).shift
          PublishFiles.log.info"#{MSG_PREFIX} FSTAT: #{fstat}"
        end
      elsif ( 0 == files_in_cl.size )
        PublishFiles.log.info"#{MSG_PREFIX}Deleting #{change_id} no files changed."
        p4.run_change('-d', change_id.to_s)
      end
    else
      PublishFiles.log.info"#{MSG_PREFIX}Checkin is disabled the CL is pending."
    end
  end

  #---------------------------------------------------------------------------------------------------------
  #
  # Find the files matching the wildcard, pruning debug-symbol filetypes
  # (INVALID_EXTENTIONS). Exits the process when nothing matches.
  #
  def find_files(filespec)
    PublishFiles.log.info"#{MSG_PREFIX} Finding files in #{filespec}"
    files = OS::FindEx.find_files(filespec)
    if files.size == 0
      PublishFiles.log.warn("Warning: No files found")
      Process.exit!( 1 )
    else
      PublishFiles.log.info"#{MSG_PREFIX} pruning files"
      files.delete_if do |file|
        ext = OS::Path::get_extension(file).downcase
        prune = INVALID_EXTENTIONS.include?(ext)
        puts "#{MSG_PREFIX} pruned #{file}" if prune
        prune
      end
      files.each do |file|
        PublishFiles.log.info"#{MSG_PREFIX} Source file: #{file}"
      end
    end
    files
  end
end # end class PublishFiles
#=============================================================================================
#
# Builds an aggregate of modifications, based upon what revision of modifications we have
# and what modifications we processed last.We collect them all in order to indicate we
# are testing all these modifications. This means we have to retrieve older revisions of
# modifications from perforce.
#
class AggregateMods
#
# constructor
#
def initialize()
end
@@log = nil
# Lazily-created logger shared by all AggregateMods instances.
def AggregateMods.log
@@log = Log.new( 'aggregate_mods' ) if @@log == nil
@@log
end
#---------------------------------------------------------------------------------------------------------
#
# Build an aggregate modifications file spanning the revisions of modifications.xml
# between the last-processed revision and the revision we currently have, so that a
# skipped-ahead build still reports every modification it covers.
#
# p4                           - connected Perforce instance used to sync old revisions.
# publish_folder               - folder containing MODIFICATIONS_FILE under source control.
# have_modifications_revision  - revision of modifications.xml currently synced.
# last_modifications_revision  - revision processed by the previous run.
# last_mods_revision_filename  - file in which the processed revision is recorded.
#
# Side effects: writes AGGREGATE_MODIFICATIONS_FILE in publish_folder and rewrites
# last_mods_revision_filename with have_modifications_revision.
#
def aggregate_mods(p4, publish_folder, have_modifications_revision, last_modifications_revision, last_mods_revision_filename)
mods_filename = OS::Path::combine(publish_folder,MODIFICATIONS_FILE)
modifications_cumulative = []
# Walk the intermediate revisions (exclusive of both endpoints) and collect their
# <Modification> elements.
( (last_modifications_revision+1)..(have_modifications_revision-1) ).each do |rev|
AggregateMods::log.info "#{MSG_PREFIX} Syncing #{mods_filename}##{rev}"
p4.run_sync("#{mods_filename}##{rev}")
# the above CAN fail! since we have limited revisions stored of these files
if File.exist?(mods_filename)
File.open(mods_filename) do |file|
temp_doc = REXML::Document.new(file)
AggregateMods::log.info "#{MSG_PREFIX} Appending modifications"
modifications = temp_doc.elements.to_a( "//ArrayOfModification/Modification" )
# Skip modifications that are the binary-publish checkins made by this script
# (recognised by PUBLISH_CL_COMMENT in the CL comment).
modifications.each do |mod|
modification_is_publish_of_binaries = false
if ( mod.elements["Comment"] and
mod.elements["Comment"].text and
mod.elements["Comment"].text.include?PUBLISH_CL_COMMENT)
modification_is_publish_of_binaries = true
end
# NOTE(review): rev only ranges up to have_modifications_revision-1, so the
# second clause is always true here and binary-publish mods are always skipped
# in this loop; presumably intended to admit them only for the final revision
# (which is handled separately below) - confirm.
if ( modification_is_publish_of_binaries and not rev.to_i == have_modifications_revision.to_i)
AggregateMods::log.info "#{MSG_PREFIX} Not cumulating CL #{mod.elements['ChangeNumber'].text}"
else
modifications_cumulative << mod
end
end
temp_doc = nil
end
else
AggregateMods::log.warn "#{MSG_PREFIX} Warning: Sync to revision #{rev} of #{mods_filename} did not get a file - this file has likely been purged from p4?"
end
end
# Re-sync the revision we actually have and append the accumulated modifications
# into its document, then write the aggregate file.
AggregateMods::log.info "#{MSG_PREFIX} #{mods_filename}##{have_modifications_revision}"
p4.run_sync("#{mods_filename}##{have_modifications_revision}")
if File.exist?(mods_filename)
doc = REXML::Document.new(File.new(mods_filename))
modifications_cumulative.each do |mods|
doc.elements["ArrayOfModification"] << mods
end
aggregate_filename = OS::Path::combine(publish_folder,AGGREGATE_MODIFICATIONS_FILE)
AggregateMods::log.info "#{MSG_PREFIX} Writing aggregate_filename #{aggregate_filename}"
file = File.open( aggregate_filename,"w+" )
file << doc
file.close
else
AggregateMods::log.error "#{MSG_PREFIX} Error: Didn't get #{mods_filename}##{have_modifications_revision}"
end
# Record the revision we processed so the next run knows where to resume.
AggregateMods::log.info "#{MSG_PREFIX} Writing last_mods_revision_filename #{last_mods_revision_filename} with revision #{have_modifications_revision}"
File.open(last_mods_revision_filename, "w+") { |file| file.write(have_modifications_revision.to_s) }
end
end # end class AggregateModifications
#=============================================================================================
#
# Check if we processed a build with an exe in it otherwise what is the point in performing
# a test, we should not be needlessly be testing the same exe again.
#
class CheckExe
  @@log = nil

  # Lazily-created logger shared by all CheckExe instances.
  def CheckExe.log
    @@log ||= Log.new( 'check_exe' )
  end

  def initialize()
  end

  #---------------------------------------------------------------------------------------------------------
  #
  # Scan the modifications file for a new revision of +exe+.
  # Always returns true: even when no new executable is present the build is
  # allowed to continue so the codetest pipeline runs end-to-end and reporting
  # stays clear (deliberate - raising an error here generated unwanted email).
  #
  def check_exe(p4, modifications_file, exe)
    CheckExe::log.info "#{MSG_PREFIX_PERSIST} Checking if the exe was in the modifications list - otherwise we have nothing to test"
    CheckExe::log.info "#{MSG_PREFIX} modifications_file = #{modifications_file}"
    CheckExe::log.info "#{MSG_PREFIX} exe = #{exe}"
    document = REXML::Document.new(File.new(modifications_file))
    modifications = document.elements.to_a( "//ArrayOfModification/Modification" )
    CheckExe::log.info "#{MSG_PREFIX} The modification filenames are :-"
    wanted = OS::Path.normalise(exe)
    found_exe = false
    modifications.each do |mod|
      # Tolerate either element casing for the filename.
      filename = mod.elements["filename"] || mod.elements["FileName"]
      unless filename
        CheckExe::log.error "#{MSG_PREFIX} no filename element in modification #{mod}"
        next
      end
      CheckExe::log.info "#{MSG_PREFIX} #{filename.text}"
      found_exe = true if OS::Path.normalise(filename.text) == wanted
    end
    if found_exe
      CheckExe::log.info "#{MSG_PREFIX_PERSIST} A new revision of #{exe} has been found and can be tested."
      CheckExe::log.info("#{MSG_PREFIX_PERSIST} This build can continue as it has passed the check exe test")
      return true
    end
    # DW : this used to be an error for sake of reporting but erroring is emailing unnecessarily.
    CheckExe::log.info("NO NEW EXECUTABLE TO TEST : check_exe determined that there is no NEW executable to test in this build.")
    CheckExe::log.info("Either the associated codebuilder build failed OR reverted upon unchanged the executable <#{exe}>")
    CheckExe::log.info("This build was executed in order that the codetest pipeline is fully executed in order to make reporting clear.")
    true
  end
end # end class CheckExe
#=============================================================================================
#
# Check if we processed a build with a capture in it otherwise what is the point in performing
# a test, we should not be needlessly processing the same file again - might end up with repeat info in database too.
#
class CheckCapture
  @@log = nil

  # Lazily-created logger shared by all CheckCapture instances.
  def CheckCapture.log
    @@log ||= Log.new( 'check_capture' )
  end

  def initialize()
  end

  #---------------------------------------------------------------------------------------------------------
  #
  # Scan the modifications file for a new revision of +capture_filename+.
  # Returns true when a new capture is present; false otherwise (unlike
  # CheckExe, a missing capture IS reported as an error and fails the check).
  #
  def check_capture(p4, modifications_file, capture_filename)
    CheckCapture::log.info "#{MSG_PREFIX_PERSIST} Checking if the capture file was in the modifications list - otherwise we have nothing to work with"
    CheckCapture::log.info "#{MSG_PREFIX} modifications_file = #{modifications_file}"
    CheckCapture::log.info "#{MSG_PREFIX} capture_filename = #{capture_filename}"
    document = REXML::Document.new(File.new(modifications_file))
    modifications = document.elements.to_a( "//ArrayOfModification/Modification" )
    CheckCapture::log.info "#{MSG_PREFIX} The modification filenames are :-"
    wanted = OS::Path.normalise(capture_filename)
    found_capture_file = false
    modifications.each do |mod|
      # Tolerate either element casing for the filename.
      filename = mod.elements["filename"] || mod.elements["FileName"]
      unless filename
        CheckCapture::log.error "#{MSG_PREFIX} no filename element in modification #{mod}"
        next
      end
      CheckCapture::log.info "#{MSG_PREFIX} #{filename.text}"
      found_capture_file = true if OS::Path.normalise(filename.text) == wanted
    end
    if found_capture_file
      CheckCapture::log.info "#{MSG_PREFIX_PERSIST} A new revision of #{capture_filename} has been found and can be tested."
      CheckCapture::log.info("#{MSG_PREFIX_PERSIST} This build can continue as it has passed the check capture_filename test")
      return true
    end
    CheckCapture::log.error("NO NEW CAPTURE FILE #{capture_filename} TO TEST : check_capture determined that there is no NEW capture file to test in this build.")
    CheckCapture::log.error("This means the associated codetester capture failed, which in turn can mean that the codebuilder build didn't create a new executable.")
    CheckCapture::log.error("This build was executed in order that the codetest pipeline is fully executed in order to make reporting clear.")
    false
  end
end # end class CheckCapture
#=============================================================================================
#
# Check if we should skip this build, we would skip if we are too far behind the head,
# the danger is that if we fall behind the head too far then since the binaries that are published
# only store limited number of revisions that we will not be able to retrieve the binary.
#
class CheckSkip
  @@log = nil

  # Lazily-created logger shared by all CheckSkip instances.
  def CheckSkip.log
    @@log = Log.new( 'check_skip' ) if @@log == nil
    @@log
  end

  def initialize()
  end

  #---------------------------------------------------------------------------------------------------------
  #
  # Decide whether this test run should be skipped because the published exe we
  # have is too many revisions behind the head (published binaries only keep a
  # limited number of revisions, so falling far behind risks losing them).
  #
  # Returns false (skip/fail) when fstat fails or when the have-revision is
  # check_skip or more revisions behind the head; true otherwise.
  #
  def check_skip(p4, publish_folder, check_skip, exe)
    CheckSkip::log.info "#{MSG_PREFIX_PERSIST} Checking if we should skip this test"
    CheckSkip::log.info "#{MSG_PREFIX} publish_folder = #{publish_folder}"
    CheckSkip::log.info "#{MSG_PREFIX} check_skip = #{check_skip} ( the number of revisions behind the head that will incur a skip of this test )"
    CheckSkip::log.info "#{MSG_PREFIX} exe = #{exe}"
    # ---- build exe filename ----
    filename = OS::Path.combine(publish_folder,exe)
    # ---- query perforce for have/head revisions ----
    fstat = p4.run_fstat( filename ).shift
    if (not fstat)
      CheckSkip::log.error("Error: Bad fstat")
      return false
    end
    CheckSkip::log.info("#{MSG_PREFIX_PERSIST} We will skip this test if we are #{check_skip} revisions behind the head of #{exe}")
    have_revision = fstat['haveRev'].to_i
    head_revision = fstat['headRev'].to_i
    rev_delta = head_revision-have_revision
    if rev_delta >= check_skip.to_i
      # BUGFIX: these messages previously contained the garbled text '#(unknown)'
      # where the exe name interpolation belonged.
      CheckSkip::log.error("Error: We are #{rev_delta} revisions of #{exe} behind the head" )
      CheckSkip::log.error("Error: (have revision=#{have_revision} head_revision=#{head_revision})" )
      CheckSkip::log.error("Error: This build will error and stop now so that we do not process this build - this is not a 'true' error per se.")
      return false
    end
    CheckSkip::log.info("#{MSG_PREFIX_PERSIST} Currently we are #{rev_delta} revisions of #{exe} behind the head")
    CheckSkip::log.info("#{MSG_PREFIX_PERSIST} (have revision=#{have_revision} head_revision=#{head_revision})")
    CheckSkip::log.info("#{MSG_PREFIX_PERSIST} This build can continue as it has passed the check skip test")
    return true
  end
end # end class CheckSkip
#---------------------------------------------------------------------------------------------------------
#
# Is this a valid build? Only certain build configurations may run this script:
# the publish folder must mention one of the names in VALID_BUILDS.
#
def valid_build(publish_folder)
  VALID_BUILDS.any? { |build| publish_folder.include?(build) }
end
#---------------------------------------------------------------------------------------------------------
#
# Return [last_modifications_revision, have_modifications_revision] for the published
# modifications.xml: the revision recorded on disk by the previous run, and the
# revision we currently have synced. Together they bound the range of revisions whose
# modifications still need to be aggregated.
#
def get_modification_revisions(p4, publish_folder,last_mods_revision_filename )
  # Revision recorded by the previous run; 0 when no record exists yet.
  last_modifications_revision = 0
  if (File.exist?(last_mods_revision_filename))
    # BUGFIX: File.new leaked the file handle; the block form closes it.
    File.open(last_mods_revision_filename, "r") do |file|
      last_modifications_revision = file.gets.to_i
    end
    PublishFiles::log.info "#{MSG_PREFIX} last_modifications_revision #{last_modifications_revision}"
  else
    PublishFiles::log.info "#{MSG_PREFIX} last_mods_revision_filename #{last_mods_revision_filename} doesn't exist"
  end
  # Ask p4 which revision of the modifications file we have synced.
  have_modifications_revision = 0
  modifications_file = OS::Path::combine(publish_folder,MODIFICATIONS_FILE)
  ret = p4.run_files("#{modifications_file}#have")
  have_modifications_revision = ret[0]["rev"].to_i if ret and ret[0]
  PublishFiles::log.info "#{MSG_PREFIX} #{MODIFICATIONS_FILE} last #{last_modifications_revision} have #{have_modifications_revision}"
  return last_modifications_revision, have_modifications_revision
end
#---------------------------------------------------------------------------------------------------------
#
# Create and connect a Perforce connection from the pipeline config; raises when
# the connection cannot be established.
#
def create_p4(config)
  PublishFiles::log.debug "#{MSG_PREFIX} Create p4 connection"
  connection = SCM::Perforce::create( config.sc_server, config.sc_username, config.sc_workspace )
  connection.connect( )
  raise Exception unless connection.connected?
  connection
end
#-----------------------------------------------------------------------------
# Application entry point.
#
# Dispatches on the mutually-exclusive mode flags in this order: check_exe,
# check_capture, (publish_folder required from here on) check_skip, sync_data,
# copy_dir, aggregate, and finally the default publish-files mode.
#
# DW - could do with some tidying when I get time!!!!!
#-----------------------------------------------------------------------------
if ( __FILE__ == $0 )
  begin
    g_AppName = File::basename( __FILE__, '.rb' )
    g_ProjectName = ''
    g_BranchName = ''
    g_Project = nil
    g_Config = Pipeline::Config.instance()
    #--------------------------------------------------------------------
    # --- PARSE COMMAND LINE ---
    #--------------------------------------------------------------------
    opts, trailing = OS::Getopt.getopts( OPTIONS )
    if ( opts['help'] )
      puts OS::Getopt.usage( OPTIONS )
      puts ("Press Enter to continue...")
      $stdin.getc( )
      Process.exit!( 1 )
    end
    puts "#{MSG_PREFIX_COMMANDLINE} #{$0} #{ARGV.join(" ")}"
    # ---- create a p4 connection ----
    p4 = create_p4(g_Config)
    # ---- check exe mode: is there a new executable in the modifications? ----
    if (opts['check_exe'])
      modifications_file = opts['modifications_file']
      exe = opts['exe']
      if (modifications_file and exe)
        # (removed: an unused skip_threshold assignment from opts['check_exe'])
        check_exe = CheckExe.new()
        puts"#{MSG_PREFIX} Checking #{modifications_file} for #{exe}"
        ret = check_exe.check_exe( p4, modifications_file, exe )
        puts"#{MSG_PREFIX} Check exe returned : #{ret}"
        Process.exit!(-1) if not ret
        Process.exit!( 0 )
      else
        $stderr.puts"Error: no modifications and/or exe file specified"
        Process.exit!(-1)
      end
    end
    # ---- check to see if we have a new capture ----
    if (opts['check_capture'])
      modifications_file = opts['modifications_file']
      capture_file = opts['capture_file']
      if (modifications_file and capture_file)
        check_capture = CheckCapture.new()
        puts"#{MSG_PREFIX} Checking #{modifications_file} for #{capture_file}"
        ret = check_capture.check_capture( p4, modifications_file, capture_file )
        puts"#{MSG_PREFIX} Check capture returned : #{ret}"
        Process.exit!(-1) if not ret
        Process.exit!( 0 )
      else
        $stderr.puts"Error: no modifications and/or capture file specified"
        Process.exit!(-1)
      end
    end
    # ---- the following modes all require the publish folder ----
    Process.exit!( 2 ) unless (opts['publish_folder'])
    publish_folder = opts['publish_folder']
    # ---- only certain build configurations may continue (see VALID_BUILDS) ----
    if (not valid_build(publish_folder))
      puts "#{MSG_PREFIX} No build publish of executable and modifications is to be performed on this build. ( publish folder = #{publish_folder} ) This is ok."
      Process.exit!( 0 )
    end
    # ---- if in check skip mode, check to see if we should skip ----
    if (opts['check_skip'])
      skip_threshold = opts['check_skip']
      exe = opts['exe']
      if (exe)
        check_skip = CheckSkip.new()
        Process.exit!(-1) if not check_skip.check_skip( p4, publish_folder, skip_threshold, exe )
        Process.exit!( 0 )
      else
        Process.exit!(-1)
      end
    end
    # ---- if in sync data mode, sync the data ----
    if (opts['sync_data'])
      sync_data = SyncData.new()
      sync_data.sync_data( p4, publish_folder, opts['build_shaders'] )
      Process.exit!( 0 )
    end
    # ---- if in copy mode, just copy and exit from the publishing - kept as one script for simplicity ( for now ) ----
    if (opts['copy_dir'])
      copy_directory = opts['copy_dir']
      copy_dir = CopyDir.new()
      copy_dir.copy_dir( p4, publish_folder, copy_directory )
      Process.exit!( 0 )
    end
    # ---- aggregate various modifications revisions - since we have skipped to the head ----
    last_mods_revision_filename = OS::Path::combine(publish_folder,LAST_MODIFICATIONS_FILE_REVISION_FILENAME)
    last_modifications_revision,have_modifications_revision = get_modification_revisions(p4, publish_folder, last_mods_revision_filename)
    if (opts['aggregate'])
      aggregate_mods = AggregateMods.new()
      aggregate_mods.aggregate_mods(p4, publish_folder, have_modifications_revision, last_modifications_revision, last_mods_revision_filename)
      Process.exit!( 0 )
    end
    # ---- default mode: publish files into p4 ( binaries or stats ) ----
    Process.exit!( 4 ) unless (opts['filespec'])
    filespec = opts['filespec']
    enable_checkin = false
    enable_checkin = true if (opts['enable_checkin'])
    publish_files = PublishFiles.new()
    publish_files.publish_files(p4, publish_folder, filespec, enable_checkin)
    Process.exit! 0
  rescue Exception => ex
    # Deliberate last-resort catch-all so any failure is reported on stderr with a
    # non-zero exit code (Process.exit! above bypasses this rescue entirely).
    $stderr.puts "Error: Unhandled exception: #{ex.message}"
    $stderr.puts "Backtrace:"
    ex.backtrace.each { |m| $stderr.puts "\t#{m}" }
    Process.exit! -1
  end
end