#!/usr/bin/python
#------------------------------------------------------------------------------
# Copyright (c) Perforce Software, Inc., 2011-2015. All rights reserved
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL PERFORCE
# SOFTWARE, INC. BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
# THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
#------------------------------------------------------------------------------
#
import sys
import os
import marshal
import shutil
import platform
import re
import time
import argparse
import getpass
import hashlib
from string import upper
from smtplib import SMTP, SMTPException
from ConfigParser import ConfigParser, NoOptionError
from xml.etree import ElementTree
from subprocess import Popen, PIPE
###########################################################################
##### GLOBAL VARIABLES AND DEFAULTS, IF APPLICABLE
#####
# Minimum p4d server release this script is written against.
g_supported_p4d_version = "2010.2"
# Master on/off switch; overwritten from automerge.ini ([control] enabled).
g_automergeEnabled = True
# Parsed command-line arguments (argparse namespace) and the parser itself.
g_args = None
g_parser = None
# Result record of "p4 info" for the current connection.
g_connectionInfo = {}
# Directory this script lives in, local hostname, parsed automerge.ini.
g_baseDir = os.path.dirname(os.path.realpath(__file__))
g_hostname = platform.node()
g_config = None
g_parentDirName = os.path.split(g_baseDir)[1]
# Depot location of the automerge tool files, taken from the environment.
g_baseDepotPath = os.environ.get("automerge.baseDepotPath")
# Depot/client paths for the tool's own files; computed in initialize().
g_baseClientPath = ""
g_configDepotPath = ""
g_configClientPath = ""
g_configFile = ""
g_launcherDepotPath = ""
g_launcherClientPath = ""
g_automergeDepotPath = ""
g_automergeClientPath = ""
# Parsed branches.xml / merges.xml ElementTree roots (set in initialize()).
g_branchesXML = None
g_mergesXML = None
g_promoteLocksDepotPath = ""
g_promoteLocksClientPath = ""
# Email/notification settings, read from [notification] in automerge.ini.
g_senderEmail = ""
g_automergeAdmins = []
g_smtpServer = ""
g_smtpPort = 25
g_smtpUsername = ""
g_smtpPassword = ""
g_nagTimeMinutes = 1
# Operational limits and user lists, read from [settings] in automerge.ini.
g_maxChangelists = 100
g_maxChangelistFiles = 100
g_automergeUsers = []
g_honorUserProtections = False
# When set, p4 commands are run as this user (-u) after logging it in.
g_runAs = None
g_amUser = ""
g_amClientName = ""
# Log destination (stdout by default); path string from [logging] logfile.
g_logfile = sys.stdout
g_logfileStr = ""
###########################################################################
##### USAGE/LOGGING OPERATIONS
#####
# Help text shown by usage(); kept as a single module-level constant so the
# parser wiring and the manual help path share one source of truth.
PROGRAM_USAGE = """\
automerge [-h] [-d] [-v] [-b <BranchSpec> -c <Changelist>] [Options]
This script is used by developers to merge <Changelist> from
a source stream to a target stream using <BranchSpec>
With no options specified, the script merges a single changelist,
performing the following steps:
* Create a \"staged\" pending changelist for the merge
* Preview the merge...
* If the source changelist contains file moves, halt and ask
user to move these files before proceeding
* Verify only single revisions of files are scheduled for resolve,
otherwise warn user that older changelists must be merged first...
* Verify all files modified in the source changelist are scheduled
for resolve, if not warn user...
* If integration errors are found in preview, halt and request user
manually address error before resuming. When the script resumes,
it will repeat the integration preview for the changelist that resulted
in the halt.
Arguments:
-h Help -- prints this message
-v Verbose Mode -- output is noisier - specifically, the various p4 commands are
printed along with other debugifying information
-d Daemon Mode -- runs the script in non-interactive mode
-b BranchSpec -- The branch mapping specification to use for this merge
According to convention this is of the form "source_to_target"
-c Changelist -- A changelist submitted to the source stream that is to be merged to
the target stream. If the --multi option is specified, Changelist is
is the changelist or label to merge up to, starting with the oldest
unmerged changelist
Options when in interactive mode:
--auto
Automatically resolve and submit change lists with no conflicts This will merges
all unmerged changelists, one at a time, from source to target stream. The unmerged
changelists are determined by using p4 interchanges and iteratively performs the
merge for each unmerged change.
--promote
Performs the copy-up procedure from the specified branch specification.
Usage Notes:
Before running this script, your workspace should have no pending
changelists associated with it.
Interactive Mode Examples:
To integrate changelist 31353 using branch map map_to_dev1 without resolving automatically:
automerge -b main_to_dev1 -c 31353
To integrate, resolve -am and submit if no conflicts:
automerge -b main_to_dev1 -c 31353 --auto
To integrate, resolve and submit if no conflicts all umerged changes up to the label "latest_QA_pass":
automerge -b main_to_dev1 -c latest_QA_pass --auto
To promote to main using branch map main_to_dev1:
automerge -b main_to_dev --promote
"""
# usage
# display the usage instructions above, with an optional message above and
# below
def usage(message=""):
    """Print the program usage text, framing it above and below with the
    optional *message* between separator banners."""
    banner = "==============================================="
    def _announce():
        # emit the highlighted message block only when one was supplied
        if len(message) > 0:
            log(None, banner)
            log(None, message)
            log(None, banner)
    _announce()
    log(None, g_parser.usage)
    _announce()
###########################################################################
##### INITIALIZATION OPERATIONS
#####
def initialize():
    """Compute depot/client paths, load branches.xml and merges.xml straight
    from the depot, and read every setting out of automerge.ini into the
    module-level globals.  Requires g_connectionInfo and g_baseDepotPath to
    be populated first."""
    global g_automergeEnabled,g_baseDir,g_hostname,g_config,g_parentDirName,g_baseDepotPath,g_baseClientPath,g_launcherDepotPath,g_launcherClientPath,g_configDepotPath,g_configClientPath,g_configFile,g_automergeDepotPath,g_automergeClientPath,g_branchesXML,g_mergesXML,g_promoteLocksDepotPath,g_promoteLocksClientPath,g_amUser,g_senderEmail,g_automergeAdmins,g_smtpServer,g_nagTimeMinutes,g_maxChangelists,g_maxChangelistFiles,g_automergeUsers,g_honorUserProtections,g_logfileStr,g_smtpUsername,g_smtpPassword,g_smtpPort
    # path and directory information
    g_baseDir = os.path.dirname(os.path.realpath(__file__))
    g_hostname = platform.node()
    g_parentDirName = os.path.split(g_baseDir)[1]
    g_baseClientPath = "//" + g_connectionInfo["clientName"] + "/" + g_parentDirName
    g_configDepotPath = g_baseDepotPath + "/automerge.ini"
    g_configClientPath = g_baseClientPath + "/automerge.ini"
    g_configFile = g_baseDir + os.sep + "automerge.ini"
    g_launcherDepotPath = g_baseDepotPath + "/automerge_impl.py"
    g_launcherClientPath = g_baseClientPath + "/automerge_impl.py"
    g_automergeDepotPath = g_baseDepotPath + "/automerge.py"
    g_automergeClientPath = g_baseClientPath + "/automerge.py"
    # branch/merge configuration is read from the depot (p4 print), not from
    # the local workspace copy
    g_branchesXML = ElementTree.fromstring(getTextFile(g_baseDepotPath + "/branches.xml"))
    g_mergesXML = ElementTree.fromstring(getTextFile(g_baseDepotPath + "/merges.xml"))
    g_promoteLocksDepotPath = g_baseDepotPath + "/promoteLocks/"
    g_promoteLocksClientPath = g_baseClientPath + "/promoteLocks/"
    # read automerge.ini file
    g_config = ConfigParser(); g_config.read(g_configFile)
    # automerge enabled flag
    g_automergeEnabled = g_config.getboolean("control","enabled")
    # user config
    g_amUser = g_config.get("p4config","automerge.account")
    # notification information
    g_senderEmail = g_config.get("notification","sender.email")
    g_automergeAdmins = g_config.get("notification","automerge.admins").split(',')
    g_smtpServer = g_config.get("notification","smtp.server")
    g_nagTimeMinutes = g_config.getint("notification","nag.time.min")
    # SMTP authentication is optional: port and password are only read when
    # a username is configured
    if g_config.has_option("notification","smtp.username"):
        g_smtpUsername = g_config.get("notification","smtp.username")
        g_smtpPort = g_config.getint("notification","smtp.port")
        g_smtpPassword = g_config.get("notification","smtp.password")
    # settings
    g_maxChangelists = g_config.getint("settings","max.changelists")
    g_maxChangelistFiles = g_config.getint("settings","max.changelist.files")
    g_automergeUsers = g_config.get("settings","automerge.users").split(',')
    g_honorUserProtections = g_config.getboolean("settings","honor.user.protections")
    # logging
    g_logfileStr = g_config.get("logging","logfile")
### end of initialize
#Verify the basic validity of Branches.xml and Merges.xml
def verifyXMLFiles():
    """Sanity-check merges.xml against branches.xml and the server.

    Every <merge> entry must carry source/target/branchSpec/direction/auto
    attributes; both branch paths must resolve via branches.xml and show at
    least one submitted change; the branch spec must exist on the server;
    and direction/auto must hold supported values.  The script exits on the
    first violation (errorExit raises SystemExit, so every local below is
    always bound when later code reads it)."""
    debug("Performing error checks on branches/merges.xml files")
    merges = []
    #retrieve all merge entries from merges.xml
    results = g_mergesXML.findall("./merge")
    for r in results:
        merges.append(r.attrib)
    #perform error checks on each entry
    for merge in merges:
        #retrieve fields from an entry in merges.xml
        if 'source' in merge:
            source = merge['source']
        else:
            errorExit("Invalid entry in merges.xml - missing source attribute")
        if 'target' in merge:
            target = merge['target']
        else:
            errorExit("Invalid entry in merges.xml - missing target attribute")
        if 'branchSpec' in merge:
            branchSpec = merge['branchSpec']
        else:
            errorExit("Invalid entry in merges.xml - missing branchSpec attribute")
        if 'direction' in merge:
            direction = merge['direction']
        else:
            errorExit("Invalid entry in merges.xml - missing direction attribute")
        if 'auto' in merge:
            auto = merge['auto']
        else:
            errorExit("Invalid entry in merges.xml - missing auto attribute")
        #get source path and verify it is valid
        sourcePath = getBranchPath(source)
        if sourcePath == None:
            errorExit("merges.xml config error: Can't find path for branch named " + source + " in branches.xml")
        else:
            results = p4MarshalCmd(["changes","-m1",sourcePath + "/..."])
            #check for at least one submitted change to this path
            if containsError(results):
                errorExit("Error validating " + sourcePath + " for branch " + source + " in branches.xml - error with changes command")
            if results == []:
                errorExit("Possible invalid path of " + sourcePath + " for branch " + source + " in branches.xml")
        #get target path and verify it is valid
        targetPath = getBranchPath(target)
        if targetPath == None:
            errorExit("merges.xml config error: Can't find path for branch named " + target + " in branches.xml")
        else:
            results = p4MarshalCmd(["changes","-m1",targetPath + "/..."])
            #check for at least one submitted change to this path
            if containsError(results):
                errorExit("Error validating " + targetPath + " for branch " + target + " in branches.xml - could not execute changes command")
            if results == []:
                errorExit("Possible invalid path of " + targetPath + " for branch " + target + " in branches.xml")
        #verify branch spec exists
        results = p4MarshalCmd(["branches","-e",branchSpec])
        if containsError(results):
            errorExit("Error validating branchSpec " + branchSpec + " error with branches command")
        if results == []:
            errorExit(branchSpec + " is configured in merges.xml, but does not exist in P4")
        #verify direction
        validDirections = ["MergeDown","CopyUp","Both"]
        if direction not in validDirections:
            errorExit("Found unsupported direction of " + direction + " in merges.xml")
        #verify auto setting
        validAutoSettings = ["true","false"]
        if auto not in validAutoSettings:
            errorExit("Found unsupported auto setting of " + auto + " in merges.xml")
        #verify branch owner is valid (missing owner is a warning, not fatal)
        ownerEmail = getBranchOwnerEmail(branchSpec)
        if ownerEmail == None:
            warn("No (or invalid) owner email address configured for branch " + source + " in branches.xml")
        debug("Completed error checking in branches/merges.xml for merge with branchSpec " + branchSpec)
###########################################################################
##### EXCEPTION DEFINITIONS
#####
# ERROR CONSTANTS -- STORED IN THE 'type' ATTRIBUTE OF AN EXCEPTION AND USED
# WHEN EVALUATING WHAT TO DO WHEN AN EXCEPTION OCCURS
# Values are spaced apart so new types can be inserted without renumbering.
# Only a subset is raised in this part of the file; the remainder are
# presumably used by the merge/resolve workflow elsewhere in the script.
GENERIC_ERROR = 10
COMMAND_ERROR = 20
CHANGELIST_ERROR = 25
CONNECTION_ERROR = 30
LOGIN_ERROR = 40
CONFIGURATION_ERROR = 50
INTEGRATE_ERROR = 60
RESOLVE_ERROR = 70
FILESOPEN_ERROR = 80
INTEGRATEFLAG_ERROR = 90
INTEGRATEPREVIEWMISMATCH_ERROR = 100
RESOLVEPREVIEWMISMATCH_ERROR = 110
AUTORESOLVE_ERROR = 120
SUBMIT_ERROR = 130
MOVE_ERROR = 140
NOAUTOMERGE_ERROR = 150
MAXFILES_ERROR = 160
OUTOFORDER_ERROR = 170
CHANGELISTMISSING_ERROR = 180
# raiseAutomergeException
# utility function to raise a command error
# arguments:
# type - the exception type (constant)
# parameter - a short string description of the error
# command (optional) - the command that caused the error -- this is stored
# in the data dictionary with the key 'command'
# map (optional) - an arbitrary dictionary of key-value pairs, stored in
# the data dictionary
def raiseAutomergeException(type, parameter, command='', map=None):
    """Raise an AutomergeException of the given *type* (one of the *_ERROR
    constants) carrying *parameter* as its message.  When supplied, the
    offending *command* and any extra *map* key/value pairs are attached to
    the exception's data dict."""
    ae = AutomergeException(parameter)
    ae.data['type'] = type
    if len(command) > 0:
        ae.data['command'] = command
    # 'map' originally defaulted to a mutable {} literal (shared-default
    # pitfall); None behaves identically for every caller
    for key in (map or {}):
        ae.data[key] = map[key]
    raise ae
# AutomergeException
# Exception used in the script
# AutomergeException
# Exception used in the script
class AutomergeException(Exception):
    """Script-wide exception: carries a human-readable message in
    ``parameter`` plus a ``data`` dict holding at least ``type`` (one of the
    *_ERROR constants) and any context attached by raiseAutomergeException
    (command, user, changeSpec, ...)."""
    def __init__(self, value):
        self.parameter = value
        # 'data' used to be a *class* attribute, so every instance shared a
        # single dict and context from one raise leaked into later ones;
        # each instance now gets its own dict
        self.data = {'type': GENERIC_ERROR}
    # format the message and all attached data for display
    def __str__(self):
        info = self.parameter
        info += "\n"
        for key in self.data:
            info += " {0}: {1}\n".format(key,self.data[key])
        return info
###########################################################################
##### LOGGING OPERATIONS
#####
# debug
# logs the message if the verbose flag is turned on
# debug
# logs the message(s) only when the verbose flag is turned on
def debug(*msg):
    if not g_args.verbose:
        return
    for entry in msg:
        log('[DEBUG]', entry)
# info
# information messages
# info
# information messages (note the trailing space in the tag -- it keeps the
# prefix the same width as '[ERROR]'/'[DEBUG]')
def info(*msg):
    for entry in msg:
        log('[INFO] ', entry)
# warn
# warning messages
# warn
# warning messages
def warn(*msg):
    for entry in msg:
        log('[WARN]', entry)
# error
# error messages
# error
# error messages
def error(*msg):
    for entry in msg:
        log('[ERROR]', entry)
# errorExit
# Exit with error messages
def errorExit(*msg):
    # log each message at [ERROR] level, then terminate via exit()
    # (which raises SystemExit)
    error(*msg)
    exit()
# exit
# exit with error
# exit
# log any final messages and stop the script via SystemExit
# (NOTE: shadows the builtin exit throughout this module)
def exit(*msg):
    tag = '[EXIT] '
    for entry in msg:
        log(tag, entry)
    log(tag, "*** AUTOMERGE FINISHED ***")
    raise SystemExit()
# log
# write to logfile
# log
# write each message to the logfile, one per line.  In daemon mode every
# non-None prefix is written; in interactive mode only the error/debug/warn
# tags are shown.
def log(prefix, *msg):
    tagged = ("[ERROR]", "[DEBUG]", "[WARN]")
    for entry in msg:
        if prefix is not None and (g_args.daemonMode or prefix in tagged):
            g_logfile.write(prefix)
            g_logfile.write(' ')
        g_logfile.write(entry)
        g_logfile.write(os.linesep)
###########################################################################
##### UTILITY OPERATIONS
#####
# formatTime
# returns the time structure formatted as a GMT string
# formatTime
# convert an epoch-seconds value (number or numeric string) into an
# RFC-1123-style GMT timestamp string
def formatTime(data):
    stamp = time.gmtime(float(data))
    return time.strftime("%a, %d %b %Y %H:%M:%S GMT", stamp)
# sendEmail
# generic function to send a message
def sendEmail(toList, subject, msg):
    """Build a simple RFC-822-style message and send it to *toList* (a single
    address or a list) through the configured SMTP server.  Sends are
    rate-limited via canSend()/setEmailCounter() so an identical message body
    is not re-sent inside the g_nagTimeMinutes window.  SMTP failures are
    logged, not raised."""
    if not isinstance(toList, list):
        toList = [toList]
    try:
        message = "From: " + g_senderEmail + "\n"
        message += "To: " + ", ".join(toList) + "\n"
        message += "Subject: " + subject + "\n"
        message += "\n"
        message += msg
        if canSend(message):
            debug("Sending email...")
            if g_smtpUsername != "":
                #use SMTP authentication (STARTTLS, then login)
                mail = SMTP(g_smtpServer,g_smtpPort)
                mail.ehlo()
                mail.starttls()
                mail.ehlo()
                mail.login(g_smtpUsername, g_smtpPassword)
                mail.sendmail(g_senderEmail, toList, message)
                mail.close()
            else:
                #no authentication
                mail = SMTP(g_smtpServer)
                mail.sendmail(g_senderEmail, toList, message)
            # record the send time so canSend() can rate-limit repeats
            setEmailCounter(message)
            info("Successfully sent email")
        else:
            info("Cannot send email yet due to g_nagTimeMinutes setting ({0})".format(g_nagTimeMinutes))
    except SMTPException:
        error("Error: unable to send email")
# canSend
# function to determine if the specified email message is "sendable" -- i.e. if the email
# has been sent within nagTimeMinues (or ever, if g_nagTimeMinutes==0) then this function
# returns false
# canSend
# decide whether this exact message may be (re)sent: a message that has
# already been sent is only sendable again after g_nagTimeMinutes have
# elapsed, and never again when g_nagTimeMinutes is 0
def canSend(message):
    # counters look like:
    # automerge.email.<sha224-of-message> = <epoch timestamp>
    digest = hashlib.sha224(message).hexdigest()
    counter = "automerge.email.{0}".format(digest)
    known = getAutomergeCounters(prefix='automerge.email')
    if counter not in known.keys():
        # never sent before -- always sendable
        return True
    if g_nagTimeMinutes > 0:
        # re-sendable once the nag interval has elapsed
        return time.time() > float(known[counter]) + (g_nagTimeMinutes * 60)
    # nag disabled: only one email ever
    return False
# setEmailCounter
# sets a counter on the server in the format
# automerge_email.4f1a165f21347b8b126fa2b8449a3734d2e0ab653361b588bbdee433 = 1331743158.52
# which is based on the hash of the message. The value is the timestamp
# setEmailCounter
# record "now" against the hash of the message:
# automerge.email.<sha224-of-message> = <epoch timestamp>
def setEmailCounter(message):
    digest = hashlib.sha224(message).hexdigest()
    counter = "automerge.email.{0}".format(digest)
    cmd = ['counter','-f',counter,str(time.time())]
    # raise a command error if the counter could not be written
    if containsError(p4MarshalCmd(cmd,impersonate=False)):
        raiseAutomergeException(COMMAND_ERROR, "Error setting email counter", command=cmd)
# getAutomergeCounters
# returns a list of the counters that have the specified prefix
# the default prefix is 'automerge'
# getAutomergeCounters
# return {counter name: value} for every server counter whose name starts
# with the given prefix (default 'automerge')
def getAutomergeCounters(prefix='automerge'):
    rows = p4MarshalCmd(["counters"],quiet=True)
    return dict((row['counter'], row['value'])
                for row in rows
                if row['code'] == 'stat' and row['counter'].startswith(prefix))
# setCounter
# stores the counter in the specified value
# setCounter
# force-set (-f) a server counter to the given value
def setCounter(counter, value):
    cmd = ['counter','-f',counter,value]
    # a failed write is a command error
    if containsError(p4MarshalCmd(cmd,impersonate=False)):
        raiseAutomergeException(COMMAND_ERROR, "Error setting counter", command=cmd)
# clearEmailCounters
# function to remove automerge_email counters
# clearEmailCounters
# delete every automerge.email.* rate-limiting counter from the server
def clearEmailCounters():
    for name in getAutomergeCounters(prefix='automerge.email').keys():
        cmd = ['counter','-f','-d',name]
        if containsError(p4MarshalCmd(cmd,impersonate=False)):
            raiseAutomergeException(COMMAND_ERROR, "Error deleting counter", command=cmd)
# countElements
# examines the array for keys that start with the supplied prefix,
# returning the number of matching keys found
# countElements
# count the consecutive keys prefix0, prefix1, ... present in the mapping
# (stops at the first missing index)
def countElements(array, prefix):
    count = 0
    while "{0}{1}".format(prefix, count) in array:
        count += 1
    return count
# containsError
# utility function to check for any error code in the results array
# containsError
# scan marshalled p4 result records for an 'error' code; error records are
# logged unless logError is False, and 'info' records are echoed at debug
# level since they can matter when troubleshooting
def containsError(results=[],logError=True):
    found = False
    for record in results:
        if 'code' not in record:
            continue
        code = record['code']
        if code == 'error':
            found = True
            if logError:
                error(record['data'])
        elif code == 'info':
            debug(record['data'])
    return found
# compareLists
# checks to see if two lists are the same
# compareLists
# element-wise equality of two lists: lengths and element types must match,
# and dict elements are delegated to compareDicts
def compareLists(l1, l2):
    if len(l1) != len(l2):
        return False
    for left, right in zip(l1, l2):
        if type(left) != type(right):
            return False
        if type(left) is dict:
            if not compareDicts(left, right):
                return False
        elif left != right:
            return False
    return True
# compareDicts
# checks to see if two dicts are the same
# compareDicts
# checks to see if two dicts are the same
def compareDicts(d1, d2):
    """Return True when both dicts hold exactly the same keys and values.

    The original implementation only verified that d1 was a subset of d2,
    so extra keys in d2 went undetected; the length check below makes the
    comparison symmetric, matching the documented 'the same' contract."""
    if len(d1) != len(d2):
        return False
    for k in d1.keys():
        if k not in d2:
            return False
        if d1[k] != d2[k]:
            return False
    return True
# str2bool
# converts provided string into a boolean equivalent
# str2bool
# map the usual truthy spellings to True; anything else is False
def str2bool(v):
    truthy = ("yes", "true", "t", "1")
    return v.lower() in truthy
# cleanupWorkspace
# performs a sync //path/...#none on each path in the
# branches.xml file to clean up the db.have and filesystem
# cleanupWorkspace
# sync //path/...#none for every path defined in branches.xml, clearing
# both db.have and the local filesystem copies
def cleanupWorkspace():
    info("Cleaning workspace and db.have")
    for branchPath in getDefinedBranchPaths():
        p4MarshalCmd(["sync","{0}/...#none".format(branchPath)],quiet=True)
###########################################################################
##### PERFORCE COMMAND OPERATIONS
#####
# p4MarshalCmd
# executes the p4 command, results sent to a list
# p4MarshalCmd
# executes the p4 command, results sent to a list
def p4MarshalCmd(cmd,quiet=False,impersonate=True):
    """Run ``p4 -G <cmd>`` and return the marshalled result records as a list.

    cmd         -- list of p4 arguments, e.g. ["changes","-m1",path]
    quiet       -- suppress the debug echo of the command line
    impersonate -- when a run-as user is configured, log that user in and
                   prepend "-u <user>" to the command
    """
    if impersonate and g_runAs is not None:
        login(g_runAs)
        cmd = ["-u",g_runAs] + cmd
    if not quiet:
        debug("p4 {0}".format(" ".join(cmd)))
    records = []  # renamed from 'list', which shadowed the builtin
    pipe = Popen(["p4", "-G"] + cmd, stdout=PIPE).stdout
    try:
        # p4 -G emits a stream of marshalled dicts; EOFError marks the end
        while 1:
            records.append(marshal.load(pipe))
    except EOFError:
        pass
    finally:
        # close the pipe even if marshal raises something other than EOFError
        # (the original leaked the file descriptor on that path)
        pipe.close()
    return records
# p4InputCmd
# executes the p4 command with input
# p4InputCmd
# executes the p4 command with input
def p4InputCmd(data,cmd,quiet=False,impersonate=True):
    """Run ``p4 -G <cmd>`` feeding *data* (a spec dict) to stdin in marshal
    form, returning the marshalled result records as a list.  Used for
    form-input commands such as ``change -i`` and ``client -i``."""
    if impersonate and g_runAs is not None:
        login(g_runAs)
        cmd = ["-u",g_runAs] + cmd
    if not quiet:
        debug("p4 {0}".format(" ".join(cmd)))
    records = []  # renamed from 'list', which shadowed the builtin
    proc = Popen(["p4", "-G"] + cmd, stdout=PIPE, stdin=PIPE)
    # hand the marshalled spec (version 0 for p4 compatibility) to the
    # process and close stdin so p4 starts processing
    marshal.dump(data, proc.stdin, 0)
    proc.stdin.close()
    try:
        while 1:
            records.append(marshal.load(proc.stdout))
    except EOFError:
        pass
    finally:
        # close stdout even on unexpected marshal errors (original leaked it)
        proc.stdout.close()
    return records
# p4Cmd
# executes a p4 command, returns results
# p4Cmd
# run a plain (non-marshalled) p4 command and return its raw stdout text
def p4Cmd(cmd,quiet=False,impersonate=True):
    if impersonate and g_runAs is not None:
        login(g_runAs)
        cmd = ["-u",g_runAs] + cmd
    if not quiet:
        debug("p4 {0}".format(" ".join(cmd)))
    stdout, _ = Popen(["p4"] + cmd,stdout=PIPE).communicate()
    return stdout
# getConnectionInfo
# performs a p4 info to determine current connection information
def getConnectionInfo():
    """Run ``p4 info`` and return its first result record (a dict with keys
    such as clientName, userName, serverVersion)."""
    # NOTE(review): g_connectionInfo is declared global but never assigned
    # here -- the caller appears responsible for storing the return value;
    # confirm whether the declaration is vestigial.
    global g_connectionInfo
    results = p4MarshalCmd(["info"],quiet=True)
    if containsError(results):
        raiseAutomergeException(CONNECTION_ERROR, "Cannot connect to the server")
    return results[0]
# getServerVersion
# retrieves the major and minor versions for the current server
def getServerVersion(serverVersion):
version = {'major' : None, 'minor' : None}
matches = re.search('P4D/[A-Z0-9]*/(\d*\.\d)/(\d*)\s.*',serverVersion)
if matches:
version['major'] = matches.group(1)
version['minor'] = matches.group(2)
return version
###########################################################################
##### USER OPERATIONS
#####
# login
# performs a login using the provided username
# login
# log the given user in; raises a LOGIN_ERROR exception on failure
def login(username):
    result = p4MarshalCmd(["login", username],quiet=True,impersonate=False)
    if containsError(result):
        raiseAutomergeException(LOGIN_ERROR, "Unable to login with username {0}".format(username),map={'user':username})
# checkLogin
# check the login ticket on the server
# checkLogin
# verify a valid login ticket exists (p4 login -s), optionally for a
# specific user; raises LOGIN_ERROR when no ticket is present
def checkLogin(username=""):
    if len(username) > 0:
        cmd = ["-u",username,"login","-s"]
    else:
        cmd = ["login","-s"]
    if containsError(p4MarshalCmd(cmd,quiet=True)):
        raiseAutomergeException(LOGIN_ERROR, "You need to login before using this command (use p4 login)",map={'user':username})
    info("Logged in as user {0}".format(username))
# getUser
# retrieves the user information associated with the specified user
# getUser
# return the "p4 users" record for the given username; raises LOGIN_ERROR
# when the listing fails or the user is not present
def getUser(username):
    result = p4MarshalCmd(["users"],quiet=True)
    if containsError(result):
        raiseAutomergeException(LOGIN_ERROR, "Error retrieving information for user {0}".format(username),map={'user':username})
    match = None
    for row in result:
        if row['User'] == username:
            match = row
    if match is None:
        raiseAutomergeException(LOGIN_ERROR, "Username {0} does not exist".format(username),map={'user':username})
    return match
###########################################################################
##### CLIENT OPERATIONS
#####
# updateAutomergeClient
# update the client used by the automerge process
def updateAutomergeClient():
    """(Re)write the automerge client spec via ``p4 client -i``: it maps the
    tool's own config/launcher/script/promoteLocks files, plus a
    <client>/tmp/... area for every branch path defined in branches.xml."""
    clientSpec = {
        'Client' : g_connectionInfo['clientName'],
        'Owner' : g_connectionInfo['userName'],
        'Host' : g_hostname,
        'Description' : "Automatically created for automerge"}
    clientSpec['Root'] = os.path.dirname(g_baseDir)
    # fixed view mappings for the tool's own files
    view = [
        g_configDepotPath + ' ' + g_configClientPath,
        g_launcherDepotPath + ' ' + g_launcherClientPath,
        g_automergeDepotPath + ' ' + g_automergeClientPath,
        g_promoteLocksDepotPath + "..." + ' ' + g_promoteLocksClientPath + "..."
        ]
    for i in range(0,len(view)):
        clientSpec["View{0}".format(i)] = view[i]
    # one additional mapping per configured branch path, landing under the
    # client's tmp/ subtree ("//depot/x" becomes ".../tmp/depot/x/...")
    paths = getDefinedBranchPaths()
    i = len(view)
    for path in paths:
        left = path + "/..."
        right = "{0}/tmp{1}/...".format(g_baseClientPath, path.replace("//","/"))
        clientSpec["View{0}".format(i)] = "{0} {1}".format(left, right)
        i += 1
    result = p4InputCmd(clientSpec, ["client","-i"],quiet=True)
    if containsError(result):
        raiseAutomergeException(COMMAND_ERROR, "Error updating client {0}".format(g_connectionInfo['clientName']),map={'clientSpec':clientSpec})
    info("Updating workspace: {0}".format(g_connectionInfo['clientName']))
###########################################################################
##### JOB/FIX OPERATIONS
#####
# getFixes
# retrieves the fixes associated with a particular changelist
# getFixes
# return the fix records (job links) attached to the given changelist
def getFixes(changelistNumber):
    cmd = ["fixes","-c",str(changelistNumber)]
    records = p4MarshalCmd(cmd)
    if containsError(records):
        raiseAutomergeException(COMMAND_ERROR, "Error retrieving fixes for changelist {0}".format(changelistNumber), command=cmd)
    return records
# deleteFix
# removes a specific fix (job/changelist combination)
# deleteFix
# remove one fix (the link between a job and a changelist)
def deleteFix(changelistNumber, jobName):
    cmd = ["fix","-d","-c",str(changelistNumber),jobName]
    if containsError(p4MarshalCmd(cmd)):
        raiseAutomergeException(COMMAND_ERROR, "Error deleting fix ({0},{1})".format(changelistNumber, jobName), command=cmd)
# cleanFixes
# removes jobs associated with a changelist
# cleanFixes
# unlink every job currently attached to the changelist
def cleanFixes(changelistNumber):
    for fix in getFixes(changelistNumber):
        deleteFix(changelistNumber, fix['Job'])
# getJobs
# returns a list of dictionaries where the key 'job' holds the job
# number and the key 'jobstat' holds the job status
# getJobs
# walk the job0, job1, ... keys of a describe-style changelist dict and
# return [{'job': name, 'jobstat': 'open'}, ...] for each one found
def getJobs(changelist):
    jobs = []
    index = 0
    while True:
        key = "job{0}".format(index)
        if key not in changelist:
            break
        jobs.append({'job' : changelist[key], 'jobstat' : 'open'})
        index += 1
    return jobs
###########################################################################
##### DEPOT FILE OPERATIONS
#####
# countDepotFiles
# counts the number of depot files in the given changelist
# countDepotFiles
# count the consecutive depotFile0, depotFile1, ... keys present in a
# describe-style changelist dict
def countDepotFiles(changelist):
    total = 0
    while "depotFile{0}".format(total) in changelist:
        total += 1
    return total
# depotFileExists
# checks to see if the specified file exists. The function returns
# True/False
# depotFileExists
# True when the path maps to a depot file; files deleted at head only count
# as existing when showDeleted is set
def depotFileExists(path,showDeleted=False):
    head = p4MarshalCmd(["files",path],quiet=True)[0]
    if head['code'] != 'stat':
        return False
    if head['action'] in ('move/delete', 'delete'):
        return showDeleted
    return True
# fstat
# returns a dictionary based on the fstat of the given path
# fstat
# return the first "p4 fstat" record for the path as a dict
def fstat(path):
    return p4MarshalCmd(["fstat",path],quiet=True)[0]
# getTextFile
# returns the text file from the depot at the specified path
# getTextFile
# p4 print a single text file from the depot and return its contents;
# raises COMMAND_ERROR when the path is bad, not a text file, or matches
# more than one file
def getTextFile(path):
    cmd = ["print",path]
    result = p4MarshalCmd(cmd,quiet=True)
    head = result[0]
    if head['code'] == 'error':
        raiseAutomergeException(COMMAND_ERROR, "Error retrieving file from depot", command=cmd)
    if head['type'] != 'text':
        raiseAutomergeException(COMMAND_ERROR, "Error depot file is not a text file", command=cmd)
    # one file prints as exactly three records: stat header, data, terminator
    if len(result) != 3:
        raiseAutomergeException(COMMAND_ERROR, "Too many files returned from depot -- path must specify one file", command=cmd)
    return result[1]['data']
###########################################################################
##### CHANGELIST OPERATIONS
#####
# getChangelist
# retrieve the changelist specified
# getChangelist
# return the "p4 describe" record for the changelist, or None when no such
# changelist exists; any other describe failure raises COMMAND_ERROR
def getChangelist(number):
    cmd = ["describe",str(number)]
    result = p4MarshalCmd(cmd,quiet=True)
    if not containsError(result):
        return result[0]
    if not 'no such changelist' in result[0]['data']:
        raiseAutomergeException(COMMAND_ERROR, "Error while trying to retrieve changelist {0}".format(number), command=cmd)
    return None
# createChangelist
# creates a changelist using the provided arguments.
# createChangelist
# creates a changelist using the provided arguments.
def createChangelist(user=None, client=None, description="[CREATED BY AUTOMERGE]", jobs=None):
    """Create a new pending changelist and attach the given job fixes.

    user/client -- default to the current connection's user/client; when a
                   run-as user is configured it takes precedence over the
                   connection user
    jobs        -- list of {'job': name, 'jobstat': status} dicts to attach
    Returns the new changelist number (as a string) or -1 when the server
    reply could not be parsed."""
    # None default instead of the original mutable [] literal
    if jobs is None:
        jobs = []
    num = -1
    if user is None:
        user = g_connectionInfo['userName']
        if g_runAs is not None:
            user = g_runAs
    if client is None:
        client = g_connectionInfo['clientName']
    changeSpec = {
        'Change' : 'new',
        'Client' : client,
        'User' : user,
        'Status' : 'new',
        'Description' : description}
    results = p4InputCmd(changeSpec, ["change","-i"], quiet=False)
    if containsError(results):
        # pass the spec via map= -- the original handed it positionally to
        # the 'command' parameter, filing it under the wrong key in the
        # exception data (cf. the identical raise in updateChangelist)
        raiseAutomergeException(CHANGELIST_ERROR, "Error while creating a changelist", map={'changeSpec':changeSpec})
    if len(results) == 1:
        m = re.search("Change ([0-9]*) created",results[0]['data'])
        if m:
            num = m.group(1)
    # link each requested job to the new changelist
    for i in range(0,len(jobs)):
        cmd = ["fix","-s",jobs[i]['jobstat'],"-c",num,jobs[i]['job']]
        result = p4MarshalCmd(cmd)
        if containsError(result):
            raiseAutomergeException(COMMAND_ERROR, "Error creating fix ({0},{1})".format(num, jobs[i]['job']), command=cmd)
    return num
# updateChangelist
# updates the specified changelist
def updateChangelist(changelist):
    """Write *changelist* (a dict in ``p4 describe`` output form) back to the
    server with ``p4 change -i``, preserving its file list and job fixes."""
    changeSpec = {
        'Change' : changelist['change'],
        'Client' : changelist['client'],
        'User' : changelist['user'],
        'Status' : 'pending',
        'Description' : changelist['desc']}
    #Populate Files and Jobs fields
    #Note: changelist is passed in as a dictionary formated as
    #the output of p4 describe, but we need to convert all fields
    #to the dictionary format needed by p4 change
    #depotFile<index> from p4 describe = Files<index> in p4 change
    #job<index> from p4 describe = Jobs<index> in p4 change
    for field in changelist:
        matchFile = re.search('depotFile([0-9]+)', field)
        matchJob = re.search('job([0-9]+)',field)
        if matchFile:
            fileIndex = matchFile.group(1)
            changeSpec['Files'+fileIndex]=changelist['depotFile'+fileIndex]
        elif matchJob:
            fileIndex = matchJob.group(1)
            changeSpec['Jobs'+fileIndex]=changelist['job'+fileIndex]
    results = p4InputCmd(changeSpec, ["change","-i"], quiet=False)
    if containsError(results):
        raiseAutomergeException(CHANGELIST_ERROR, "Error while updating a changelist", map={'changeSpec':changeSpec})
# deletePendingChangelist
# reverts files, cleans fixes, and deletes the specified changelist
# deletePendingChangelist
# reverts files, cleans fixes, and deletes the specified changelist
def deletePendingChangelist(changelist):
    """Tear down a pending changelist (a ``p4 describe`` dict): revert its
    files, remove its job fixes, then delete the change itself.  Raises
    COMMAND_ERROR when any step reports a problem."""
    path = "//" + changelist['client'] + "/..."
    # first revert the files in the changelist
    cmd = ["revert","-c",changelist['change'], path]
    result = p4Cmd(cmd)
    # BUGFIX: the two raises below formerly formatted an undefined name
    # 'num', which would have raised NameError instead of the intended error
    if 'unknown' in result.lower():
        raiseAutomergeException(COMMAND_ERROR, "Error reverting changelist {0}".format(changelist['change']), command=cmd)
    if 'already committed' in result:
        raiseAutomergeException(COMMAND_ERROR, "Error reverting changelist {0}".format(changelist['change']), command=cmd)
    # now remove any fixes associated with the changelist
    cleanFixes(changelist['change'])
    # now the changelist can be deleted
    cmd = ["change","-d",changelist['change']]
    result = p4Cmd(cmd)
    if 'unknown' in result.lower():
        raiseAutomergeException(COMMAND_ERROR, "Error deleting changelist {0}".format(changelist['change']), command=cmd)
# submitChangelist
# submits the given changelist number
def submitChangelist(changelistNumber,bypassConfirm=False):
    """Submit the given changelist, prompting the user for confirmation
    unless running in daemon/auto mode or bypassConfirm is set.  On submit,
    '[submitted by automerge]' is appended to the description so the submit
    trigger can recognize automerge submissions.  Returns the marshalled
    submit result records, or exits the script when the user declines."""
    result = None
    submit = True
    # if not running in daemon mode, then we want to prompt the user to
    # confirm the submission of the changelist.
    if not (g_args.daemonMode or g_args.auto or bypassConfirm):
        submit = False
        prompt = "Submit changelist {0}? [N]|[Y] : ".format(changelistNumber)
        while True:
            sys.stdout.flush()
            # empty input defaults to "no"
            ans = upper(raw_input(prompt))
            if not ans:
                submit = False
                break
            if ans not in ['Y','N']:
                print 'please enter Y or N.'
                continue
            if ans == 'Y':
                submit = True
                break
            if ans == 'N':
                submit = False
                break
    if submit:
        # ready to submit, so let's append [submitted by automerge] to the end of the changelist description
        # so the submit trigger will bypass the checks used to verify that the submit was performed by
        # the vsubmit.py script
        changelist = getChangelist(changelistNumber)
        if not '[submitted by automerge]' in changelist['desc']:
            changelist['desc'] += '\n[submitted by automerge]'
            updateChangelist(changelist)
            debug("Appended [submitted by automerge] to change description")
        cmd = ["submit","-c",str(changelistNumber)]
        result = p4MarshalCmd(cmd)
        if containsError(result):
            raiseAutomergeException(COMMAND_ERROR, "Error submitting changelist {0}".format(changelistNumber), command=cmd)
        else:
            info("Change Submitted\n")
    else:
        exit("Changelist {0} not submitted... check your workspace".format(changelistNumber))
    return result
### end of submitChangelist
# describeChangelist
# retrieves the details for a changelist
def describeChangelist(changelistNumber):
    """Return the marshalled output of "p4 describe -s" for the changelist.

    Raises COMMAND_ERROR when the describe command reports an error.
    """
    cmd = ["describe","-s",str(changelistNumber)]
    output = p4MarshalCmd(cmd)
    if containsError(output):
        raiseAutomergeException(COMMAND_ERROR, "Error retrieving changelist {0}".format(changelistNumber), command=cmd)
    return output
# getChangelistFiles
# retrieves the depot files associated with the specified changelist
def getChangelistFiles(changelistNumber):
# Extract the names of files from the source changelist and build up a list
# Of files we expect to see in the merge for this changelist...
fileList = []
files = p4MarshalCmd(["files","//...@{0},@{0}".format(changelistNumber)])
for file in files:
if file['code'] == 'error':
print "ERROR getting files from changelist " + changelistNumber
fileList.append(file['depotFile'])
return fileList
# getLastCopyupChangelist
# retrieves the last "copy-up" changelist number for the given branchspec
# this is used to determine what moves/deletes need to get propagated
# to the target. If no last "copy-up" changelist is found, then 0 is returned
def getLastCopyupChangelist(branchSpec):
    """Return the number of the most recent "copy-up" changelist submitted
    through branchSpec, or 0 when none exists.

    Used to determine which moves/deletes still need to be propagated to
    the target.
    """
    source = getMergeSource(branchSpec)
    searchDesc = "Copy-up: Promote using {0}".format(branchSpec)
    sourcePath = getBranchPath(source) + "/..."
    cmd = ["changes",sourcePath]
    changes = p4MarshalCmd(cmd)
    if containsError(changes):
        raiseAutomergeException(COMMAND_ERROR, "Error retrieving changes", command=cmd)
    for change in changes:
        # cheap prefix test first; only fetch the full changelist when the
        # short description looks like a copy-up
        if not change['desc'].startswith("Copy-up: Promote using"):
            continue
        fullChange = getChangelist(change['change'])
        # confirm this copy-up was done with *this* branch spec
        if searchDesc in fullChange['desc']:
            return int(fullChange['change'])
    return 0
###########################################################################
##### BRANCH/MERGE CONFIGURATION FILE OPERATIONS
#####
# getDefinedBranchPaths
# retrieves all of the "path" attributes from the branches.xml file
# and returns these as a list
def getDefinedBranchPaths():
    """Return the 'path' attribute of every <branch> element in branches.xml."""
    return [branch.attrib['path'] for branch in g_branchesXML.findall("./branch")]
# getBranchPath
# examines the branches.xml file and retrieves the path corresponding to
# the name provided. If no name is found, None is returned
def getBranchPath(name):
    """Return the path configured for the named branch in branches.xml,
    or None when no branch with that name is defined."""
    for branch in g_branchesXML.findall("./branch"):
        if branch.attrib['name'] == name:
            return branch.attrib['path']
    return None
# getBranchOwner
# examines the branches.xml file and retrieves the owner corresponding to
# the name provided. If no name is found, None is returned
def getBranchOwner(name):
    """Return the owner configured for the named branch in branches.xml,
    or None when no branch with that name is defined."""
    for branch in g_branchesXML.findall("./branch"):
        if branch.attrib['name'] == name:
            return branch.attrib['owner']
    return None
# getAutomergeBranchspecs
# retrieves the merge elements from the merges.xml file that have the direction attribute
# 'MergeDown' or 'Both' and the auto attribute of 'true'
def getAutomergeBranchspecs():
    """Return attrib dicts of every merges.xml entry with auto='true' and a
    direction of 'MergeDown' or 'Both'.

    Raises CONFIGURATION_ERROR when no automatic merges are configured.
    """
    wanted = ('MergeDown','Both')
    merges = [node.attrib
              for node in g_mergesXML.findall("./merge[@auto='true']")
              if node.attrib['direction'] in wanted]
    if not merges:
        raiseAutomergeException(CONFIGURATION_ERROR, "No automatic merges configured in the merges.xml file")
    return merges
# getMerge
# examines the merges.xml file and retrieves the nodes corresponding to the
# branchSpec provided. By default, only merges with the direction 'MergeDown'
# or 'Both' are returned.
def getMerge(branchSpec, direction="MergeDown"):
    """Return the attrib dict of the merges.xml entry for branchSpec.

    direction may be a single value or a list; 'Both' is always accepted
    in addition to the requested direction(s). Raises CONFIGURATION_ERROR
    when zero or more than one entry exists for the branch spec.
    """
    if not isinstance(direction, list):
        direction = [direction]
    if not 'Both' in direction:
        direction.append('Both')
    # FIX: the format string only has {0}; the stray second argument
    # (direction) previously passed here was silently ignored
    results = g_mergesXML.findall("./merge[@branchSpec='{0}']".format(branchSpec))
    if len(results) > 1:
        raiseAutomergeException(CONFIGURATION_ERROR, "More than one entry for branch {0} exists in the merges.xml file".format(branchSpec))
    merges = [r for r in results if r.attrib['direction'] in direction]
    if len(merges) == 0:
        raiseAutomergeException(CONFIGURATION_ERROR, "No entry for branch {0} in the merges.xml file".format(branchSpec))
    return merges[0].attrib
# getMergeTarget
# retrieves the target for the specified branchspec. If no target or more than one target is found,
# that means merges.xml is misconfigured, and an AutomergeException is raised
def getMergeTarget(branchSpec):
    """Return the 'target' attribute of the merges.xml entry for branchSpec.

    Raises CONFIGURATION_ERROR when zero or multiple entries exist, since
    either indicates a misconfigured merges.xml.
    """
    entries = g_mergesXML.findall("./merge[@branchSpec='{0}']".format(branchSpec))
    if len(entries) == 0:
        raiseAutomergeException(CONFIGURATION_ERROR, "No entries for branch {0} in the merges.xml file".format(branchSpec))
    if len(entries) > 1:
        raiseAutomergeException(CONFIGURATION_ERROR, "More than one entry for branch {0} exists in the merges.xml file".format(branchSpec))
    return entries[0].attrib['target']
# getMergeSource
# retrieves the source for the specified branchspec. If no entry or more than
# one entry is found, merges.xml is misconfigured and an AutomergeException is raised
def getMergeSource(branchSpec):
    """Return the 'source' attribute of the merges.xml entry for branchSpec.

    Raises CONFIGURATION_ERROR when zero or multiple entries exist, since
    either indicates a misconfigured merges.xml.
    """
    entries = g_mergesXML.findall("./merge[@branchSpec='{0}']".format(branchSpec))
    if len(entries) == 0:
        raiseAutomergeException(CONFIGURATION_ERROR, "No entries for branch {0} in the merges.xml file".format(branchSpec))
    if len(entries) > 1:
        raiseAutomergeException(CONFIGURATION_ERROR, "More than one entry for branch {0} exists in the merges.xml file".format(branchSpec))
    return entries[0].attrib['source']
###########################################################################
##### BRANCHSPEC OPERATIONS
#####
# getBranchSpec
# retrieves the specified branch spec
def getBranchSpec(branchSpec):
    """Return the marshalled form ("p4 branch -o") of the named branch spec.

    Raises COMMAND_ERROR when the command reports an error.
    """
    cmd = ["branch","-o",branchSpec]
    output = p4MarshalCmd(cmd)
    if containsError(output):
        raiseAutomergeException(COMMAND_ERROR, "Error obtaining branch specification {0}".format(branchSpec),command=cmd)
    return output[0]
# getBranchSpecMappings
# retrieves the mappings (View(n) fields) from the specified branch spec
def getBranchSpecMappings(branchSpec):
    """Return the View0..ViewN mapping lines of the branch spec, in order.

    The marshalled spec stores the view as numbered keys; collection stops
    at the first missing index.
    """
    spec = getBranchSpec(branchSpec)
    mappings = []
    index = 0
    key = "View0"
    while key in spec:
        mappings.append(spec[key])
        index += 1
        key = "View{0}".format(index)
    return mappings
# saveBranchSpecMappings
# updates the view in the specified branch spec with the mappings provided
# in the array. This completely replaces the view of the existing branch
# spec.
def saveBranchSpecMappings(branchSpec, mappings):
    """Replace the entire View of the branch spec with the given mappings.

    All non-view fields (Branch, Owner, Description, Options) are carried
    over from the existing spec; the previous View is discarded entirely.
    Raises CONFIGURATION_ERROR when "p4 branch -i" reports an error.
    """
    oldSpec = getBranchSpec(branchSpec)
    newSpec = {
        'Branch' : oldSpec['Branch'],
        'Owner' : oldSpec['Owner'],
        'Description' : oldSpec['Description'],
        'Options' : oldSpec['Options']}
    for i, mapping in enumerate(mappings):
        newSpec["View{0}".format(i)] = mapping
    results = p4InputCmd(newSpec, ["branch","-i"],quiet=True)
    if results[0]['code'] == 'error':
        # FIX: message previously said "obtaining" although this path saves
        raiseAutomergeException(CONFIGURATION_ERROR, "Error saving branch specification")
# displayBranchSpecMappings
# a convenience routine to display the View of the specified branch spec
def displayBranchSpecMappings(branchSpec):
    """Print (via info) each View line of the named branch spec."""
    info("Mappings for branch spec {0}:".format(branchSpec))
    for mapping in getBranchSpecMappings(branchSpec):
        info("    {0}".format(mapping))
# clearBranchSpecMappings
# clears all of the lines in the View of the branch spec except the
# first one. The function prompts the user to confirm the changes,
# either saving the branch spec or returning without saving
def clearBranchSpecMappings(branchSpec):
    """Drop every View line of the branch spec except the first, then ask
    the user whether to save the trimmed spec or discard the change.

    Does nothing (no prompt) when the spec already has a single mapping.
    """
    mappings = getBranchSpecMappings(branchSpec)
    changed = len(mappings) > 1
    # slice-delete everything after the first mapping in one step
    del mappings[1:]
    if not changed:
        return
    info("")
    info("New mappings for branch spec {0}".format(branchSpec))
    for mapping in mappings:
        info("    {0}".format(mapping))
    info("")
    info("What do you want to do?")
    info("  (s)ave mappings")
    info("  (r)eturn without saving")
    while True:
        sys.stdout.flush()
        choice = upper(raw_input("choice: "))
        if choice == 'S':
            saveBranchSpecMappings(branchSpec, mappings)
            return
        if choice == 'R':
            info("discarding changes...")
            return
        info('>  choice not recognized... reenter')
### end of clearBranchSpecMappings
# editBranchSpecMappings
# steps through each View in the branch spec, allowing the user to skip over the
# mapping, or delete it.
def editBranchSpecMappings(branchSpec):
    """Step through each View line of the branch spec, letting the user
    skip, delete, modify (not implemented; treated as skip), or exit.
    If anything was deleted, the user is then asked to save or discard.
    """
    mappings = getBranchSpecMappings(branchSpec)
    info("Editing mappings for branch spec {0} (one at a time):".format(branchSpec))
    changed = False
    stopEditing = False
    # FIX: iterate over a snapshot of the list -- deleting from the live
    # list while iterating it silently skipped the entry after each delete
    for m in list(mappings):
        if stopEditing:
            break
        prompt = "{0}{1}  (s)kip (d)elete (m)odify e(x)it : ".format(m,os.linesep)
        while True:
            sys.stdout.flush()
            ans = upper(raw_input(prompt))
            if ans not in ['S','D','M','X']:
                info('>>> choice not recognized... reenter')
                continue
            if ans == 'D':
                mappings.remove(m)
                changed = True
            elif ans == 'M':
                # TODO: modify is not implemented; currently same as skip
                pass
            elif ans == 'X':
                # FIX: 'X' previously only left this prompt loop, so "exit"
                # behaved exactly like "skip"; now it ends the editing pass
                stopEditing = True
            break
    if changed:
        info("")
        info("New mappings for branch spec {0}".format(branchSpec))
        for m in mappings:
            info("    {0}".format(m))
        info("")
        info("What do you want to do?")
        info("  (s)ave mappings")
        info("  (r)eturn without saving")
        while True:
            sys.stdout.flush()
            ans = upper(raw_input("choice: "))
            if ans not in ['S','R']:
                info('>  choice not recognized... reenter')
                continue
            if ans == 'S':
                saveBranchSpecMappings(branchSpec, mappings)
                break
            if ans == 'R':
                info("discarding changes...")
                break
### end of editBranchSpecMappings
# isLocked
# function to determine if the branch in question has a lock file
# (with a lockuser entry) in the g_promoteLocksDepotPath depot
# location
def isLocked(branchName):
    """Return True when <branchName>.lck exists under g_promoteLocksDepotPath
    and contains a 'lockuser' entry (checked with "p4 grep")."""
    lockPath = g_promoteLocksDepotPath + branchName + ".lck"
    if not depotFileExists(lockPath):
        return False
    results = p4MarshalCmd(['grep','-e','lockuser',lockPath],quiet=True)
    if containsError(results):
        raiseAutomergeException(CONFIGURATION_ERROR, "unable to get information from lock file for branch {0}".format(branchName))
    # a grep hit shows up as a record carrying 'matchedLine'
    return len(results) > 0 and 'matchedLine' in results[0]
# getLockStatus
# returns the contents of the lock file as a dictionary
def getLockStatus(branchName):
    """Parse the branch's .lck file into a dict of key=value entries.

    Comment lines (starting with '#') are ignored; only the first '=' on a
    line splits key from value. Returns an empty dict when no lock file
    exists.
    """
    status = {}
    promoteStatusPath = g_promoteLocksDepotPath + branchName + ".lck"
    if depotFileExists(promoteStatusPath):
        data = getTextFile(promoteStatusPath)
        # FIX: splitlines() handles \n, \r\n and \r uniformly; splitting on
        # os.linesep failed for lock files written on a different platform
        for line in data.splitlines():
            if line.startswith("#"):
                continue
            if '=' in line:
                key, value = line.split("=",1)
                status[key] = value
    return status
# lockBranch
# creates a lock file <branchname>.lck in the g_promoteLocksDepotPath
# with the current user stored as the property lockuser and a timestmp
# stored with the property locktime
def lockBranch(branchName):
    """Create (or overwrite) <branchName>.lck in g_promoteLocksDepotPath,
    recording the current user (lockuser) and a timestamp (locktime), and
    submit it in its own changelist.

    Raises CONFIGURATION_ERROR when the branch is already locked or any of
    the p4 operations fails.
    """
    if isLocked(branchName):
        status = getLockStatus(branchName)
        raiseAutomergeException(CONFIGURATION_ERROR, "branch {0} is already locked".format(branchName),map={'branchName':branchName,'status':status})
    promoteStatusPath = g_promoteLocksDepotPath + branchName + ".lck"
    info ("Creating pending change to edit {0} in order to LOCK parent branch".format(branchName + ".lck"))
    changelistNumber = createChangelist(description="LOCKING branch {0}".format(branchName))
    # open for edit when the lock file already exists in the depot
    if depotFileExists(promoteStatusPath):
        cmd = ["edit","-c",str(changelistNumber),promoteStatusPath]
        result = p4MarshalCmd(cmd)
        if containsError(result):
            raiseAutomergeException(CONFIGURATION_ERROR, "unable to edit branch lock file for branch {0}".format(branchName))
    result = p4MarshalCmd(["where",promoteStatusPath])
    if containsError(result):
        raiseAutomergeException(CONFIGURATION_ERROR, "unable to get client location for lock file for branch {0}".format(branchName))
    localPath = result[0]['path']
    # FIX: use "with" so the handle is closed even if a write raises
    with open(localPath, "w") as lockfile:
        lockfile.write("##### LOCKING BRANCH")
        lockfile.write(os.linesep)
        lockfile.write("lockuser={0}".format(g_connectionInfo['userName']))
        lockfile.write(os.linesep)
        lockfile.write("locktime={0}".format(time.time()))
        lockfile.write(os.linesep)
    # add instead of edit when the lock file is new to the depot
    if not depotFileExists(promoteStatusPath):
        cmd = ["add","-c",str(changelistNumber),promoteStatusPath]
        result = p4MarshalCmd(cmd)
        if containsError(result):
            raiseAutomergeException(CONFIGURATION_ERROR, "unable to add branch lock file for branch {0}".format(branchName))
    submitChangelist(changelistNumber,bypassConfirm=True)
### end of lockBranch
# unlockBranch
# clears out the contents of the lock file, replacing it with a comment
def unlockBranch(branchName):
    """Replace the contents of <branchName>.lck with an "unlocked" comment
    and submit the change; warns (without raising) when the branch is not
    currently locked."""
    promoteStatusPath = g_promoteLocksDepotPath + branchName + ".lck"
    if isLocked(branchName):
        info ("Creating pending change to edit {0} in order to UNLOCK parent branch".format(branchName + ".lck"))
        changelistNumber = createChangelist(description="UNLOCKING branch {0}".format(branchName))
        cmd = ["edit","-c",str(changelistNumber),promoteStatusPath]
        result = p4MarshalCmd(cmd)
        if containsError(result):
            raiseAutomergeException(CONFIGURATION_ERROR, "unable to edit branch lock file for branch {0}".format(branchName))
        result = p4MarshalCmd(["where",promoteStatusPath])
        if containsError(result):
            raiseAutomergeException(CONFIGURATION_ERROR, "unable to get client location for lock file for branch {0}".format(branchName))
        localPath = result[0]['path']
        # FIX: use "with" so the handle is closed even if the write raises
        with open(localPath, "w") as lockfile:
            lockfile.write("##### UNLOCKED BRANCH")
            lockfile.write(os.linesep)
        submitChangelist(changelistNumber,bypassConfirm=True)
    else:
        warn("branch {0} isn't locked".format(branchName))
###########################################################################
##### INTEGRATION OPERATIONS
#####
# listIntegrationTargetFiles
# routine to retrieve a list of the target files that will be involved in the
# integration
def listIntegrationTargetFiles(branchSpec, changelist, skipDeleted=True):
    """Preview the integration of the changelist through branchSpec and
    return the target depot paths involved.

    skipDeleted -- when true (default), files whose integrate action is
                   'delete' are omitted.
    Raises INTEGRATE_ERROR when the preview reports the branch cannot be
    integrated or branched from.
    """
    debug("listIntegrationTargetFiles")
    files = []
    path = "//...@{0}".format(changelist['change'])
    cmd = ["integrate","-t","-n","-o","-b",branchSpec,path]
    mergeItems = p4MarshalCmd(cmd)
    for item in mergeItems:
        if item['code'] == 'error':
            if "can't integrate from" in item['data']:
                raiseAutomergeException(INTEGRATE_ERROR, "Cannot integrate branch {0} for changelist @{1}".format(branchSpec,changelist['change']),map={'branchSpec':branchSpec, 'changelist':changelist})
            if "can't branch from" in item['data']:
                raiseAutomergeException(INTEGRATE_ERROR, "Cannot branch from {0} for changelist @{1}".format(branchSpec,changelist['change']),map={'branchSpec':branchSpec, 'changelist':changelist})
        if 'depotFile' in item:
            # idiom fix: test truthiness rather than "== True"
            if skipDeleted and item['action'] == 'delete':
                continue
            files.append(item['depotFile'])
    return files
# lockTargetFiles
# routine to edit and lock the target files that will be involved in the integration
def lockTargetFiles(branchSpec, changelist, newChangelistNum):
    """Open for edit and lock every existing target file of the pending
    integration in the given new changelist."""
    for depotFile in listIntegrationTargetFiles(branchSpec, changelist):
        if not depotFileExists(depotFile):
            continue
        debug("locking {0}".format(depotFile))
        # force-sync so the file is present in the workspace; the sync
        # result is intentionally unchecked (best effort)
        p4MarshalCmd(["sync","-f",depotFile],quiet=True)
        # the file must be opened for edit before it can be locked
        editCmd = ["edit","-c",str(newChangelistNum),depotFile]
        if containsError(p4MarshalCmd(editCmd,quiet=True)):
            raiseAutomergeException(COMMAND_ERROR, "Error executing edit command", command=editCmd)
        lockCmd = ["lock","-c",str(newChangelistNum),depotFile]
        if containsError(p4MarshalCmd(lockCmd,quiet=True)):
            raiseAutomergeException(COMMAND_ERROR, "Error executing lock command", command=lockCmd)
# getOpened
# returns a list of the files opened on the provided path
def getOpened(path):
    """Return the depot paths of files currently opened under path/..."""
    cmd = ["opened","{0}/...".format(path)]
    results = p4MarshalCmd(cmd)
    if containsError(results):
        raiseAutomergeException(COMMAND_ERROR, "Error getting opened files", command=cmd)
    return [r['depotFile'] for r in results if r['code'] == 'stat']
# doIntegrate
# performs the actual integration for the specified parameters. By default,
# integrate performs a preview (-n) so if you want to perform an actual
# integration then you must set preview=False in the function call
def doIntegrate(branchSpec,syncChangelistNumber,newChangelistNumber=0,preview=True):
    """Run "p4 integrate -t -b branchSpec //...@syncChangelistNumber".

    By default this is a preview (-n); pass preview=False (with a real
    newChangelistNumber) to perform the integration into that changelist.
    Raises INTEGRATE_ERROR when the command reports an error.
    """
    debug("doIntegrate()")
    modeArgs = ["-n"] if preview else ["-c",str(newChangelistNumber)]
    cmd = ["integrate","-t"] + modeArgs + ["-b",branchSpec,"//...@{0}".format(syncChangelistNumber)]
    results = p4MarshalCmd(cmd)
    if containsError(results):
        cl = getChangelist(syncChangelistNumber)
        raiseAutomergeException(INTEGRATE_ERROR, "Error running integrate command", command=cmd, map={'changelist':cl})
    return results
# doResolve
# performs the resolve on the given path. By default, the function
# operates in preview mode (-n) so if you want to actually resolve
# then you need to set preview=False in the function call
def doResolve(path,preview=True):
    """Run "p4 resolve -am" on path/... (preview with -n by default).

    Returns the marshalled output, or [] when there is nothing to resolve;
    raises RESOLVE_ERROR on any other error.
    """
    cmd = ["resolve"]
    if preview:
        cmd.append("-n")
    cmd += ["-am","{0}/...".format(path)]
    results = p4MarshalCmd(cmd)
    if containsError(results,logError=False):
        # "no file(s) to resolve" is a normal outcome, not a failure
        if "no file(s) to resolve" not in results[0]['data']:
            raiseAutomergeException(RESOLVE_ERROR, "Error running resolve command", command=cmd, map={'results':results})
        results = []
    return results
# doDiff2
# counts the number of files returned by doing a diff2 on the branch specification
def doDiff2(branchSpec):
    """Return the number of differing files reported by
    "p4 diff2 -q -b branchSpec" (0 when none differ)."""
    cmd = ["diff2","-q","-b",branchSpec]
    results = p4MarshalCmd(cmd)
    if results[0]['code'] == 'error':
        # "No differing files" arrives as an error record but means zero
        if 'No differing files' not in results[0]['data']:
            raiseAutomergeException(COMMAND_ERROR, "Error running diff2 command", command=cmd)
        return 0
    return sum(1 for entry in results if 'depotFile' in entry)
def promote(branchSpec, changelistNumber):
    """Run "p4 copy -r -b branchSpec" into the given changelist to promote
    the source content; raises COMMAND_ERROR on failure."""
    cmd = ["copy","-r","-c",str(changelistNumber),"-b",branchSpec]
    output = p4MarshalCmd(cmd)
    if output[0]['code'] == 'error':
        raiseAutomergeException(COMMAND_ERROR, "Error running copy command", command=cmd)
    return output
###########################################################################
##### COPY-UP OPERATIONS
#####
# doCopyUp
# performs the steps involved in the merge-down/copy-up process
# p4 opened //target_of_branch_spec/... (Abort if files opened).
# p4 integ -n -t -b branch_spec
# p4 integ -r -t -b branch_spec
# p4 resolve -at //target_of_branch_spec/...
# p4 submit -d "Promote using branch_spec." //target_of_branch_spec/...
# p4 diff2 -q -b branch_spec (Done if no diffs)
# p4 copy -r -b branch_spec
# p4 submit -d "Promote follow-up using branch_spec." //target_of_branch_spec/...
def doCopyUp():
    """Interactive merge-down/copy-up (promote) workflow for g_args.branchSpec.

    Steps (mirrors the command sequence documented above this function):
      1. abort if files are opened under the source or target paths
      2. lock the source branch (always unlocked again in the finally block)
      3. verify the merge-down is complete via an integrate preview
      4. let the user normalize the branch map interactively
      5. replay moves done in the target since the last copy-up
      6. reverse-integrate, resolve -at, and submit the copy-up changelist
      7. if diff2 still reports differences, run a follow-up "copy -r"
    """
    merge = getMerge(g_args.branchSpec,"CopyUp")
    if merge['direction'] == 'CopyUp':
        # if the direction specified in merges.xml is 'CopyUp', then source
        # and target need to be inverted
        targetPath = getBranchPath(merge['source'])
        sourcePath = getBranchPath(merge['target'])
    else:
        sourcePath = getBranchPath(merge['source'])
        targetPath = getBranchPath(merge['target'])
    # p4 opened //sourcePath/... (Abort if files opened).
    opened = getOpened(sourcePath)
    if len(opened) > 0:
        error("*** PROMOTION NOT ALLOWED ***")
        error("*** Copy-up cannot occur if there are opened files. The following are open in your workspace:")
        for f in opened:
            error("    {0}".format(f))
        exit("*** FINISHED - ERROR ***\n")
    # p4 opened //targetPath/... (Abort if files opened).
    opened = getOpened(targetPath)
    if len(opened) > 0:
        error("*** PROMOTION NOT ALLOWED ***")
        error("*** Copy-up cannot occur if there are opened files. The following are open in your workspace:")
        for f in opened:
            error("    {0}".format(f))
        exit("*** FINISHED - ERROR ***\n")
    try:
        # lock the branch before starting the normalization, as the time this takes is variable.
        # the branch is unlocked in the finally block
        lockBranch(getMergeSource(g_args.branchSpec))
        # p4 integ -n -t -b branch_spec
        results = p4MarshalCmd(["integrate","-n","-t","-b",g_args.branchSpec])
        # check to see if preview indicates that integration flags are needed
        flagCheck = checkForIntegrationFlagMessages(results)
        if flagCheck['flag']:
            datamap = {'preview':results,'branchSpec':g_args.branchSpec,'flagCheck':flagCheck}
            raiseAutomergeException(INTEGRATEFLAG_ERROR, "Integration unable to complete", map=datamap)
        # check to see if there are ignores in the branch map; if so warn the
        # user, since normalization may silently drop intentionally-diverged files
        branchMapEntries = getBranchSpecMappings(g_args.branchSpec)
        foundIgnoreEntries = False
        for entry in branchMapEntries:
            # ignore entries are view lines beginning with "-"
            if re.search("^-",entry):
                foundIgnoreEntries = True
        if foundIgnoreEntries:
            warn("Branch map currently contains ignore entries.")
            warn("Changes to files matching these entries may not have")
            warn("been merged from the source to the target. If these entries")
            warn("are removed during branch-map normalization, changes to these files")
            warn("that have been made in the source branch will be lost (overwritten).")
            warn("If this is not the intent,")
            warn("exit the script, remove the ignore entries from the branch-map,")
            warn("and perform a final merge-down to integrate changes made to these")
            warn("files in the source stream with changes made in the target stream.")
            prompt = "  Proceed with promote? (c)ontinue or (q)uit: "
            while True:
                sys.stdout.flush()
                ans = upper(raw_input(prompt))
                if not ans:
                    break
                if ans not in ['C','Q']:
                    info('   please enter c or q.')
                    continue
                if ans == 'C':
                    break
                if ans == 'Q':
                    exit()
        # if everything is integrated, then the integrate command will produce an 'error', which
        # is detected below (All revisions already integrated). If no error is detected, then there
        # are files requiring integration, and we should bail out with an appropriate message
        if results[0]['code'] == 'error':
            if 'All revision(s) already integrated' in results[0]['data']:
                info("Verified ready to promote... merge down was done")
            else:
                error("*** PROMOTION FAILED ***")
                error(results[0]['data'])
                exit("*** FINISHED - ERROR ***\n")
        else:
            error("*** PROMOTION NOT ALLOWED ***")
            error("Merge-down needs to be performed on branch {0}".format(g_args.branchSpec))
            exit("*** FINISHED - ERROR ***\n")
        # interactive branch-map normalization loop: repeats until the user
        # chooses (c)ontinue or e(x)its the script
        while True:
            info("\n\n*** Normalize Branch Map ***")
            info("The branch map now needs to be normalized prior to the copy-up.")
            info("Any mappings that record moves in the target branch should now be removed.")
            info("Also, any ignore mappings added in order to intentionally diverge")
            info("files in the target branch should be removed as well.")
            info("(NOTE: the only ignore entries that should remain in the branch map")
            info("are those that were used to diverge files in the target branch, but")
            info("it is not desired to promote the changes in these diverged files.)")
            info("")
            displayBranchSpecMappings(g_args.branchSpec)
            info("")
            info("What do you want to do?")
            info("  (d)elete all branch mappings except the first")
            info("  (e)dit the branch mappings")
            info("  (m)anually edit the branch mappings externally")
            info("  (c)ontinue with the promotion routine")
            info("  e(x)it the script")
            prompt = "choice : "
            sys.stdout.flush()
            ans = upper(raw_input(prompt))
            if ans not in ['E','D','M','C','X']:
                info('>  choice not recognized... reenter')
                continue
            if ans == 'E':
                editBranchSpecMappings(g_args.branchSpec)
            if ans == 'D':
                clearBranchSpecMappings(g_args.branchSpec)
            if ans == 'M':
                info(">>> EDIT THE BRANCH MAPPINGS IN ANOTHER TOOL")
                sys.stdout.flush()
                enter = raw_input("   Press enter when ready...")
                continue
            if ans == 'C':
                break
            if ans == 'X':
                exit()
        # get the last copy-up changelist applied to the source from the target
        # look for any changelists since the last copy-up
        # examine these changelists for moved files and perform move(s) in the target,
        # as necessary
        lastCopyUpCL = getLastCopyupChangelist(g_args.branchSpec)
        # create a path to select all changes from the last copyup changelist to #head
        path = "{0}/...@{1},#head".format(targetPath,lastCopyUpCL)
        # use the 'changes' command to get a list of changelists
        cmd = ["changes",path]
        results = p4MarshalCmd(cmd)
        # loop over the changelists, looking for moved files
        for r in results:
            checkForMovedFiles(g_args.branchSpec,r['change'])
        # preview the reverse integration before creating the copy-up change
        results = p4MarshalCmd(["integrate","-r","-n","-t","-b",g_args.branchSpec])
        if results[0]['code'] == 'error':
            if 'All revision(s) already integrated' in results[0]['data']:
                exit(results[0]['data'])
            else:
                errorExit(results[0]['data'])
        newChangelistNum = createChangelist(description="Copy-up: Promote using {0}".format(g_args.branchSpec))
        # p4 integ -r -t -b branch_spec
        results = p4MarshalCmd(["integrate","-r","-c",str(newChangelistNum),"-t","-b",g_args.branchSpec])
        if results[0]['code'] == 'error':
            if 'All revision(s) already integrated' in results[0]['data']:
                exit(results[0]['data'])
            else:
                errorExit(results[0]['data'])
        # p4 resolve -at //target_of_branch_spec/...
        resolveResults = p4MarshalCmd(["resolve","-at","{0}/...".format(sourcePath)])
        if resolveResults[0]['code'] == 'error':
            errorExit(resolveResults[0]['data'])
        info("\n*** REVIEW PENDING CHANGELIST ***")
        info("Pending changelist {0} has been created to perform the copy-up.".format(newChangelistNum))
        info("Review this this changelist before proceeding. Verify this changelist")
        info("contains all files that should be promoted.")
        # submitChangelist prompts for confirmation unless in daemon/auto mode
        submitResults = submitChangelist(newChangelistNum)
        if submitResults[0]['code'] == 'error':
            errorExit(submitResults[0]['data'])
        # p4 diff2 -q -b branch_spec (Done if no diffs)
        count = doDiff2(g_args.branchSpec)
        if count>0:
            info("Need to promote {0} file(s)".format(count))
            newChangelistNum = createChangelist(description="Copy-up: Promote follow-up using {0}".format(g_args.branchSpec))
            # p4 copy -r -b branch_spec
            promoteResults = p4MarshalCmd(["copy","-r","-c",str(newChangelistNum),"-b",g_args.branchSpec])
            if promoteResults[0]['code'] == 'error':
                errorExit(promoteResults[0]['data'])
            submitResults = submitChangelist(newChangelistNum)
            if submitResults[0]['code'] == 'error':
                errorExit(submitResults[0]['data'])
        else:
            info("Follow-up promote not required")
    finally:
        # always release the branch lock, even on error/exit paths
        unlockBranch(getMergeSource(g_args.branchSpec))
### end of doCopyUp
###########################################################################
##### MERGE-DOWN OPERATIONS
#####
# checkResolveResults
# process the results of the resolve command
# Returns dictionary where key 'ok' stores True if all Files AutoResolved Sucessfully,
# False if there were conflicts, key 'skipped' stores a list of fileInfo
# for each file that could not be autoresolved
def checkResolveResults(results):
    """Inspect marshalled "p4 resolve" output, which arrives as three
    records per file (file info, diff info, action).

    Returns {'ok': bool, 'skipped': [fileInfo-data, ...]} where 'ok' is
    False when any file's action line reports "resolve skipped".
    """
    LINES_PER_RESOLVED_FILE = 3
    check = {'ok': True, 'skipped':[]}
    # FIX: use floor division -- plain "/" yields a float under Python 3,
    # making range() raise (identical result under Python 2)
    for i in range(1, len(results) // LINES_PER_RESOLVED_FILE + 1):
        fileInfo = results[i*LINES_PER_RESOLVED_FILE-LINES_PER_RESOLVED_FILE]
        action = results[i*LINES_PER_RESOLVED_FILE-1]
        # plain substring test; no regex metacharacters are involved
        if "resolve skipped" in action['data']:
            check['ok'] = False
            check['skipped'].append(fileInfo['data'])
    return check
# unlockfile
# issues the unlock command on the specified path
def unlockfile(path):
    """Issue "p4 unlock" on the given path; the result is not checked
    (best effort)."""
    p4MarshalCmd(["unlock", path])
# checkForIntegrationFlagMessages
# returns a dictionary indicating which, if any, integration flags were
# present in the result set. This is then interrogated to determine whether
# manual intervention is needed. Basically, if ret['flag'] is set to true,
# then processing stops
def checkForIntegrationFlagMessages(results):
    """Scan marshalled integrate output for messages saying an integrate
    flag (-Ds/-Dt/-Di, or -d) would be required.

    Returns {'flag','Ds','Dt','Di'} booleans; 'flag' is True when any such
    message is present, meaning manual intervention is needed and
    processing must stop.
    """
    ret = {'flag' : False, 'Ds' : False, 'Dt' : False, 'Di' : False}
    for entry in results:
        if 'data' not in entry:
            continue
        message = entry['data']
        for flagName in ('Ds', 'Dt', 'Di'):
            if 'without -d or -{0} flag'.format(flagName) in message:
                ret['flag'] = True
                ret[flagName] = True
    return ret
# checkPreviewCount
# this function checks the number of files in the resolve preview against
# the files in the changelist. The 'action' if blocks determine whether to skip
# an entry (next) or count it (counter+=1). Ultimately, the counter is compared
# to the results preview, returning the whether the resolve preview count is
# what was expected (True or False)
def checkPreviewCount(changelist, rp):
    """Compare the resolve-preview length (rp) against the number of files
    in the changelist expected to need a resolve.

    add/branch/delete/move-delete actions never require a resolve;
    move/add, edit, integrate -- and any unrecognized action (logged via
    debug) -- are counted. Returns True when len(rp) equals that count.
    """
    # NOTE: the original code used bare "next" statements, which are no-op
    # expressions in Python 2 (not "continue"); the exclusive elif chain
    # made that harmless, but "continue" states the intent correctly.
    noResolveActions = ('add', 'branch', 'delete', 'move/delete')
    counter = 0
    for i in range(countDepotFiles(changelist)):
        action = changelist["action{0}".format(i)]
        if action in noResolveActions:
            continue
        if action not in ('move/add', 'edit', 'integrate'):
            debug("cannot identify action: {0}".format(action))
        counter += 1
    debug("checking preview count: len(rp)={0}, counter={1}".format(len(rp), counter))
    return (len(rp) == counter)
# getInterchanges
# function to execute the interchanges command on the specified
# branch, looking up to the changelist specified by label (if provided)
def getInterchanges(branchSpec, label=""):
    """Run "p4 interchanges -b branchSpec", optionally limited to @label.

    Returns the list of pending changelist records, or [] when everything
    is already integrated; raises COMMAND_ERROR on any other error.
    """
    path = "//...@{0}".format(label) if label else "//..."
    cmd = ["interchanges","-b",branchSpec,path]
    results = p4MarshalCmd(cmd)
    if results[0]['code'] != 'error':
        return list(results)
    if "All revision(s) already integrated" in results[0]['data']:
        # not a failure: there are simply no changelists to integrate
        debug("### All revisions(s) already integrated")
        return []
    error(results[0]['data'])
    raiseAutomergeException(COMMAND_ERROR, "Error running interchanges command",command=cmd)
# mergeDown
# the main method to "merge down" the given changelist. Comments inline
def mergeDown(merge, changelist):
    """Merge the given submitted changelist into the target branch of ``merge``.

    Flow: preview the integrate, create a pending changelist carrying the
    original description/jobs, re-preview and compare, lock the target files,
    integrate, auto-resolve, and (in daemon/auto mode) submit. The ``finally``
    block cleans up the pending changelist on failure and restores ``g_runAs``.

    Raises AutomergeException at each checkpoint that requires manual merging.
    """
    global g_runAs
    newChangelist = None
    changelistNumber = changelist['change']
    changelistHistory = []
    # try to find the original changelist number in the changelist description
    # (present when this changelist is itself the product of an earlier merge)
    foundOriginal = re.search('Merge @([0-9]*) ', changelist['desc'])
    if foundOriginal is not None:
        # get the original changelist number
        originalChangelistNumber = foundOriginal.group(1)
        debug("This is a merge-of-a-merge...original change was "+originalChangelistNumber)
        foundHistory = re.search('\[merge history: ([0-9,]*)\]', changelist['desc'])
        # retrieve the changelist history from the description, if available
        if foundHistory is not None:
            changelistHistory = foundHistory.group(1).split(',')
    else:
        # this changelist _is_ the original changelist
        debug("This is the original submit being merged")
        originalChangelistNumber = changelistNumber
    # append the current changelist number to the history
    changelistHistory.append(changelistNumber)
    try:
        # Check to see if there are files open in the current workspace. If there are, raise an exception
        if not g_args.daemonMode:
            opened = getOpened(getBranchPath(merge['target']))
            if len(opened) > 0:
                raiseAutomergeException(FILESOPEN_ERROR, "Files cannot be open in your workspace", map={'opened':opened})
        # propagate any source-branch file moves to the target before integrating
        checkForMovedFiles(merge['branchSpec'], changelistNumber)
        info("Previewing integration...")
        ip = doIntegrate(branchSpec=merge['branchSpec'], syncChangelistNumber=changelistNumber, preview=True)
        # first, check the integration preview for any known showstopper
        # flags... these indicate that manual integration is required
        flagCheck = checkForIntegrationFlagMessages(ip)
        if flagCheck['flag']:
            datamap = {'preview':ip, 'branchSpec':merge['branchSpec'], 'changelist':changelist, 'flagCheck':flagCheck}
            raiseAutomergeException(INTEGRATEFLAG_ERROR, "Integration unable to complete", map=datamap)
        # now check the length of the integrate preview... if it is zero, then there's nothing
        # to do, otherwise we need to do some processing
        if len(ip) > 0:
            # the original changelist description will be appended to the new changelist
            # description to make the process as transparent as possible
            originalChangelist = getChangelist(originalChangelistNumber)
            originalJobs = getJobs(originalChangelist)
            if g_args.daemonMode and g_honorUserProtections:
                # NOTE(review): this logs g_runAs *before* reassigning it, so the
                # message shows the previous value — confirm that is intended.
                info("Now running commands as user {0}".format(g_runAs))
                g_runAs = originalChangelist['user']
            else:
                g_runAs = None
            # build a description for the new changelist
            desc = """Merge @{0} using {1}
----
{2}
[merge history: {3}]
[original submitter: {4}]
""".format(originalChangelist['change'],merge['branchSpec'],originalChangelist['desc'],",".join(changelistHistory),originalChangelist['user'],)
            newChangelistNum = createChangelist(description=desc, jobs=originalJobs)
            info("Created pending change @{0}".format(newChangelistNum))
            newChangelist = getChangelist(newChangelistNum)
            # get the integration preview again and bail out if it has changed somehow since we
            # got it earlier
            info("Performing a second integ preview...")
            ip2 = doIntegrate(branchSpec=merge['branchSpec'], syncChangelistNumber=changelistNumber, preview=True)
            # check to see if the integration preview has changed. need to do this before locking the target files, as otherwise the
            # lists won't match
            if not compareLists(ip, ip2):
                raiseAutomergeException(INTEGRATEPREVIEWMISMATCH_ERROR, "Error: files scheduled for resolve differs between pre- and post-lock previews", map={'branchSpec':merge['branchSpec'],'changelist':changelistNumber})
            # lock the files involved in the integration
            info("Locking integ target files")
            lockTargetFiles(merge['branchSpec'], changelist, newChangelistNum)
            # ok, we got this far, so let's go ahead and perform the integrate
            info("Performing integ")
            doIntegrate(branchSpec=merge['branchSpec'], syncChangelistNumber=changelistNumber, newChangelistNumber=newChangelistNum, preview=False)
            targetPath = getBranchPath(merge['target'])
            if targetPath == None:
                message = "Can't find branch name \"" + merge['target'] + "\" in branches.xml"
                raiseAutomergeException(CONFIGURATION_ERROR, message)
            # get a preview of the resolve
            info("Getting preview of auto-resolve")
            rp = doResolve(targetPath, preview=True)
            # check the size of the resolve preview, comparing it to the files in the changelist
            if not checkPreviewCount(changelist, rp):
                # if the resolve preview count is unexpected, then skip if in daemon mode. Otherwise, prompt
                # the user whether to continue or quit
                if not g_args.daemonMode:
                    # get the files in the integration preview
                    toBeResolvedFiles = []
                    for previewItem in ip2:
                        if previewItem['code'] == 'stat':
                            toBeResolvedFiles.append(previewItem['fromFile'])
                    # warn the user that there are ignored files
                    print "\n *** WARNING ***"
                    print "The changelist being merged, @" + changelistNumber + ", contains the following files"
                    print "that will not be scheduled for resolve.\n"
                    # list every changelist file that does not appear in the
                    # integrate preview (substring match against fromFile paths)
                    for filename in getChangelistFiles(changelistNumber):
                        foundFile = False
                        for scheduledFile in toBeResolvedFiles:
                            if scheduledFile.find(filename) >= 0:
                                foundFile = True
                        if not foundFile:
                            print filename
                    print """
This probably means that these files(s) have been deleted in the target branch, and an ignore
entry has been added to the branch map
INVESTIGATE THE CAUSE OF THE FILE(s) BEING IGNORED IN BRANCH MAP TO DETERMINE IF THE CHANGES TO
THESE FILES IN CHANGELIST {0} NEED TO BE PORTED TO OTHER FILES.
Enter continue to proceed with merge of all other files in the changelist.
""".format(changelistNumber)
                    prompt = " Proceed with merge? (c)ontinue or (q)uit: "
                    # interactive continue/quit prompt; empty answer continues
                    while True:
                        sys.stdout.flush()
                        ans = upper(raw_input(prompt))
                        if not ans:
                            break
                        if ans not in ['C','Q']:
                            info(' please enter c or q.')
                            continue
                        if ans == 'C':
                            break
                        if ans == 'Q':
                            exit("*** FINISHED - WARNING ***")
                else:
                    raiseAutomergeException(RESOLVEPREVIEWMISMATCH_ERROR, "Error: integrate preview has different number of files than resolve preview", map={'branchSpec':merge['branchSpec'],'changelist':changelistNumber,'preview':rp})
            # At this point, only continue to try to resolve and submit the changelist
            # if we are running in daemon mode or if the auto flag was set
            if g_args.daemonMode or g_args.auto:
                # run the resolve and then check the results for errors
                info("Attempting to auto-resolve")
                resolveResults = doResolve(targetPath, preview=False)
                resolveCheck = checkResolveResults(resolveResults)
                if not resolveCheck['ok']:
                    raiseAutomergeException(AUTORESOLVE_ERROR, "Error: Could not AutoResolve all files", map={'results':resolveResults, 'skipped':resolveCheck['skipped']})
                # if we get this far, then everything looks good... let's go ahead and try
                # to submit. if the submit results in an error, stop processing
                info("Submitting pending change @{0}".format(newChangelistNum))
                submitResult = submitChangelist(newChangelistNum)
                if submitResult[0]['code'] != 'stat':
                    raiseAutomergeException(SUBMIT_ERROR, "Error: Problem during submit")
        else: # ip = 0
            if not g_args.daemonMode:
                info("There are no files to resolve")
    finally:
        # need to do some processing if we actually created a new changelist for the
        # integration... what we do depends on whether we are running in daemon mode
        # or interactive mode
        if newChangelist is not None:
            if g_args.daemonMode:
                # unlock everything that might be locked
                info("Unlocking files")
                unlockfile("//...")
                # revert the new changelist
                cl = getChangelist(newChangelist['change'])
                if cl['status'] != 'submitted':
                    # only need to revert the changelist if it isn't submitted
                    info("Deleting pending change @{0}".format(newChangelist))
                    deletePendingChangelist(newChangelist)
                else:
                    # if the changelist is submitted, then change ownership to the
                    # original changelist user. This only needs to be done if the
                    # changelist was submitted as the automerge user
                    if not g_honorUserProtections:
                        info("Changing user of submitted change @{0}, to {1}".format(cl, originalChangelist['user']))
                        setChangelistOwner(cl, originalChangelist['user'])
            else:
                # not in daemonMode, thus we're in interactive mode... we want to leave
                # the pending changelist in the user's workspace
                cl = getChangelist(newChangelist['change'])
                if cl['status'] != 'submitted':
                    info("**** Changelist {0} saved in workspace {1}".format(cl['change'], cl['client']))
        # clear g_runAs to ensure that any subsequent p4 calls are run as automerge user
        g_runAs = None
### end of mergeDown
# checkForMovedFiles
# function that looks for moved files in the specified changelist. For any moved files found, the
# routine generates the necessary move commands. In interactive mode, the user is prompted to
# confirm these commands and submit the changelist. In daemon mode, the moves are attempted in
# the target.
def checkForMovedFiles(branchSpec, sourceChangeNum):
#Look for moved files in sourceChange, if moved files are found
#inspect the integration history to find where they moved to
debug("Checking for moved files in @{0}".format(sourceChangeNum))
user=g_connectionInfo['userName']
client=g_connectionInfo['clientName']
foundMovedFiles=False
movedToLocation = {}
movedFileOldLocation = ""
merge = getMerge(branchSpec)
if g_args.promote and merge['direction'] == 'Both':
# if in a promotion situation, then the source and target from the merges.xml
# file are reversed, but this is only true if the merge in question is
# used for both MergeDown and CopyUp (i.e. is set to 'Both'). If the merge
# direction is 'CopyUp' only, then the source and target are already reversed
sourceRoot = getBranchPath(merge['target'])
targetRoot = getBranchPath(merge['source'])
else:
sourceRoot = getBranchPath(merge['source'])
targetRoot = getBranchPath(merge['target'])
debug("For move detection, assuming sourceRoot={0},targetRoot={1}".format(sourceRoot,targetRoot))
sourceChangeDetails = p4MarshalCmd(["describe","-s",str(sourceChangeNum)])[0]
# loop over the details for the changelist
for key in sourceChangeDetails:
matchAction = re.search('action([0-9]*)',key)
if matchAction:
fileAction = sourceChangeDetails[key]
# if we found a move/add, then we need to deal with it in the target stream
if fileAction == 'move/add':
fileIndex = matchAction.group(1)
movedFileNewLocation = sourceChangeDetails['depotFile'+fileIndex]
integrationHistory = p4MarshalCmd(["integrated",movedFileNewLocation])
for entry in integrationHistory:
if entry['change'] == str(sourceChangeNum):
movedFileOldLocation = entry['fromFile']
debug("Integ History reports file was moved from {0}".format(movedFileOldLocation))
#check to see if file has already exists in new location (move already performed)
movedFileNewLocationInTarget = movedFileNewLocation.replace(sourceRoot,targetRoot)
if not depotFileExists(movedFileNewLocationInTarget):
foundMovedFiles = True
movedToLocation[movedFileOldLocation] = movedFileNewLocation
info("The following file was moved in source, not yet moved in target: {0}".format(movedFileOldLocation))
else:
info("The following file was moved in source, already moved in target:\n\t{0}".format(movedFileNewLocationInTarget))
# if we have any moved files, we need to deal with them. If not, we are done
if foundMovedFiles:
# in interactive mode, we want to prompt the user for choices
if not g_args.daemonMode:
info("**** Moved Files Detected in Source Changelist ***")
for oldFile in movedToLocation:
info(oldFile + " => " + movedToLocation[oldFile])
info("Files must first be moved in target stream before merging")
info("changelist @{0} ...".format(sourceChangeNum))
info("Continue to generate p4 move commands for your review:")
prompt = " What do you want to do? (c)ontinue or (q)uit: "
while True:
sys.stdout.flush()
ans = upper(raw_input(prompt))
if not ans:
break
if ans not in ['C','Q']:
info(' please enter c or q.')
continue
if ans == 'C':
break
if ans == 'Q':
exit()
#create new pending changelist for moves
moveDescription = "Propagating moves from changelist {0} prior to integration [reviewed-by:none]".format(sourceChangeNum)
moveChangeListNumber = createChangelist(description=moveDescription)
debug("Created pending change @{0} for moving files".format(moveChangeListNumber))
#generate move commands
moveCommands = []
for oldFile in movedToLocation:
targetBranchOldLocation = oldFile.replace(sourceRoot, targetRoot)
targetBranchNewLocation = movedToLocation[oldFile].replace(sourceRoot, targetRoot)
debug("Generating move command to move {0} to {1}".format(targetBranchOldLocation, targetBranchNewLocation))
# if the old target file has been renamed or moved, then the variable targetBranchOldLocation
# will be wrong. This next part attempts to find the new location of the file. If found,
# then that new location will be targetBranchOldLocation. In the end, the targetBranchNewLocation
# will be the new name/location for the file, as trying to figure out how that maps to some
# arbitrary moved location is too tricky ;) If things cannot be figured out, an exception will
# be raised and the message that the script is unable to figure things out will be inserted
# The net effect of the following code is this: if a file was moved in the source branch to location A
# and was also (prior to the merge of the move) moved to location B in the target Branch, the merge
# will perform perform a move of the file from location A to location B.
if not depotFileExists(targetBranchOldLocation):
debug("The following file was not found in target branch, checking to see if it was moved to another location: {0}".format(targetBranchOldLocation))
ok = False
if depotFileExists(targetBranchOldLocation,showDeleted=True):
fs = fstat(targetBranchOldLocation)
if fs['headAction'] == "move/delete":
debug("Confirmed {0} has already been moved...finding out where".format(targetBranchOldLocation))
results = describeChangelist(fs['headChange'])
if not containsError(results):
done = False
for key in results[0]: # outer loop
if done:
break # this breaks out of the outer loop
matchAction = re.search('action([0-9]*)',key)
if matchAction:
fileAction = results[0][key]
if fileAction == 'move/add':
fileIndex = matchAction.group(1)
mfnl = results[0]['depotFile'+fileIndex]
debug("Found file was moved to {0}".format(mfnl))
integrationHistory = p4MarshalCmd(["integrated",mfnl])
for entry in integrationHistory: # inner loop
if entry['change'] == fs['headChange']:
if entry['fromFile'] == targetBranchOldLocation:
debug("Confirmed the move occurred in @{0}".format(entry['change']))
done = True # set to true to break out of the outer loop
targetBranchOldLocation = mfnl # resets the variable
ok = True # set to true to avoid raising an exception
log("Detected {0} was previously moved to {1} in target branch, will proceed with move to {3}".format(oldFile,mfnl,targetBranchNewLocation))
break # this breaks out of the inner loop
if not ok:
raiseAutomergeException(MOVE_ERROR, "Error: Detected possible simultaneous move in source and target: {0}".format(targetBranchOldLocation),map={'branchSpec':branchSpec,'changelist':sourceChangeNum})
syncCommand = ["sync","-f",targetBranchOldLocation]
editCommand = ["edit","-c",str(moveChangeListNumber),targetBranchOldLocation]
moveCommand = ["move","-c",str(moveChangeListNumber),targetBranchOldLocation,targetBranchNewLocation]
if g_args.daemonMode:
moveCommands.append(syncCommand)
moveCommands.append(editCommand)
moveCommands.append(moveCommand)
# in interactive mode, we want to prompt the user for choices
if not g_args.daemonMode:
info("*** REVIEW MOVE COMMANDS TO VERIFY THEY ARE CORRECT *** ")
for i in range(len(moveCommands)):
if i % 2 != 0:
continue
print " ".join(moveCommands[i])
print " ".join(moveCommands[i+1])
#get verificaiton from user to execute moves
prompt = " Do you want to execute these commands? (c)ontinue or (q)uit: "
while True:
sys.stdout.flush()
ans = upper(raw_input(prompt))
if not ans:
break
if ans not in ['C','Q']:
info(' please enter c or q.')
continue
if ans == 'C':
break
if ans == 'Q':
exit()
# try to perform the move commands, throw an exception if there's an error
for command in moveCommands:
if not g_args.daemonMode:
info("Running Command: " + " ".join(command))
results = p4MarshalCmd(command)
if containsError(results):
raiseAutomergeException(COMMAND_ERROR, "Error while attempting to move file...", command=cmd)
# try to submit the changelist
submitChangelist(moveChangeListNumber)
### end of checkForMovedFiles
###########################################################################
##### EMAIL MESSAGES USED BY DAEMON MODE
#####
# sendScriptFailureMessage
# sends the exception to the automergeAdmins
def sendScriptFailureMessage(exception):
    """Mail the stringified exception to the automerge administrators."""
    body = str(exception)
    sendEmail(g_automergeAdmins, "Automerge Script Error", body)
# getBranchOwnerEmail
# function to retrieve the branch owner's email address,
# returns None if not found
def getBranchOwnerEmail(branchSpec):
    """Return the email address of the target-branch owner for branchSpec.

    Returns None when the merge entry, the branch owner, or the owner's
    user record cannot be found.
    """
    merge = getMerge(branchSpec)
    if merge is None:
        return None
    owner = getBranchOwner(merge['target'])
    if owner is None:
        return None
    ownerUser = getUser(owner)
    if ownerUser is None:
        return None
    return ownerUser['Email']
def email_NoAutomerge(ex):
    """Notify the submitter (and branch owner, if known) that merging stopped
    because the changelist description contains [automerge:no]."""
    changelist = ex.data['changelist']
    submitter = getUser(changelist['user'])
    recipients = [submitter['Email']]
    ownerEmail = getBranchOwnerEmail(ex.data['branchSpec'])
    if ownerEmail is not None:
        recipients.append(ownerEmail)
    body = """Dear {fullName},
The changelist number {number} submitted by you on {dateTime} could not be
processed automatically by the Automerge daemon process because the changelist
description contains the string [automerge:no].
This changelist must be integrated manually before any changes in the
branch {branchName} can be integrated. Please do this as soon as possible.
Sincerely,
The Automerge Daemon
""".format(
        fullName=submitter['FullName'],
        number=changelist['change'],
        dateTime=formatTime(changelist['time']),
        branchName=ex.data['branchSpec'])
    subject = "[AUTOMERGE] Automerge halted: encountered [automerge:no] in changelist {0} -- Needs manual integration".format(changelist['change'])
    sendEmail(recipients, subject, body)
def email_IntegrationFlagsRequired(ex):
    """Notify the submitter (and branch owner) that the integrate preview
    reported showstopper flags requiring a manual merge.

    BUGFIX: previewMessage was built from the preview data but never included
    in the email; it is now part of the message body so recipients can see
    why the integration failed.
    """
    changelist = ex.data['changelist']
    user = getUser(changelist['user'])
    branchSpec = ex.data['branchSpec']
    emailRecipients = [user['Email']]
    branchOwnerEmail = getBranchOwnerEmail(branchSpec)
    if branchOwnerEmail is not None:
        emailRecipients.append(branchOwnerEmail)
    # collect the error text lines from the integrate preview
    previewMessage = ""
    preview = ex.data['preview']
    for p in preview:
        if 'data' in p:
            previewMessage += " {0}".format(p['data'])
    mailMessage = """Dear {fullName},
The changelist {number} submitted by you on {dateTime} could not be
processed automatically by the Automerge daemon process.
The integrate preview reported the following:
{previewMessage}
This changelist must be integrated manually before any changes in the
branch {branchSpec} can be integrated. Please do this as soon as possible.
Sincerely,
The Automerge Daemon
""".format(
        fullName=user['FullName'],
        number=changelist['change'],
        dateTime=formatTime(changelist['time']),
        previewMessage=previewMessage,
        branchSpec=branchSpec)
    subject = "[AUTOMERGE] Automerge halted: encountered integration errors in changelist {0} -- Needs manual integration".format(changelist['change'])
    sendEmail(emailRecipients, subject, mailMessage)
### end of email_IntegrationFlagsRequired
def email_PreviewMismatch(ex):
    """Notify the submitter (and branch owner) that the pre-lock and post-lock
    integrate previews differed, so the merge was abandoned."""
    changelist = ex.data['changelist']
    branchSpec = ex.data['branchSpec']
    submitter = getUser(changelist['user'])
    recipients = [submitter['Email']]
    ownerEmail = getBranchOwnerEmail(branchSpec)
    if ownerEmail is not None:
        recipients.append(ownerEmail)
    body = """Dear {fullName},
Changelist {number} submitted by you on {dateTime} could not be
processed automatically by the Automerge daemon process.
Automerge began merge processing of this changelist, but detected
differences between the integrate preview performed at the beginning
of the merge and the second integrate preview performed just before locking
the target files.
This was possibly due to another user submitting changes to file(s) contained
in the changelist during merge processing.
This changelist must be integrated manually before any changes in the
branch {branchSpec} can be integrated. Please do this as soon as possible.
Sincerely,
The Automerge Daemon
""".format(
        fullName=submitter['FullName'],
        number=changelist['change'],
        dateTime=formatTime(changelist['time']),
        branchSpec=branchSpec)
    subject = "[AUTOMERGE] Automerge halted: encountered preview mismatch while merging changelist {0} -- Needs manual integration".format(changelist['change'])
    sendEmail(recipients, subject, body)
def email_SizeMismatch(ex):
    """Notify the submitter (and branch owner) that some changelist files were
    not scheduled for resolve (likely ignored via the branch map).

    BUGFIX: corrected two typos in the user-facing message body
    ("were not be scheduled" -> "were not scheduled", "occured" -> "occurred").
    """
    changelist = ex.data['changelist']
    user = getUser(changelist['user'])
    branchSpec = ex.data['branchSpec']
    emailRecipients = [user['Email']]
    branchOwnerEmail = getBranchOwnerEmail(branchSpec)
    if branchOwnerEmail is not None:
        emailRecipients.append(branchOwnerEmail)
    mailMessage = """Dear {fullName},
Changelist {number} submitted by you on {dateTime} could not be
processed automatically by the Automerge daemon process.
This changelist contains file(s) that were not scheduled for resolve.
This probably means that these file(s) have been deleted in the target branch,
and an ignore entry has been added to the branch map.
The changes to these ignored files may need to be incorporated
into other files if refactoring has occurred.
This changelist must be integrated manually before any other changes
using branch-map {branchSpec} can be integrated.
Please do this as soon as possible.
Sincerely,
The Automerge Daemon
""".format(
        fullName=user['FullName'],
        number=changelist['change'],
        dateTime=formatTime(changelist['time']),
        branchSpec=branchSpec)
    subject = "[AUTOMERGE] Automerge halted: possible ignored files in changelist {0} -- Needs manual integration".format(changelist['change'])
    sendEmail(emailRecipients, subject, mailMessage)
def email_MaxChangelistFiles(ex):
    """Notify the submitter (and branch owner) that the changelist exceeds the
    configured maximum file count and must be merged manually."""
    changelist = ex.data['changelist']
    branchSpec = ex.data['branchSpec']
    submitter = getUser(changelist['user'])
    recipients = [submitter['Email']]
    ownerEmail = getBranchOwnerEmail(branchSpec)
    if ownerEmail is not None:
        recipients.append(ownerEmail)
    body = """Dear {fullName},
Changelist {number} submitted by you on {dateTime} could not be
processed automatically by the Automerge daemon process.
The number of files in this changelist exceeds {maxLimit} files. Automerge
is currently configured to only merge changelists where the number
of files do not exceed this limit.
This changelist must be integrated manually before any additional changes using
branch-map {branchSpec} can be integrated. Please do this as soon as possible.
Sincerely,
The Automerge Daemon
""".format(
        fullName=submitter['FullName'],
        number=changelist['change'],
        dateTime=formatTime(changelist['time']),
        maxLimit=str(g_maxChangelistFiles),
        branchSpec=branchSpec)
    subject = "[AUTOMERGE] Automerge halted: changelist {0} Exceeds Maximum Files -- Needs manual integration".format(changelist['change'])
    sendEmail(recipients, subject, body)
def email_ResolveCheck(ex):
    """Notify the automerge admins that a p4 resolve command failed."""
    failedCommand = " ".join(ex.data['command'])
    failedResults = ex.data['results']
    # copy the admin list so the global is not shared with sendEmail
    recipients = list(g_automergeAdmins)
    body = """Dear automerge admins,
An error was encountered while attempting to perform the following p4 resolve
command:
{resolveCommand}
which generated the following error:
{resolveResults}
Please investigate and resolve as soon as possible.
Sincerely,
The Automerge Daemon
""".format(
        resolveCommand=failedCommand,
        resolveResults=failedResults)
    sendEmail(recipients, "[AUTOMERGE] Automerge halted: Resolve Error", body)
def email_MoveError(ex):
    """Notify the submitter (and branch owner) that propagating moved files
    into the target branch failed."""
    changelist = ex.data['changelist']
    branchSpec = ex.data['branchSpec']
    failureText = ex.parameter
    submitter = getUser(changelist['user'])
    recipients = [submitter['Email']]
    ownerEmail = getBranchOwnerEmail(branchSpec)
    if ownerEmail is not None:
        recipients.append(ownerEmail)
    body = """Dear {fullName},
Changelist {number} submitted by you on {dateTime} could not be
processed automatically by the Automerge daemon process.
An error was encountered while attempting to propagate moved files
from source branch to the target branch.
{message}
This changelist must be integrated manually before any changes in the
branch {branchSpec} can be integrated. Please do this as soon as possible.
Sincerely,
The Automerge Daemon
""".format(
        fullName=submitter['FullName'],
        number=changelist['change'],
        dateTime=formatTime(changelist['time']),
        message=failureText,
        branchSpec=branchSpec)
    subject = "[AUTOMERGE] Automerge halted: couldn't propagate moves in changelist {0} -- Needs manual integration".format(changelist['change'])
    sendEmail(recipients, subject, body)
def email_GenericError(ex):
    """Notify the automerge admins about an exception with no specific handler."""
    # copy the admin list so the global is not shared with sendEmail
    recipients = list(g_automergeAdmins)
    body = """Dear automerge admins,
Automerge encountered a generic exception (i.e. an exception with
no specific handler).
Exception Message:
{message}
Exception Data:
{data}
Processing was halted after encountering this error. Please investigate
the root cause. You may need to consider adding a specific
handler
for this type of exception.
Sincerely,
The Automerge Daemon
""".format(
        message=ex.parameter,
        data=ex.data)
    sendEmail(recipients, "[AUTOMERGE] Automerge halted: Generic Exception Encountered", body)
###########################################################################
##### DAEMON MODE
#####
def daemonMode():
    """Run one unattended automerge pass over every configured branch spec.

    For each merge line: skip locked targets, fetch unmerged changes via
    interchanges, honor the per-branch "stuck changelist" counter, enforce
    the [automerge:no] tag and the max-file limit, then mergeDown each
    change in order. AutomergeExceptions are reported by email; most allow
    processing to continue with the next branch spec, but changelist-creation
    and unclassified errors halt the run via errorExit.

    BUGFIX: removed a dead statement that constructed (and discarded) an
    AutomergeException on a line unreachable after raiseAutomergeException.
    """
    # first check to see that the automerge account is logged in
    checkLogin(g_amUser)
    # update the client to include the paths for the various branch specifications
    updateAutomergeClient()
    # retrieve a list of the automerge lines from the merges.xml file
    merges = getAutomergeBranchspecs()
    # iterate over that list in order
    exceptionCaught = False
    for merge in merges:
        info("Merging changes using {0}".format(merge['branchSpec']))
        # check to see if the target branch is locked... if so, skip this merge
        if isLocked(merge['target']):
            warn("skipping {0} because {1} is locked".format(merge['branchSpec'], merge['target']))
            continue
        try:
            # retrieve a list of the changelists that have not yet been integrated
            # in the given branch specification
            interchanges = getInterchanges(merge['branchSpec'])
            info("Found {0} unmerged changes".format(len(interchanges)))
            stuckChangelistCounterName = "automerge.stuckchangelist.{0}".format(merge['branchSpec'])
            # check to see if the next changelist that will be processed was one that we
            # got stuck on last time... if so, then we should just move on to the next merge
            if len(interchanges) > 0:
                counters = getAutomergeCounters(prefix=stuckChangelistCounterName)
                if stuckChangelistCounterName in counters:
                    if int(interchanges[0]['change']) <= int(counters[stuckChangelistCounterName]):
                        warn("Skipping merge {0} due to stuck changelist {1}".format(merge['branchSpec'], counters[stuckChangelistCounterName]))
                        continue
            # iterate over that list of changelists
            for ic in interchanges:
                info("\nAttempting automerge of @{0}".format(ic['change']))
                # retrieve the "real" changelist for the given interchange entry
                cl = getChangelist(ic['change'])
                # check to see if "[automerge:no]" exists in the changelist description. If it does,
                # then we need to stop processing now
                if re.search(r'\[\s*automerge\s*:\s*no\s*\]', cl['desc'], re.M|re.I):
                    info("Setting stuck change counter {0} to {1}".format(stuckChangelistCounterName, ic['change']))
                    setCounter(stuckChangelistCounterName, ic['change'])
                    raiseAutomergeException(NOAUTOMERGE_ERROR, "Error: encountered automerge:no in changelist description. Manual merge required",
                                            map={'branchSpec':merge['branchSpec'],'changelist':cl})
                # check to see if the number of files in the changelist exceeds the maximum set
                # in the automerge.ini file. If so, stop processing now
                if countDepotFiles(cl) > g_maxChangelistFiles:
                    info("Setting stuck change counter {0} to {1}".format(stuckChangelistCounterName, ic['change']))
                    setCounter(stuckChangelistCounterName, ic['change'])
                    raiseAutomergeException(MAXFILES_ERROR, "Error: Changelist file count exceeds maximum. Manual merge required",
                                            map={'branchSpec':merge['branchSpec'],'changelist':cl})
                # if we get to this point, then we should try to merge down the changelist
                mergeDown(merge, cl)
        except AutomergeException as ex:
            # if an exception is caught, process it appropriately
            # if the showstopper flag is set, then automerge stops processing. Otherwise
            # it continues to the next branchspec.
            error(str(ex))
            sendScriptFailureMessage(ex)
            exceptionCaught = True
            stopProcessing = False # if set to true for an error, processing will halt
            t = None
            if 'type' in ex.data:
                t = ex.data['type']
            if t == NOAUTOMERGE_ERROR:
                email_NoAutomerge(ex)
            elif t == CHANGELIST_ERROR:
                error("ERROR CREATING CHANGELIST")
                error(ex.data['changeSpec'])
                stopProcessing = True
            elif t == INTEGRATEFLAG_ERROR:
                email_IntegrationFlagsRequired(ex)
            elif t == INTEGRATEPREVIEWMISMATCH_ERROR:
                email_PreviewMismatch(ex)
            elif t == RESOLVEPREVIEWMISMATCH_ERROR:
                email_SizeMismatch(ex)
            elif t == MAXFILES_ERROR:
                email_MaxChangelistFiles(ex)
            elif t == RESOLVE_ERROR:
                email_ResolveCheck(ex)
            elif t == MOVE_ERROR:
                email_MoveError(ex)
            elif t == AUTORESOLVE_ERROR:
                email_AutoresolveError(ex)
            else:
                email_GenericError(ex)
                stopProcessing = True
            if stopProcessing:
                errorExit(ex.parameter)
    if not exceptionCaught:
        clearEmailCounters()
    # clean things up
    cleanupWorkspace()
### end of daemonMode
###########################################################################
##### INTERACTIVE MODE
#####
def interactiveMode():
try:
# make sure that the user is logged in... if not exit with message
checkLogin(user)
# get the merge XML element for the given branchspec
merge = getMerge(g_args.branchSpec)
# retrieve the interchanges for the given branchspec, up to the specified changelist
interchanges = getInterchanges(g_args.branchSpec,g_args.changelist)
if len(interchanges) > 0:
info("Found {0} changes that need merging\n".format(len(interchanges)))
# now check to see if we're processing multiple changelists...
if not g_args.auto:
if g_args.changelist != interchanges[0]['change']:
raiseAutomergeException(OUTOFORDER_ERROR, "*** Cannot Merge Changelists Out of Order ***",
map={'changelist':g_args.changelist,'interchanges':interchanges})
for ic in interchanges:
cl = getChangelist(ic['change'])
if cl == None:
raiseAutomergeException(CHANGELISTMISSING_ERROR, "Unable to retrieve changelist {0}".format(ic['change']),
map={'interchanges':interchanges})
# if we get to this point, then we should try to merge down the changelist
info("Attempting automerge of @{0}...".format(cl['change']))
mergeDown(merge,cl)
else:
exit("Nothing to integrate for changelist {0} on branch {1}".format(g_args.changelist,g_args.branchSpec))
except AutomergeException as ae:
debug("Exception Caught: AutomergeException: " + ae.parameter)
### EXCEPTION TYPE: CHANGELIST PROCESSING OUT OF ORDER
if ae.data['type'] == OUTOFORDER_ERROR:
error("*** ERROR: OUT OF ORDER MERGE ***")
error("Changelist @" + ae.data['changelist'] + " cannot be merged out of order.")
error("The following changelists have not yet been merged,")
error("rerun once these changes have been merged.....")
error("")
for change in ae.data['interchanges']:
if change['change'] == ae.data['changelist']:
#found the one the user wants...stop printing here
break
error(" @" + change['change'] + " submitted by " + change['user'])
error("")
error("If necessary, contact the submitter of the change to perform")
error("the merge of these older changelist(s).")
exit("*** FINISHED - ERROR ***")
### EXCEPTION TYPE: createChangelist
elif ae.data['type'] == CHANGELIST_ERROR:
error("*** ERROR: CANNOT CREATE CHANGELIST ***")
print ae.data['changeSpec']
exit("*** FINISHED - ERROR ***")
### EXCEPTION TYPE: changeListMissing
elif ae.data['type'] == CHANGELISTMISSING_ERROR:
error("*** ERROR: CHANGELIST MISSING ***")
error(ae.parameter)
exit("*** FINISHED - ERROR ***")
### EXCEPTION TYPE: flagsRequired
elif ae.data['type'] == INTEGRATEFLAG_ERROR:
changelist = ae.data['changelist']
preview = ae.data['preview']
previewMessage = ""
for p in preview:
if 'data' in p:
previewMessage += " {0}".format(p['data'])
error("*** INTEGRATION ERROR ***")
error("The following errors were encountered during the integrate preview:")
error(previewMessage)
error("You may need to update the branch map to resolve this problem.")
error("Rerun AutoMerge to resume merges after this change has been manually resolved.")
exit("*** FINISHED - ERROR ***")
### EXCEPTION TYPE: integ
elif ae.data['type'] == INTEGRATE_ERROR:
changelist = ae.data['changelist']
changelistFiles = getChangelistFiles(changelist['change'])
warn("*** WARNING ***")
warn("The changelist being merged, @" + changelist['change'] + ", contains " + str(len(changelistFiles)))
warn("files, but the integration preview reports no files will be integrated.")
warn("This probably means the modified files(s) have been deleted in the target branch,")
warn("and an ignore entry has been added to the branch map.")
warn("which functionality possibly refactored in other files. INVESTIGATE THE CAUSE")
warn("OF THE FILE BEING DELETED IN THE TARGET BRANCH TO DETERMINE IF THE CHANGES IN")
warn("CHANGELIST " + changelist['change'] + " NEED TO BE PORTED TO OTHER FILES.")
warn("Changelist does not need to be merged, however, other files may need to be")
warn("modified in the target branch to incorporate the fix.")
exit("*** FINISHED - WARNING ***")
elif ae.data['type'] == RESOLVEPREVIEWMISMATCH_ERROR:
warn("*** WARNING ***")
resolvePreviewFiles = []
if 'preview' in ae.data:
for line in ae.data['preview']:
resolvePreviewFiles.append(line['fromFile'])
if 'changelist' in ae.data:
warn('The following changelist could not be auto-resolved due to conflicts:')
cl = ae.data['changelist']
warn('Change: @{0}'.format(cl['change']))
warn('Original Owner: {0}'.format(cl['owner']))
counter = 0
for i in range(countDepotFiles(changelist)):
actionLabel = "action{0}".format(i)
depotFileLabel = "depotFile{0}".format(i)
depotFile = cl[depotFileLabel]
if not depotFile in resolvePreviewFiles:
warn("{0} ({1})".format(depotFile,cl[actionLabel]))
warn("Rerun AutoMerge to resume merges after this change has been manually resolved.")
exit("*** FINISHED - WARNING ***")
### EXCEPTION TYPE: moveError
elif ae.data['type'] == MOVE_ERROR:
error("*** MOVE/RENAME PROBLEM ***")
if 'file' in ae.data:
error("An error was encountered while trying to generate the edit/move commands.")
error("The file that caused the problem is:")
error(" {0}".format(ae.data['file']))
error("The file does not seem to exist in the depot.")
elif 'cmd' in ae.data:
error("An error was encountered while executing the command:")
error(" {0}".format(" ".join(ae.data['cmd'])))
exit("*** FINISHED - ERROR ***")
### EXCEPTION TYPE: autoresolveError
elif ae.data['type'] == AUTORESOLVE_ERROR:
error("*** AUTOMATIC RESOLVE ERROR ***")
error("*** Could not AutoResolve all files. The following were skipped:")
for f in ae.data['skipped']:
error(" {0}".format(f))
exit("*** FINISHED - ERROR ***\n")
### EXCEPTION TYPE: filesOpen
elif ae.data['type'] == FILESOPEN_ERROR:
error("*** FILES OPEN IN WORKSPACE ***")
error("*** Merge cannot occur if there are opened files. The following are open in your workspace:")
for f in ae.data['opened']:
error(" {0}".format(f))
exit("*** FINISHED - ERROR ***\n")
### SOME UNKNOWN EXCEPTION HAPPENED -- WE SHOULD NOT SEE THIS AND INSTEAD ACCOUNT FOR ALL ERROR TYPES
else:
error("*** EXCEPTION ENCOUNTERED ***")
error(ae.parameter)
error("type: {0}".format(ae.data['type']))
exit("*** FINISHED - ERROR ***")
### end of interactiveMode
###########################################################################
##### MAIN PROGRAM STARTS HERE
#####
def main(argv=None):
global g_args,g_parser,g_connectionInfo,user,client,g_amClientName,g_logfile
if argv is None:
argv = sys.argv
g_parser = argparse.ArgumentParser(description='AutoMerge processing script',usage=PROGRAM_USAGE)
g_parser.add_argument('-d', action='store_true', default=False, dest='daemonMode')
g_parser.add_argument('-b', action='store', dest='branchSpec', default="NONE")
g_parser.add_argument('-c', action='store', dest='changelist', default="NONE")
g_parser.add_argument('-v', action='store_true', default=False, dest='verbose')
g_parser.add_argument('--auto', action='store_true', default=False, dest='auto')
g_parser.add_argument('--promote', action='store_true', default=False, dest='promote')
g_parser.add_argument('--stdout', action='store_true', default=False, dest='stdout')
g_args = g_parser.parse_args()
if g_args.daemonMode:
starttime = time.time()
# get server information
g_connectionInfo = getConnectionInfo()
# parse out the version information and check to make sure it's supported
version = getServerVersion(g_connectionInfo['serverVersion'])
if version['major'] != g_supported_p4d_version:
error("The server is running a version ({0}) that is not supported by this script. {1} is required.".format(version['major'],g_supported_p4d_version))
exit()
# initialize the global variables
initialize()
#verify branches.xml and merge.xml don't have errors
verifyXMLFiles()
# next we need to determine if we are running in daemon or interactive mode... the main difference
# is that daemon mode will cycle through the branches, attempting to integrate, resolve and submit
# automatically. interactive mode, on the other hand, requires that the user specify the branch
# and changelist. interactive mode does not automatically submit, instead prompting the user to
# confirm the submission.
if g_args.daemonMode:
# daemon mode includes some timing information -- before doing some work we get the current
# timestamp using time.time() and save that in starttime. In the finally block we again
# obtain the current timestamp and then log the difference between that time and starttime.
if not g_args.stdout and len(g_logfileStr) > 0:
g_logfile = open(g_logfileStr, "a")
# check to see if automerge is enabled in the automerge.ini file. If it is not, bail out with
# a log message
if not g_automergeEnabled:
print "AUTOMERGE HAS BEEN DISABLED"
info("### AUTOMERGE RUN HALTED -- DISABLED {0}".format(time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime())))
exit()
info("########## STARTING ###### {0}".format(time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime())))
if g_connectionInfo['userName'] in g_automergeUsers:
debug("### Daemon mode")
# set client name
g_amClientName = g_hostname + "_automerge"
daemonMode()
else:
g_logfile = sys.stdout
errorExit("You do not have authorization to run automerge in daemon mode")
else:
debug("### Interactive mode")
user=g_connectionInfo['userName']
client=g_connectionInfo['clientName']
if g_args.branchSpec == "NONE":
usage("ERROR: Branch specification is required");
exit()
else:
#verify branch spec exists
results = p4MarshalCmd(["branches","-e",g_args.branchSpec])
if containsError(results):
errorExit("Error validating branchSpec " + g_args.branchSpec + " error with branches command")
if results == []:
errorExit(g_args.branchSpec + " entered as -b argument, but this branch spec does not exist in P4")
try:
getMerge(g_args.branchSpec)
except AutomergeException as ae:
error("*** ERROR IN merges.xml CONFIG FILE ***")
error(ae.parameter)
exit("*** FINISHED - ERROR ***\n")
t = getMergeTarget(g_args.branchSpec)
if isLocked(t):
lockStatus = getLockStatus(t)
message = "Target ({0}) of branch specification {1} is currently locked".format(t,g_args.branchSpec)
if 'lockuser' in lockStatus:
message += "\n Locked by user: {0}".format(lockStatus['lockuser'])
if 'locktime' in lockStatus:
message += "\n Locked since: {0}".format(formatTime(lockStatus['locktime']))
error("*** BRANCH LOCKED ***")
error(message)
exit("*** FINISHED - ERROR ***\n")
if g_args.changelist == "NONE" and not g_args.promote:
usage("\n*** ERROR: Changelist is required ***");
exit();
if not g_args.promote:
if getChangelist(g_args.changelist) == None:
error("*** CHANGELIST NOT FOUND ***")
error("changelist " + g_args.changelist + " does not exist")
exit("*** FINISHED - ERROR ***\n")
if g_args.promote:
try:
getMerge(g_args.branchSpec, "CopyUp")
except AutomergeException as ae:
error("*** PROMOTION NOT ALLOWED ***")
error(ae.parameter)
exit("*** FINISHED - ERROR ***\n")
doCopyUp()
else:
interactiveMode()
if g_args.daemonMode:
info("########## FINISHED ###### RUN TIME: {0} sec".format(time.time() - starttime))
else:
info("*** AUTOMERGE FINISHED ***")
### end of main
# Script entry point: run main() and propagate its return value as the
# process exit status.
if __name__ == '__main__':
    sys.exit(main())