#********************************************************************
#
# Copyright (C) 2005-2006 Hari Krishna Dara
#
# This file is part of p4admin.
#
# p4admin is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# p4admin is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#*******************************************************************
import os
import sys
import re
import string
import logging
import logging.handlers
import optparse
import datetime
import utils
import notify
# Importing self. This seems to be perfectly valid, and a way to obtain
# reference to the "self" module.
# CAUTION: This is actually tricky. When config is imported from other modules,
# this import returns immediately with an instance of the module that is
# incomplete (this is a feature of python for supporting circular imports).
# But when config is executed as stand-alone, python tries to execute the
# module twice, once as the main module, and once as the imported module (and
# for some reason, this causes weird missing attribute errors, though I
# verified that the module instance is exactly same). Fortunately, we do that
# only for testing.
# FIXME: I should replace this tricky self import by using a default_config.py
# module. The order then would be to:
# import default_config
# import user_config
# apply command-line-options.
# Currently we rely on python to generate NameError for missing basic
# configuration, but with the above change, we will have to do it manually
# (at least for some of them).
import config
# Module-level logger for this file, plus the root logger to which all
# handlers get attached.
log = logging.getLogger(__name__)
rootLogger = logging.getLogger()
# This check is required to avoid configuring logging again and again, while
# reloading the modules.
if not utils.typeIn(logging.StreamHandler, rootLogger.handlers):
    # Attach a console handler only when stdout has a usable file descriptor
    # (presumably -1 when running detached, e.g. as a service — TODO confirm
    # utils/platform behavior).
    if sys.stdout.fileno() != -1:
        # Basic console logger, before we can configure a file logger.
        handler = logging.StreamHandler(sys.stdout)
        handler.setFormatter(logging.Formatter("%(message)s"))
        rootLogger.addHandler(handler)
    # Enable only for debugging windows services, empty Application event log
    # first.
    #elif re.search(r'PythonService.exe', sys.argv[0]):
    #    handler = logging.handlers.NTEventLogHandler('P4AdminSvc')
    #    rootLogger.addHandler(handler)
# Command-line interface: -c/--config selects the user configuration module,
# -l/--log-level overrides the logging level, -D/--define overrides any
# individual configuration parameter.
optparser = optparse.OptionParser(usage="usage: %prog [options]", version="%prog 1.0")
optparser.add_option(
        "-c", "--config",
        dest="userConfigFile", default="user_config", metavar="PYMOD",
        help="specify a user configuration python module to import(default: "
             "user_config)")
# TODO: Make it accept string such that the user can specify INFO etc..
optparser.add_option(
        "-l", "--log-level",
        dest="logLevel", metavar="LOGLEVEL",
        help="specify the level at which logging should be done (see logging, "
             "default: INFO)")
optparser.add_option(
        "-D", "--define",
        dest="defns", action="append", metavar="name=value",
        help="specify name=value pairs to override configured parameters")
opts, args = optparser.parse_args()
del args  # Positional arguments are not used by this script.
# Based on http://starship.python.net/crew/theller/moin.cgi/HowToDetermineIfRunningFromExe
scriptsRoot = utils.getScriptsRoot()
# Add ../cfg to the path such that user configuration file can be placed there
# also.
sys.path.append(scriptsRoot+'/cfg')
## First import everything from the user configuration file.
try:
exec 'from '+opts.userConfigFile+' import *'
except:
log.exception('exception')
optparser.error('Exception importing user configuration python module: ' +
opts.userConfigFile)
if opts.logLevel != None:
logLevel = opts.logLevel
## Now override them with those specified on the command-line (if any)
# Each -D argument must be a single "name=value" pair; the value is stored as
# a string attribute on the config module (which is this module itself).
if opts.defns:
    for defn in opts.defns:
        try:
            (name, value) = defn.split('=')
            # This is the only way you can override a value with None.
            if value == 'None':
                value = None
            setattr(config, name, value)
        except ValueError:
            # FIX: was opts.error(...) — the optparse Values object has no
            # error() method, so a malformed -D raised AttributeError instead
            # of printing the intended usage error; the parser owns error().
            optparser.error('-D option needs name=value pairs: '+defn)
        # Drop the loop variable so it does not pollute the dumped
        # configuration namespace.
        del defn
# We don't set any defaults for the configuration parameters when they are
# required, and let any missing parameter cause runtime exception.
# NOTE: utils.setDefault presumably assigns the attribute on the config module
# only when the user configuration did not already define it — TODO confirm.
# Because "config" is this very module (see the self-import above), each
# setDefault also creates a module global, which is why bare-name reads like
# "packageInPerforce" below work.
# By default, we don't assume that the scripts are in perforce.
utils.setDefault(config, 'packageInPerforce', False)
if not packageInPerforce:
    utils.setDefault(config, 'p4ClientRemote', None)
    utils.setDefault(config, 'p4ClientLocal', None)
if sys.platform not in ('win32', 'cygwin'):
    utils.setDefault(config, 'p4RemoteClient', None)
utils.setDefault(config, 'packageSyncEnabled', True)
##
## START of advanced configuration.
##
utils.setDefault(config, 'p4Password', None)
utils.setDefault(config, 'syncScmFiles', ('depot/',))
# Use daemon mode transfer.
utils.setDefault(config, 'rsyncConfigFileName', None)
if rsyncConfigFileName != None and packageInPerforce:
    utils.setDefault(config, 'rsyncRemoteConfDir', None)
utils.setDefault(config, 'logLevel', 'INFO')
utils.setDefault(config, 'logFormat',
        "%(asctime)s [%(levelname)s] %(name)s: %(message)s")
utils.setDefault(config, 'logFileMaxSize', 10*1024*1024) # 10Mb
utils.setDefault(config, 'logFileMaxOld', 3)
# rsync options
# rsync over cygwin rsh is currently broken.
#utils.setDefault(config, 'remoteShell', 'rsh')
utils.setDefault(config, 'remoteShell', 'ssh')
utils.setDefault(config, 'rsyncOpts', '--stats -av')
utils.setDefault(config, 'rsyncPort', None)
utils.setDefault(config, 'quickRsyncExcludeFile', None)
utils.setDefault(config, 'rsyncRootModuleName', 'perforceroot')
utils.setDefault(config, 'rsyncJnlModuleName', 'perforcejournals')
utils.setDefault(config, 'incomingSubDir', 'incoming')
# If set to false, the journal is not truncated before backing up data. This is
# not desirable, and used to force a backup of data alone.
# NOTE(review): the default is the *string* 'True', not the bool True, unlike
# the other boolean flags here — looks unintentional; verify how consumers
# test this flag before changing it.
utils.setDefault(config, 'truncateJournal', 'True')
utils.setDefault(config, 'journalTruncPrefix', 'trunc')
# Set to False if you want to manage checkpoints and journals taken on backup
# server yourself.
utils.setDefault(config, 'autoManageCheckpoints', True)
# Number of local checkpoints to keep. The journals corresponding to them will
# also be kept.
utils.setDefault(config, 'numOldCheckPoints', 3)
utils.setDefault(config, 'logFile', "logs/p4admin.log.txt")
utils.setDefault(config, 'logFileShared', None)
utils.setDefault(config, 'jobExpectedRunDuration', datetime.timedelta(hours=24))
# Default Schedules
# Quick sync is run immediately after startup and every 1/2hr after that in a
# "non-heartbeat" mode (which means, there is at least 1/2hr idle time between
# two consecutive quick sync runs).
utils.setDefault(config, 'quickSyncRunInterval', 30*60) # 30min
utils.setDefault(config, 'quickSyncFirstRun', None) # Immediately
utils.setDefault(config, 'quickSyncHeartbeat', False)
utils.setDefault(config, 'quickSyncNotifyRun', False)
# timedelta object
# jobExpectedRunDuration is readable as a bare name here because setDefault
# above created it as a global of this (self-imported) module.
utils.setDefault(config, 'quickSyncExpectedRunDuration', jobExpectedRunDuration)
utils.setDefault(config, 'quickSyncEnabled', False)
# Run full sync at 8pm (24hr format) every day. Full sync is run at an interval
# of 24hrs in a "heartbeat" mode.
utils.setDefault(config, 'fullSyncRunInterval', 24*60*60) # 24hrs.
utils.setDefault(config, 'fullSyncFirstRun', "20:00:00") # 8pm
utils.setDefault(config, 'fullSyncHeartbeat', True)
utils.setDefault(config, 'fullSyncNotifyRun', False)
# timedelta object
utils.setDefault(config, 'fullSyncExpectedRunDuration', jobExpectedRunDuration)
utils.setDefault(config, 'fullSyncEnabled', True)
# Run checkpoint at 9pm (24hr format) every day. Checkpoint is run at an interval
# of 24hrs in a "heartbeat" mode.
utils.setDefault(config, 'checkpointRunInterval', 24*60*60) # 24hrs.
utils.setDefault(config, 'checkpointFirstRun', "21:00:00") # 9pm
utils.setDefault(config, 'checkpointHeartbeat', True)
utils.setDefault(config, 'checkpointNotifyRun', False)
# timedelta object
utils.setDefault(config, 'checkpointExpectedRunDuration', jobExpectedRunDuration)
utils.setDefault(config, 'checkpointEnabled', False)
utils.setDefault(config, 'checkpointOptions', '-z')
utils.setDefault(config, 'checkpointPrefix', 'nightly')
# Run verify at 10pm (24hr format) on Saturdays. Verify is run at an
# interval of 7days in a "heartbeat" mode.
# The "5," prefix presumably selects the weekday — TODO confirm against the
# scheduler that consumes *FirstRun values.
utils.setDefault(config, 'verifyRunInterval', 7*24*60*60) # 7days.
utils.setDefault(config, 'verifyFirstRun', "5,22:00:00") # 10pm
utils.setDefault(config, 'verifyHeartbeat', True)
utils.setDefault(config, 'verifyNotifyRun', False)
# timedelta object
utils.setDefault(config, 'verifyExpectedRunDuration', jobExpectedRunDuration)
utils.setDefault(config, 'verifyEnabled', True)
# A transient file that is used to track the list of journal files that have
# been already synced and so should be excluded in future syncs.
utils.setDefault(config, 'jnlExcludeTmpFileName', '.jnlExcludeList')
# Set to False if you don't want the journals on the primary server be removed
# after transferring to backup server.
utils.setDefault(config, 'removeJnlsAfterSync', True)
if sys.platform not in ('win32', 'cygwin'):
    utils.setDefault(config, 'p4ServiceNameLocal', None)
else:
    utils.setDefault(config, 'p4ServiceNameLocal', 'perforce')
# Should we notify when the script is killed by pressing ^C.
utils.setDefault(config, 'notifyKill', False)
# Should we notify when the script exits.
utils.setDefault(config, 'notifyExit', True)
# When there are errors/exceptions, wait before restarting the program.
utils.setDefault(config, 'errRestartDelay', 30*60) # 30min.
##
## End of advanced configuration.
##
# Apply the effective log level (configured default or -l override).
rootLogger.setLevel(utils.getLogLevelForStr(logLevel))
# FIX: the file-handler setup below explicitly allows logFile to be None
# (file logging disabled), but the unconditional concatenation here raised
# TypeError in that case before the check was ever reached.
if logFile != None:
    logFilePath = scriptsRoot+"/"+logFile
else:
    logFilePath = None
def openLog():
    """Emit a banner line marking the start of a new logging session."""
    banner = "------------opening log----------------"
    log.info(banner)
def closeLog():
    """Emit a banner line marking the end of the logging session (atexit)."""
    banner = "------------closing log----------------"
    log.info(banner)
# Configure File logging support if not already configured.
# NOTE: Wait until scriptsRoot is determined.
# The typeIn() guard keeps module reloads from stacking duplicate rotating
# handlers onto the root logger (same pattern as the console handler above).
if not utils.typeIn(logging.handlers.RotatingFileHandler, rootLogger.handlers) \
        and logFile != None:
    # Make sure the directory for logFilePath exists.
    utils.makeDir(os.path.dirname(logFilePath))
    # Rotate at logFileMaxSize bytes, keeping logFileMaxOld old files.
    handler = logging.handlers.RotatingFileHandler(
            logFilePath, "a", logFileMaxSize, logFileMaxOld)
    handler.setFormatter(logging.Formatter(logFormat))
    rootLogger.addHandler(handler)
    openLog()
    # Register the closing banner so it is written when the process exits.
    import atexit
    atexit.register(closeLog)
# Pre-built p4 command-line option strings. No new helper names are introduced
# here on purpose: any new module global would show up in the configuration
# dump at the bottom of this file.
p4OptionsRemote = '-p %s -u %s' % (p4PortRemote, p4User)
if packageInPerforce:
    p4OptionsRemote = p4OptionsRemote + ' -c ' + p4ClientRemote
if p4Password:
    p4OptionsRemote = p4OptionsRemote + ' -P ' + p4Password
p4OptionsLocal = '-p %s -u %s' % (p4PortLocal, p4User)
if packageInPerforce:
    p4OptionsLocal = p4OptionsLocal + ' -c ' + p4ClientLocal
if p4Password:
    p4OptionsLocal = p4OptionsLocal + ' -P ' + p4Password
# This is used only to determine the "Client Root:" on the remote m/c.
p4RemoteOptions = '-p %s -u %s' % (p4PortRemote, p4User)
if packageInPerforce:
    p4RemoteOptions = p4RemoteOptions + ' -c ' + p4RemoteClient
if p4Password:
    p4RemoteOptions = p4RemoteOptions + ' -P ' + p4Password
scriptsRootPsx = utils.getPosixPath(scriptsRoot)
# We need the local path for all perforce operations.
scriptsRootNtv = utils.getNativePath(scriptsRoot)
if packageInPerforce:
    # Probe the remote server; utils.shell_error carries the exit status.
    result = utils.execute('p4 '+p4OptionsRemote+' info', verbosity=0)
    if utils.shell_error != 0:
        optparser.error('Unknown error executing p4 info: '+result)
    if re.search('Client unknown', result):
        # FIX: typo in the user-facing message ("confgured").
        optparser.error('p4ClientRemote: '+p4ClientRemote+' is not configured.')
    p4RootRemote = utils.extractFieldValue('Server root', ': ', result)
    # Determine the depot path.
    result = utils.execute('p4 '+p4OptionsRemote+' where '+scriptsRootNtv)
    if utils.shell_error != 0:
        optparser.error("couldn't determine the depot path to: "+scriptsRoot)
    # NOTE: Assuming there are no spaces in the view.
    p4ScriptsView = result.split()[0]
p4RootLocalPsx = utils.getPosixPath(p4RootLocal)
# rsyncRemoteConfig is determined only when the daemon mode is chosen.
if rsyncConfigFileName != None:
    if packageInPerforce:
        # Run "p4 where" on the remote host to locate the scripts there.
        result = utils.execute(remoteShell+' '+p4HostRemote+
                ' p4 '+p4RemoteOptions+' where '+p4ScriptsView,
                expectedRunDuration=datetime.timedelta(minutes=5))
        if utils.shell_error != 0:
            # FIX: typo in the user-facing message ("machince").
            optparser.error("Couldn't run p4 where on remote machine: "+result)
        else:
            # Required for the rsyncd.conf and any other configuration files for processes
            # on the remote host.
            remoteScriptsRoot = utils.getMixedPath(
                    result.split()[2]).strip()
            # I think a windows path is ok as a --config file.
            rsyncRemoteConfig = remoteScriptsRoot+'/'+rsyncConfigFileName
    else:
        # FIX: getPosixPath was referenced unqualified, which raised NameError
        # at runtime — it lives in the utils module (cf. its other call sites
        # in this file).
        rsyncRemoteConfig = utils.getPosixPath(rsyncRemoteConfDir, p4HostRemote)+\
                '/'+rsyncConfigFileName
# Decide whether the local perforce database must be restored from a
# checkpoint before it can be used.
checkptRecoveryRequired = False
dbrevFile = p4RootLocal+'/'+'db.rev'
# Check if we need to recover the entire database from a checkpoint.
# We will check for the existence of "db.rev" file and if it is of
# significant size (as of 2004/02/04, the file size is about 16k).
# NOTE: 20*1024L is a Python 2 long literal (20KB threshold).
if not os.path.isfile(dbrevFile) or (os.stat(dbrevFile).st_size < 20*1024L):
    # Also indicates that the local perforce server is not usable.
    checkptRecoveryRequired = True
del dbrevFile
utils.setDefault(config, 'notifyCheckpointRecovery', True)
# Remove the filename part.
p4JournalDirRemote = os.path.dirname(p4JournalRemote)
# For rsync, it is better to have a cygwin style path.
# NOTE(review): p4RootRemote is computed above only when packageInPerforce is
# set; otherwise it presumably must come from the user configuration — verify.
p4RootRemotePsx = utils.getPosixPath(p4RootRemote, p4HostRemote)
p4JournalDirRemotePsx = utils.getPosixPath(p4JournalDirRemote, p4HostRemote)
# Generate a hash of rsync modules to their remote paths.
# Only populated when rsync daemon mode is NOT in use (no config file name).
rsyncModulePathLookup = {}
if rsyncConfigFileName == None:
    rsyncModulePathLookup[rsyncRootModuleName] = p4RootRemotePsx
    rsyncModulePathLookup[rsyncJnlModuleName] = p4JournalDirRemotePsx
if quickRsyncExcludeFile != None:
    quickRsyncExcludePath = scriptsRootPsx+'/'+quickRsyncExcludeFile
else:
    quickRsyncExcludePath = None
## Dump all configuration flags.
log.info('----Start of configuration flags----')
# FIX: 'result' is only bound when one of the probing branches above actually
# ran (packageInPerforce, or daemon-mode rsync under packageInPerforce); an
# unconditional "del result" raised NameError in every other configuration.
try:
    del result
except NameError:
    pass
names = dir(config)
names.sort()
for name in names:
    if not re.match(r'__\w+__', name):
        val = getattr(config, name)
        # Print only attributes of all built-in types.
        # If not a module, class or instance object (comparing with known
        # types: the config module itself, a class object, a function, and a
        # logger instance).
        if not type(val) == type(config) and\
                not type(val) == type(optparse.OptionParser) and\
                not type(val) == type(closeLog) and\
                not type(val) == type(log):
            #if not re.search(r"type 'module'", str(type(val))):
            log.info('--%s=%s--', name, val)
log.info('----End of configuration flags----')