#!/usr/bin/env python3
# -*- encoding: UTF8 -*-
# test_SDP.py
# Tests SDP (Server Deployment Package) on Linux VMs
# Intended to be run from with a Docker container.
# See documentation and run_tests.sh in /sdp/main/test/README.md
from __future__ import print_function
import argparse
import fileinput
import glob
import logging
import os
import pwd
import re
import socket
import stat
import subprocess
import sys
import time
import unittest
import P4
LOGGER_NAME = 'SDPTest'
mkdirs_script = '/hxdepots/sdp/Server/Unix/setup/mkdirs.sh'
mkdirs_config = '/hxdepots/sdp/Server/Unix/setup/mkdirs.cfg'
MAILTO = 'mailto-admin@example.com'
MAILFROM = 'mailfrom-admin@example.com'
logger = logging.getLogger(LOGGER_NAME)
options = None
class NotSudo(Exception):
pass
def get_host_ipaddress():
try:
address = socket.gethostbyname(socket.gethostname())
# On my system, this always gives me 127.0.0.1. Hence...
except:
address = ''
if not address or address.startswith('127.'):
# ...the hard way.
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(('4.2.2.1', 0))
address = s.getsockname()[0]
s.detach()
logger.debug('IPAddress: %s' % address)
return address
def init_logging():
global logger
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s:%(name)s:%(levelname)s: %(message)s')
fh = logging.FileHandler('/tmp/%s.log' % LOGGER_NAME, mode='w')
fh.setLevel(logging.DEBUG)
fh.setFormatter(formatter)
logger.addHandler(fh)
def do_unlink(filename):
"Unlink file if it exists"
if os.path.lexists(filename):
os.unlink(filename)
def substitute_unix_vars(line, instance, port):
brokerport=int(port)+1
line = line.rstrip()
if line.startswith('export MAILTO='):
print("export MAILTO=%s" % MAILTO)
elif line.startswith('export SSL_PREFIX=ssl:'):
print("export SSL_PREFIX=")
elif line.startswith('export MAILFROM='):
print("export MAILFROM=%s" % MAILFROM)
elif line.startswith('export P4PORTNUM='):
print("export P4PORTNUM=%s" % port)
elif line.startswith('export P4BROKERPORTNUM='):
print("export P4BROKERPORTNUM=%s" % brokerport)
elif line.startswith('export KEEPLOGS='):
print("export KEEPLOGS=3")
elif line.startswith('export KEEPCKPS='):
print("export KEEPCKPS=3")
elif line.startswith('export VERIFY_SDP_SKIP_TEST_LIST='):
print("export VERIFY_SDP_SKIP_TEST_LIST=crontab")
elif line.startswith('export KEEPJNLS='):
print("export KEEPJNLS=3")
else:
print(line)
def configure_p4_vars(instance, port):
"Configure p4_vars"
for line in fileinput.input('/p4/common/bin/p4_vars', inplace=True):
substitute_unix_vars(line, instance, port)
def configure_instance_vars(instance, port):
"Configure instance vars"
for line in fileinput.input('/p4/common/config/p4_%s.vars' % instance, inplace=True):
substitute_unix_vars(line, instance, port)
class SDPTest_base(unittest.TestCase):
"Generic test class for others to inherit"
server_id = "master.test"
def assertLinePresent(self, line, output):
"Asserts regex line present in output"
re_line = re.compile(line, re.MULTILINE)
self.assertTrue(re_line.search(output), "%s not found in:\n%s" % (line, output))
def assertLineNotPresent(self, line, output):
"Asserts regex line NOT present in output"
re_line = re.compile(line, re.MULTILINE)
self.assertFalse(re_line.search(output), "%s found in:\n%s" % (line, output))
def setup_everything(self):
if 'perforce' != pwd.getpwuid(os.getuid())[0]:
raise Exception("This test harness should be run as user 'perforce'")
try:
subprocess.check_call("sudo ls > /dev/null", shell=True, timeout=20)
except Exception:
raise NotSudo("This test harness must be run as user perforce with sudo privileges or it will not work.")
def setUp(self):
self.setup_everything()
def run_test(self):
pass
def run_cmd(self, cmd, dir=".", get_output=True, timeout=35, stop_on_error=True):
"Run cmd logging input and output"
output = ""
try:
logger.debug("Running: %s" % cmd)
if get_output:
p = subprocess.Popen(cmd, cwd=dir, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, shell=True)
output, _ = p.communicate(timeout=timeout)
rc = p.returncode
logger.debug("Output:\n%s" % output)
else:
result = subprocess.check_call(cmd, stderr=subprocess.STDOUT, shell=True, timeout=timeout)
logger.debug('Result: %d' % result)
except subprocess.CalledProcessError as e:
logger.debug("Output: %s" % e.output)
if stop_on_error:
msg = 'Failed run_cmd: %d %s' % (e.returncode, str(e))
logger.debug(msg)
self.fail(msg)
except Exception as e:
logger.debug("Output: %s" % output)
if stop_on_error:
msg = 'Failed run_cmd: %s' % str(e)
logger.debug(msg)
self.fail(msg)
return output
def sudo_cmd(self, cmd, dir=".", get_output=True, timeout=35, stop_on_error=True):
"Run cmd with sudo"
output = self.run_cmd("sudo %s" % cmd, dir=dir, get_output=get_output, timeout=timeout, stop_on_error=stop_on_error)
return output
def configure_mkdirs(self, instance):
"Configure mkdirs.cfg with a couple of key variables"
ipaddr = get_host_ipaddress()
for line in fileinput.input(mkdirs_config, inplace=True):
line = line.rstrip()
if line.startswith('P4MASTERHOST'):
print("P4MASTERHOST=%s" % ipaddr)
elif line.startswith('P4MASTER='):
print("P4MASTER=%s" % ipaddr)
elif line.startswith('MASTER_ID='):
print("MASTER_ID=%s" % self.server_id)
elif line.startswith('P4ADMINPASS'):
print("P4ADMINPASS=Password1")
elif line.startswith('MASTERINSTANCE'):
print("MASTERINSTANCE=%s" % instance)
elif line.startswith('HOST_IP'):
print("HOST_IP=%s" % ipaddr)
elif line.startswith('DB1'):
print("DB1=hxmetadata")
elif line.startswith('DB2'):
print("DB2=hxmetadata")
else:
print(line)
def download_binaries(self, version):
"Download Perforce Helix binaries."
output = self.sudo_cmd("./get_helix_binaries.sh -r r%s" % version, dir="/hxdepots/sdp/helix_binaries", timeout=120)
re_line = re.compile("Downloading of Helix files completed OK", re.MULTILINE)
if re_line.search(output):
return True
else:
return False
def disable_p4p(self, instance):
"Disable p4p by removing /p4/N/bin/p4p_N and /p4/N/bin/p4p_N_init."
logger.debug("disable_p4p()")
p4pInitScript="/p4/%s/bin/p4p_%s_init" % (instance, instance)
p4pSymlink="/p4/%s/bin/p4p_%s" % (instance, instance)
if not os.path.exists(p4pInitScript):
p4pInitScript="/tmp/%s" % p4pInitScript
p4pSymlink="/tmp/%s" % p4pSymlink
if not os.path.exists(p4pInitScript):
return
self.run_cmd("rm -f %s %s" % (p4pInitScript, p4pSymlink))
def disable_email(self):
"Disable SDPMAIL setting as test environment has no email server."
logger.debug("disable_email()")
if os.path.exists('/p4/common/bin/p4_vars'):
envFile='/p4/common/bin/p4_vars'
elif os.path.exists('/tmp/p4/common/bin/p4_vars'):
envFile='/tmp/p4/common/bin/p4_vars'
else:
return
self.sudo_cmd("mv %s %s.bak" % (envFile, envFile))
self.sudo_cmd("sed s:SDPMAIL=:SDPMAIL='echo mail':g %s.bak > %s" % (envFile, envFile))
self.sudo_cmd("chown perforce:perforce %s" % envFile)
def run_mkdirs(self, instance, testArg=""):
"Runs the mkdirs script"
self.sudo_cmd("mkdir /hxmetadata") # don't use /hxmetadata1 (or 2)
cmd = "%s %s %s" % (mkdirs_script, instance, testArg)
output = self.sudo_cmd(cmd, dir="/hxdepots/sdp/Server/Unix/setup")
valid_lines = ["Verified: Running as root",
r"Warning: \(line: \d+\) No p4p in /hxdepots/sdp/Server/Unix/p4/common/bin",
r"Warning: \(line: \d+\) No p4broker in /hxdepots/sdp/Server/Unix/p4/common/bin",
"Appending configuration section .*Maintenance/maintenance.cfg",
"Verified: Preflight checks passed.",
"Setting permissions on depot files - this may take some time \.\.\.",
"Setting ownership on depot files - this may take some time \.\.\.",
"It is recommended that the perforce's umask be changed to 0026 to block world access to Perforce files\.",
"Add umask 0026 to perforce's \.bash_profile to make this change\.",
"\*+ \-test specified \- will install to /tmp/p4 \*+",
"This was done in TEST mode \- please run the following command to see any changes should be",
"applied to your live environment \(manually\):",
"diff \-r /p4/1/bin /tmp/p4/1/bin",
"diff \-r /p4/master/bin /tmp/p4/master/bin",
"diff \-r /p4/common /tmp/p4/common",
"If upgrading an older SDP version then be careful to ensure files in /p4/common/config are correct",
r"Log is: .*",
r"Started mkdirs.sh v.*",
r".*/mkdirs.sh",
r"Loading config file: .*/mkdirs.cfg",
r"Overriding SDP setting in test mode to.*",
"and update that /p4/common/bin/p4_vars is appropriate\."]
re_lines = [re.compile(x) for x in valid_lines]
for line in output.split('\n'):
line = line.strip()
if not line:
continue
found = False
for re_line in re_lines:
if re_line.search(line):
found = True
break
# mkdirs.sh is now more pedantic, making maintenance of unexpected extra lines
# impractical.
###if not found:
### self.fail('Unexpected line in mkdirs output:\n%s\noutput:\n%s' % (line, output))
def readLog(self, log_name, instance):
"Read the appropriate log file contents"
with open('/p4/%s/logs/%s' % (instance, log_name), 'r') as fh:
log_contents = fh.read()
fh.close()
return log_contents
#--- Test Cases
class configure_master(SDPTest_base):
def check_dirs(self, rootdir, dirlist):
"Checks specified directories are present"
found_dirs = self.run_cmd("find %s -type d" % rootdir, stop_on_error=False).split()
for d in [x.strip() for x in dirlist.split()]:
self.assertIn(d, found_dirs)
def check_links(self, rootdir, linklist):
"Checks specified links are present"
found_links = self.run_cmd("find %s -type l" % rootdir, stop_on_error=False).split()
for link in [x.strip() for x in linklist.split()]:
self.assertIn(link, found_links)
def check_dirs_exactly(self, rootdir, dirlist):
"Checks specified directories and only those are present"
found_dirs = self.run_cmd("find %s -type d" % rootdir, stop_on_error=False).split()
dirs = [x.strip() for x in dirlist.split()]
for d in dirs:
self.assertIn(d, found_dirs)
for d in found_dirs:
self.assertIn(d, dirs)
def install_service(self, instance, stop_on_error=True):
"Install systemd service"
### Systemd works on podman when container has correct capabilities - see run_docker_tests.sh
systemd_file = f"/etc/systemd/system/p4d_{instance}.service"
if not os.path.exists(systemd_file):
self.run_cmd(f"cat /p4/sdp/Server/Unix/setup/systemd/p4d_{instance}.service | sed -e 's/1/{instance}/g' | sudo tee {systemd_file}", get_output=False, stop_on_error=True)
self.run_cmd(f"sudo systemctl enable p4d_{instance}", get_output=False, stop_on_error=True)
def p4service(self, cmd, instance, stop_on_error=True):
"Start or stop service"
### Systemd works on podman when container has correct capabilities - see run_docker_tests.sh
self.run_cmd("sudo systemctl %s p4d_%s" % (cmd, instance), get_output=False, stop_on_error=True)
def remove_test_dirs(self, instances):
"Remove all appropriate directories created"
dirs_to_remove = "/hxdepots/sdp /hxdepots/p4 /hxmetadata/p4 /hxmetadata1/p4 /hxmetadata2/p4 /hxlogs/p4".split()
for instance in instances:
dirs_to_remove.append("/p4/%s" % instance)
for d in dirs_to_remove:
if os.path.exists(d):
self.sudo_cmd("rm -rf %s" % d)
for instance in instances:
for f in ["/p4/common"]:
if os.path.lexists(f):
self.sudo_cmd("unlink %s" % f)
def liveCheckpointTest(self, instance):
"Test live checkpoint script"
self.assertFalse(os.path.exists('/p4/%s/offline_db/db.domain' % instance))
self.run_cmd('/p4/common/bin/live_checkpoint.sh %s' % instance)
# Quick check on c=log file contents
log_contents = self.readLog('checkpoint.log', instance)
self.assertRegex(log_contents, "Checkpointing to /p4/%s/checkpoints/p4_%s.ckp" % (instance, instance))
self.assertRegex(log_contents, "journal")
# Make sure offline db is present
self.assertTrue(os.path.exists('/p4/%s/offline_db/db.domain' % instance))
def parallelCheckpointTest(self, instance):
"Test parallel checkpoint script"
# Enable parallel checkpoint feature.
self.run_cmd("echo export DO_PARALLEL_CHECKPOINTS=1 >> /p4/common/config/p4_%s.vars" % instance)
# Set a low number of checkpoints to keep so we can verify cleanup.
self.run_cmd("echo export KEEPCKPS=2 >> /p4/common/config/p4_%s.vars" % instance)
# Run daily_checkpoint.sh, now with parallel feature enabled.
self.run_cmd('/p4/common/bin/daily_checkpoint.sh %s' % instance)
log_contents = self.readLog('checkpoint.log', instance)
self.assertRegex(log_contents, "-z -N 4 -jdp")
self.assertRegex(log_contents, "Parallel checkpoint dump completed OK.")
self.assertRegex(log_contents, "Dumping to directory /p4/%s/checkpoints/p4_%s.ckp." % (instance,instance))
self.assertRegex(log_contents, "Recovering from parallel checkpoint directory: /p4/%s/checkpoints/p4_%s.ckp." % (instance,instance))
self.assertRegex(log_contents, "-z -N 4 -jrp")
self.assertRegex(log_contents, "End p4_%s Checkpoint" % instance)
# Run refresh_P4ROOT_from_offline_db.sh with parallel feature enabled.
self.run_cmd('/p4/common/bin/refresh_P4ROOT_from_offline_db.sh %s' % instance)
log_contents = self.readLog('refresh_P4ROOT_from_offline_db.log', instance)
self.assertRegex(log_contents, "Parallel checkpoint dump completed OK. Writing: /p4/%s/checkpoints/p4_%s.ckp" % (instance,instance))
self.assertRegex(log_contents, "Dumping to directory /p4/%s/checkpoints/p4_%s.ckp." % (instance,instance))
self.assertRegex(log_contents, "End p4_%s Refresh P4ROOT from offline_db" % instance)
# Run live_checkpoint.sh with parallel feature enabled.
self.run_cmd('/p4/common/bin/live_checkpoint.sh %s' % instance)
log_contents = self.readLog('checkpoint.log', instance)
self.assertRegex(log_contents, "Live parallel checkpoint completed OK. Writing: /p4/%s/checkpoints/p4_%s.ckp" % (instance,instance))
self.assertRegex(log_contents, "Removing checkpoint dir: rm -rf \"/p4/%s/checkpoints/p4_%s.ckp." % (instance,instance))
self.assertRegex(log_contents, "Removing journal file with: rm -f \"/p4/%s/checkpoints/p4_%s.jnl." % (instance,instance))
self.assertRegex(log_contents, "End p4_%s Checkpoint" % instance)
# Run recreate_offline_db.sh with parallel feature enabled.
self.run_cmd('/p4/common/bin/recreate_offline_db.sh %s' % instance)
log_contents = self.readLog('recreate_offline_db.log', instance)
self.assertRegex(log_contents, "Recovering from parallel checkpoint directory: /p4/%s/checkpoints/p4_%s.ckp." % (instance,instance))
self.assertRegex(log_contents, "End p4_%s recreate offline db" % instance)
def recreateOfflineDBTest(self, instance):
"Test recreate_offline_db script"
self.assertTrue(os.path.exists('/p4/%s/offline_db/db.domain' % instance))
self.sudo_cmd("rm -rf /p4/%s/offline_db/db.*" % instance)
self.run_cmd('/p4/common/bin/recreate_offline_db.sh %s' % instance)
# Quick check on log file contents
logPattern = '/p4/%s/logs/recreate_offline_db.log.*' % instance
logfiles = glob.glob(logPattern)
self.assertEqual(1, len(logfiles))
log_contents = self.readLog(os.path.basename(logfiles[0]), instance)
self.assertRegex(log_contents, "Start p4_%s recreate of offline db" % (instance))
self.assertRegex(log_contents, "Recovering from serial checkpoint file: /p4/%s/checkpoints/p4_%s.ckp" % (
instance, instance))
# Make sure offline db is present
self.assertTrue(os.path.exists('/p4/%s/offline_db/db.domain' % instance))
def failedDailyBackupTest(self, instance):
"Test daily backup script - expected to fail due to lack of offline db"
logger.debug("failedDailyBackupTest")
jnl_counter = self.p4run('counter', 'journal')[0]['value']
self.run_cmd('/p4/common/bin/daily_checkpoint.sh %s' % instance, stop_on_error=False)
# Quick check on c=log file contents
log_contents = self.readLog('checkpoint.log', instance)
self.assertRegex(log_contents, "Offline database not in a usable state")
new_jnl_counter = self.p4run('counter', 'journal')[0]['value']
self.assertEqual(int(new_jnl_counter), int(jnl_counter))
def dailyBackupTest(self, instance):
"Test daily backup script"
jnl_counter = self.p4run('counter', 'journal')[0]['value']
logger.debug("dailyBackupTest")
self.run_cmd('/p4/common/bin/daily_checkpoint.sh %s' % instance)
# Quick check on log file contents
log_contents = self.readLog('checkpoint.log', instance)
self.assertRegex(log_contents, "Dumping to /p4/%s/checkpoints/p4_%s.ckp" % (instance, instance))
self.assertRegex(log_contents, "journal")
new_jnl_counter = self.p4run('counter', 'journal')[0]['value']
self.assertEqual(int(new_jnl_counter), int(jnl_counter) + 1)
def rotateJournalTest(self, instance):
"Test rotate_journal.sh script"
self.run_cmd('/p4/common/bin/rotate_journal.sh %s' % instance)
# Quick check on log file contents
log_contents = self.readLog('checkpoint.log', instance)
self.assertRegex(log_contents, "End p4_%s journal rotation" % instance)
def loadCheckpointTest(self, instance):
"Test load_checkpoint.sh script"
logger.debug("loadCheckpointTest")
self.run_cmd('/p4/common/bin/load_checkpoint.sh /p4/%s/checkpoints/p4_%s.ckp.9.gz /p4/%s/logs/journal -k -i %s -y -l -si -L /p4/%s/logs/load_checkpoint.log' % (instance, instance, instance, instance, instance))
# Quick check on log file contents
log_contents = self.readLog('load_checkpoint.log', instance)
self.assertRegex(log_contents, "MovedDBs")
self.assertRegex(log_contents, "Checkpoint load processing took")
def refreshP4ROOTFromOfflineDBTest(self, instance):
"Test refresh_P4ROOT_from_offline_db.sh script"
logger.debug("refreshP4ROOTFromOfflineDBTest")
self.run_cmd('/p4/common/bin/refresh_P4ROOT_from_offline_db.sh %s' % instance)
# Quick check on log file contents
log_contents = self.readLog('refresh_P4ROOT_from_offline_db.log', instance)
self.assertRegex(log_contents, "End p4_%s Refresh P4ROOT from offline_db" % instance)
def mkrepTest(self, instance):
"Test mkrep.sh script"
logger.debug("mkrepTest")
# Generate the SiteTags.cfg file required by mkrep.sh
fname = '/p4/common/config/SiteTags.cfg'
with open(fname, 'w') as fh:
fh.write('bos: Boston\nsyd: Sydney\n')
self.run_cmd('/p4/common/bin/mkrep.sh -i %s -t edge -r syd-helix-04 -s syd -p -os -L /p4/%s/logs/mkrep.log' % (instance,instance))
# Quick check on log file contents
log_contents = self.readLog('mkrep.log', instance)
self.assertRegex(log_contents, "That took")
def edgeDumpTest(self, instance, edge_id):
"Test edge_dump.sh script"
logger.debug("edgeDumpTest")
self.run_cmd('/p4/common/bin/edge_dump.sh %s %s' % (instance, edge_id))
# Quick check on log file contents
log_contents = self.readLog('/p4/%/logs/edge_dump.*.log', instance)
self.assertRegex(log_contents, "Edge seed checkpoint complete")
def verifyTest(self, instance):
"Test verify script"
logger.debug("verifyTest")
verify_cmd1 = '/p4/common/bin/p4verify.sh %s -L /p4/%s/logs/p4verify.run1.log' % (instance,instance)
verify_cmd2 = '/p4/common/bin/p4verify.sh %s -L /p4/%s/logs/p4verify.run2.log' % (instance,instance)
verify_cmd3 = '/p4/common/bin/p4verify.sh %s -L /p4/%s/logs/p4verify.run3.log' % (instance,instance)
verify_recent_cmd = '/p4/common/bin/p4verify.sh %s -recent -L /p4/%s/logs/p4verify.recent.log' % (instance,instance)
self.run_cmd(verify_cmd1)
log_contents = self.readLog('p4verify.run1.log', instance)
for depot in ["depot"]:
verify_ok = re.compile("verify -qz //%s/...\nexit: 0" % depot, re.MULTILINE)
self.assertRegex(log_contents, verify_ok)
for depot in ["specs"]:
verify_ok = re.compile("verify -q //%s/...\nexit: 0" % depot, re.MULTILINE)
self.assertRegex(log_contents, verify_ok)
# Make sure we check for shelves in the 'depot' depot.
verify_ok = re.compile("There were no shelved changelists to verify")
# Check that we use '-U' for unload depot, and do not do -qS (as there are no shelves
# in an unload depot). Also, we expect errors about their not being any files in the
# unload depot. Note that, depending on the p4d version, this may show as 'error:'
# or 'warning:'; we accept either here.
verify_ok = re.compile(r"verify -q -U //unload/...\n(error|warning): //unload/... - no such unloaded.*\nexit: 0", re.MULTILINE)
self.assertRegex(log_contents, verify_ok)
# Streams depot doesn't have any files so gives an error - we just search for it
verify_ok = re.compile(r"verify -qz //streams/...\n(error|warning): //streams/... - no such.*\nexit: 0", re.MULTILINE)
self.assertRegex(log_contents, verify_ok)
# Now create verify errors and make sure we see them
orig_depot_name = '/p4/%s/depots/depot' % instance
new_depot_name = orig_depot_name + '.new'
trash_depot_name = orig_depot_name + '.trash'
os.rename(orig_depot_name, new_depot_name)
self.run_cmd(verify_cmd2, stop_on_error=False)
log_contents = self.readLog('p4verify.run2.log', instance)
for depot in ["depot"]:
verify_ok = re.compile("verify -qz //%s/...\nerror: [^\n]*MISSING!\nexit: 1" % depot, re.MULTILINE)
self.assertRegex(log_contents, verify_ok)
# Rename things back again and all should be well!
if os.path.exists (orig_depot_name):
os.rename(orig_depot_name, trash_depot_name)
os.rename(new_depot_name, orig_depot_name)
time.sleep(3) # Make sure verify log is OK.
self.run_cmd(verify_cmd3, stop_on_error=True)
log_contents = self.readLog('p4verify.run3.log', instance)
for depot in ["depot"]:
verify_ok = re.compile("verify -qz //%s/...\nexit: 0" % depot, re.MULTILINE)
self.assertRegex(log_contents, verify_ok)
for depot in ["specs"]:
verify_ok = re.compile("verify -q //%s/...\nexit: 0" % depot, re.MULTILINE)
self.assertRegex(log_contents, verify_ok)
time.sleep(3) # Make sure verify log is OK.
self.run_cmd(verify_recent_cmd, stop_on_error=False)
log_contents = self.readLog('p4verify.recent.log', instance)
verify_ok = re.compile("changelists per depot due to -recent", re.MULTILINE)
self.assertRegex(log_contents, verify_ok)
def verifyVerify(self, instance):
"Test verify_sdp.sh script"
logger.debug("verifyVerify")
verify_cmd = '/p4/common/bin/verify_sdp.sh %s -skip cron' % instance
self.run_cmd(verify_cmd)
# Quick check on log file contents
log_contents = self.readLog('verify_sdp.log', instance)
self.assertRegex(log_contents, r"NO ERRORS: \d+ verifications completed, with 1 warnings detected.")
def verifyJournalCorruption(self, instance):
"Test server restart detetcts journal corruption"
logger.debug("verifyJournalCorruption")
self.p4service("stop", instance)
self.p4service("start", instance)
self.p4.disconnect()
self.connectP4(self.p4)
jnl_counter = self.p4run('counter', 'journal')[0]['value']
log_contents = self.readLog('p4d_init.log', instance)
self.assertNotRegex(log_contents, "Error: possible corruption at end of journal detected")
fname = '/p4/%s/logs/journal' % instance
# Journal with an invalid initial line - testing for when tail -10000 happens to chop a record in half
self.p4service("stop", instance)
with open(fname, 'w') as fh:
fh.write("""@ @//some/path@ @@ 1
@ex@ 31884 1605660325
@rv@ 7 @db.user@ @z_build@ @z_build@@example.com@ @@ 1511013611 1605660070 @z_build@ @C2999B31D3A83F4F6651DAB32FAB0861@ 1 @99C5A122A727E54C327E0B3286346F00@ 2147483647 0 1511072211 0 0 0
@ex@ 31910 1605660327
""")
self.p4service("start", instance)
self.p4.disconnect()
self.connectP4(self.p4)
jnl_counter = self.p4run('counter', 'journal')[0]['value']
log_contents = self.readLog('p4d_init.log', instance)
self.assertNotRegex(log_contents, "Error: possible corruption at end of journal detected")
# Totally invalid journal test
self.p4service("stop", instance)
with open(fname, 'a') as fh:
fh.write('corruption journal data\n')
self.p4service("start", instance)
self.p4.disconnect()
self.connectP4(self.p4)
# Quick check on log file contents
log_contents = self.readLog('p4d_init.log', instance)
self.assertRegex(log_contents, "Error: possible corruption at end of journal detected")
new_jnl_counter = self.p4run('counter', 'journal')[0]['value']
self.assertEqual(int(new_jnl_counter), int(jnl_counter) + 1)
def configureServer(self, instance):
"Set various configurables for master"
configurables = """
security=3
auth.id=p4_auth
run.users.authorize=1
db.peeking=2
dm.user.noautocreate=2
dm.user.resetpassword=1
filesys.P4ROOT.min=10M
filesys.depot.min=10M
filesys.P4JOURNAL.min=10M
filesys.P4LOG.min=10M
server=3
net.tcpsize=256k
lbr.bufsize=256k
server.commandlimits=2
serverlog.retain.3=7
serverlog.retain.7=7
serverlog.retain.8=7""".split("\n")
instance_configurables = """
journalPrefix=/p4/SDP_INSTANCE/checkpoints/p4_SDP_INSTANCE
server.depot.root=/p4/SDP_INSTANCE/depots
serverlog.file.3=/p4/SDP_INSTANCE/logs/errors.csv
serverlog.file.7=/p4/SDP_INSTANCE/logs/events.csv
serverlog.file.8=/p4/SDP_INSTANCE/logs/integrity.csv""".split("\n")
for c in [x.strip() for x in configurables]:
if c:
self.p4run("configure", "set", c)
for ic in instance_configurables:
ic = ic.strip()
if ic:
self.p4run("configure", "set", ic.replace("SDP_INSTANCE", instance))
def configureReplication(self):
"Configures stuff required for replication"
def p4run(self, *args):
"Run the command logging"
logger.debug('p4 cmd: %s' % ",".join([str(x) for x in args]))
result = self.p4.run(args)
logger.debug('result: %s' % str(result))
return result
def resetTest(self, instances):
for instance in instances:
self.sudo_cmd("ps -ef | grep p4d_%s | awk '{print $2}' | xargs kill > /dev/null 2>&1" % instance, stop_on_error=False)
self.remove_test_dirs(instances)
self.sudo_cmd("cp -R /sdp /hxdepots/sdp")
self.sudo_cmd("rm -rf /tmp/p4")
self.sudo_cmd("sudo chown -R perforce:perforce /hxdepots/sdp")
for f in ["/p4/p4.crontab", "/p4/p4.crontab.replica", "/p4/p4.crontab.edge",
"/tmp/p4/p4.crontab", "/tmp/p4/p4.crontab.replica", "/tmp/p4/p4.crontab.edge"]:
if os.path.exists(f):
os.remove(f)
for instance in instances:
d = "/p4/%s" % instance
if os.path.exists(d):
self.sudo_cmd("rm -rf %s" % d)
if os.path.exists(d.lower()):
self.sudo_cmd("rm -rf %s" % d.lower())
for instance in instances:
service_file = f"p4d_{instance}.service"
systemd_file = f"/etc/systemd/system/{service_file}"
if os.path.exists(systemd_file):
self.sudo_cmd(f"sudo systemctl disable {service_file}")
self.sudo_cmd(f"sudo rm -f {systemd_file}")
def mkdirsTest(self, instance, version):
"Runs mkdirs with -test option and makes sure all is OK"
# Stop the Perforce service if currently running from a previous run in case it is accessing dirs
self.resetTest(instance)
self.configure_mkdirs(instance)
self.download_binaries(version)
self.run_mkdirs(instance, "-test")
self.disable_email()
self.disable_p4p(instance)
# Check dirs are empty
self.check_dirs('/tmp/hxmounts/hxdepots', '/tmp/hxmounts/hxdepots')
self.check_dirs('/tmp/hxmounts/hxlogs', '/tmp/hxmounts/hxlogs')
self.check_dirs('/tmp/hxmounts/hxmetadata', '/tmp/hxmounts/hxmetadata')
link_list = """
/tmp/p4/common"""
self.check_links('/tmp/p4', link_list)
self.check_dirs('/tmp/p4', '/tmp/p4')
dir_list = """
/tmp/hxmounts/hxdepots/p4/common/bin
/tmp/hxmounts/hxdepots/p4/common/bin/triggers
/tmp/hxmounts/hxdepots/p4/common/lib
/tmp/hxmounts/hxdepots/p4/SDP_INSTANCE/checkpoints
/tmp/hxmounts/hxdepots/p4/SDP_INSTANCE/depots""".replace("SDP_INSTANCE", instance)
self.check_dirs('/tmp/hxmounts/hxdepots', dir_list)
dir_list = """
/tmp/p4/SDP_INSTANCE/bin""".replace("SDP_INSTANCE", instance)
self.check_dirs('/tmp/p4/SDP_INSTANCE/'.replace("SDP_INSTANCE", instance), dir_list)
def configureInstance(self, instance, version, port):
"Configure the master instance"
# Stop the Perforce service if currently running from a previous run in case it is accessing dirs
self.resetTest(instance)
self.configure_mkdirs(instance)
self.download_binaries(version)
self.run_mkdirs(instance)
self.install_service(instance)
self.disable_email()
self.disable_p4p(instance)
depotdata_dir_list = """
/hxdepots/p4
/hxdepots/p4/common
/hxdepots/p4/common/bin
/hxdepots/p4/common/bin/triggers
/hxdepots/p4/common/lib
/hxdepots/p4/SDP_INSTANCE
/hxdepots/p4/SDP_INSTANCE/depots
/hxdepots/p4/SDP_INSTANCE/checkpoints""".replace("SDP_INSTANCE", instance)
logdata_dir_list = """
/hxlogs
/hxlogs/p4
/hxlogs/p4/SDP_INSTANCE
/hxlogs/p4/SDP_INSTANCE/tmp
/hxlogs/p4/SDP_INSTANCE/logs""".replace("SDP_INSTANCE", instance)
metadata_dir_list = """
/hxmetadata
/hxmetadata/p4
/hxmetadata/p4/SDP_INSTANCE
/hxmetadata/p4/SDP_INSTANCE/db1
/hxmetadata/p4/SDP_INSTANCE/db1/save
/hxmetadata/p4/SDP_INSTANCE/db2
/hxmetadata/p4/SDP_INSTANCE/db2/save""".replace("SDP_INSTANCE", instance)
p4_link_list = """
/p4/SDP_INSTANCE/root
/p4/SDP_INSTANCE/offline_db
/p4/SDP_INSTANCE/checkpoints
/p4/SDP_INSTANCE/depots
/p4/SDP_INSTANCE/logs
/p4/SDP_INSTANCE/tmp""".replace("SDP_INSTANCE", instance)
p4_dir_list = """
/p4/SDP_INSTANCE/bin""".replace("SDP_INSTANCE", instance)
self.check_dirs('/hxdepots', depotdata_dir_list)
self.check_dirs('/hxlogs', logdata_dir_list)
self.check_dirs('/hxmetadata', metadata_dir_list)
self.check_dirs('/p4', p4_dir_list)
self.check_links('/p4/SDP_INSTANCE'.replace("SDP_INSTANCE", instance), p4_link_list)
id_file = '/p4/SDP_INSTANCE/root/server.id'.replace("SDP_INSTANCE", instance)
with open(id_file, 'r') as fh:
server_id = fh.read().strip()
self.assertEqual(server_id, self.server_id)
configure_instance_vars(instance, port)
configure_p4_vars(instance, port)
def assertLogCount(self, expected, logPattern):
logger.debug("Looking for logs: %s" % logPattern)
logs = glob.glob(logPattern)
self.assertEqual(3, len(logs))
def configure_master_p4d_instance(self, p4, instance):
# Create our user and set password
logger.debug('Creating user and setting password')
# Create our user and set password
user = p4.fetch_user('perforce')
p4.save_user(user)
p4.run_password('Password1', 'Password1')
p4.password = 'Password1'
p4.run_login()
# Make him superuser
prot = p4.fetch_protect()
p4.save_protect(prot)
# Things to setup
# - create spec depot
# - create a workspace and add at least one file
# - configure the various tunables
# - create server definitions - master and replica
# - create service user for replica
p4.run('configure', 'set', 'server.depot.root=/p4/%s/depots' % instance)
p4.run('admin', 'restart')
p4.disconnect() # New depot won't show up unless we do this
time.sleep(1)
self.connectP4(p4)
if instance == 'master':
if not os.path.lexists("/p4/%s" % instance.lower()):
self.run_cmd("ln -s /p4/%s /p4/%s" % (instance, instance.lower()))
depot = p4.fetch_depot('specs')
self.assertEqual(depot['Map'], 'specs/...')
depot['Type'] = 'spec'
p4.save_depot(depot)
depot = p4.fetch_depot('unload')
self.assertEqual(depot['Map'], 'unload/...')
depot['Type'] = 'unload'
p4.save_depot(depot)
depot = p4.fetch_depot('archive')
self.assertEqual(depot['Map'], 'archive/...')
depot['Type'] = 'archive'
p4.save_depot(depot)
depot = p4.fetch_depot('streams')
self.assertEqual(depot['Map'], 'streams/...')
depot['Type'] = 'stream'
p4.save_depot(depot)
p4.disconnect() # New depot won't show up unless we do this
self.connectP4(p4)
depots = p4.run_depots()
self.assertEqual(5, len(depots))
ws_name = 'test_ws'
ws = p4.fetch_client(ws_name)
ws['Root'] = '/tmp/test_ws'
ws['View'] = ['//depot/main/... //%s/...' % ws_name]
p4.save_client(ws)
p4.client = ws_name
if not os.path.exists(ws['Root']):
os.mkdir(ws['Root'])
fname = '/tmp/%s/file1' % ws_name
if os.path.exists(fname):
os.chmod(fname, stat.S_IWRITE)
os.unlink(fname)
with open(fname, 'w') as fh:
fh.write('test data\n')
p4.run_add(fname)
chg = p4.fetch_change()
chg['Description'] = 'Initial file'
p4.save_submit(chg)
changes = p4.run_changes()
self.assertEqual(1, len(changes))
def connectP4(self, p4):
# Connect with retries
retries = 10
for i in range(retries):
try:
p4.connect()
break
except:
logger.debug("sleeping to connect - retry %d" % i)
time.sleep(1)
if not p4.connected():
self.fail("failed to connect to server after %d retries" % retries)
def instanceSetup(self, instance, port, test_daily=False):
# Start instance and do the basics
self.p4service("start", instance)
p4 = P4.P4()
self.p4 = p4
p4.port = 'localhost:%s' % port
p4.user = 'perforce'
self.connectP4(p4)
# Create our user and set password
user = p4.fetch_user('perforce')
p4.save_user(user)
p4.run_password('', 'Password1')
p4.password = 'Password1'
p4.run_login()
# Make him superuser
prot = p4.fetch_protect()
p4.save_protect(prot)
self.configure_master_p4d_instance(p4, instance)
self.configureServer(instance)
if test_daily:
# Following 2 tests should fail due to lack of offline_db
self.failedDailyBackupTest(instance)
time.sleep(1)
# self.recreateDBFromOfflineTest(instance)
time.sleep(1)
self.liveCheckpointTest(instance)
time.sleep(1)
self.verifyVerify(instance)
def instanceTest(self, instance, port):
"Perforce complete tests"
ckpLogPattern = '/p4/%s/logs/checkpoint.log*' % instance
logPattern = '/p4/%s/logs/log*' % instance
self.instanceSetup(instance, port, test_daily=True)
# Now the offline_db should exist
# Run enough times to ensure logs get removed - (KEEPCKPS + 1)
self.dailyBackupTest(instance)
self.assertLogCount(3, ckpLogPattern)
time.sleep(1)
self.dailyBackupTest(instance)
self.assertLogCount(3, ckpLogPattern)
time.sleep(1)
self.dailyBackupTest(instance)
self.assertLogCount(3, ckpLogPattern)
time.sleep(1)
# Manually rotate journals again and ensure daily backup handles that
self.p4run('admin', 'journal', '/p4/%s/checkpoints/p4_%s' % (instance, instance))
self.dailyBackupTest(instance)
self.assertLogCount(3, ckpLogPattern)
time.sleep(1)
self.verifyTest(instance)
print('\n\nAbout to run recreate db from offline which sleeps for 5 seconds, so be patient...!')
self.dailyBackupTest(instance)
self.assertLogCount(3, ckpLogPattern)
self.assertLogCount(3, logPattern)
# Delete offline_db and check we can recreate
# self.recreateOfflineDBTest(instance)
self.assertLogCount(3, ckpLogPattern)
self.assertLogCount(3, logPattern)
time.sleep(1)
self.dailyBackupTest(instance)
# Note Daily doesn't increase the journal number so there are 2 with latest
self.assertLogCount(3, ckpLogPattern)
self.assertLogCount(3, logPattern)
self.verifyVerify(instance)
self.mkrepTest(instance)
self.refreshP4ROOTFromOfflineDBTest(instance)
self.loadCheckpointTest(instance)
self.parallelCheckpointTest(instance)
self.verifyJournalCorruption(instance)
def runTest(self):
"Configure the master instance"
all_instances = {"1": "1667",
"master": "2667"}
if options.instance and options.instance in all_instances:
instances = {options.instance: all_instances[options.instance]}
else:
instances = all_instances
for instance, port in instances.items():
self.resetTest(all_instances.keys())
if not options.setup:
self.mkdirsTest(instance, options.version)
self.configureInstance(instance, options.version, port)
if options.setup:
self.instanceSetup(instance, port)
else:
self.instanceTest(instance, port)
if __name__ == "__main__":
init_logging()
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument('--instance', default=None)
parser.add_argument('--setup', action='store_true')
parser.add_argument('--version', default='25.2')
# Note the followin sets the global options variable - used in runTest
options, args = parser.parse_known_args()
testrunner = None
unittest.main(testRunner=testrunner, argv=sys.argv[:1] + args)
| # | Change | User | Description | Committed | |
|---|---|---|---|---|---|
| #52 | 32236 | C. Thomas Tyler |
Released SDP 2025.2.32234 (2025/12/05). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #51 | 31677 | C. Thomas Tyler |
Released SDP 2025.1.31674 (2025/06/04). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #50 | 31077 | C. Thomas Tyler |
Released SDP 2024.2.31075 (2024/12/20). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #49 | 30915 | C. Thomas Tyler |
Released SDP 2024.1.30913 (2024/11/20). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #48 | 30388 | C. Thomas Tyler |
Released SDP 2024.1.30385 (2024/06/11). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #47 | 29443 | C. Thomas Tyler |
Released SDP 2022.2.29441 (2023/02/27). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #46 | 29401 | C. Thomas Tyler |
Released SDP 2022.2.29399 (2023/02/06). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #45 | 29252 | C. Thomas Tyler |
Released SDP 2022.2.29250 (2022/12/08). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #44 | 29205 | C. Thomas Tyler |
Released SDP 2022.1.29203 (2022/11/22). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #43 | 28858 | C. Thomas Tyler |
Released SDP 2022.1.28855 (2022/05/27). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #42 | 28651 | C. Thomas Tyler |
Released SDP 2021.2.28649 (2022/03/03). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #41 | 28240 | C. Thomas Tyler |
Released SDP 2021.1.28238 (2021/11/12). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #40 | 27761 | C. Thomas Tyler |
Released SDP 2020.1.27759 (2021/05/07). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #39 | 27331 | C. Thomas Tyler |
Released SDP 2020.1.27325 (2021/01/29). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #38 | 26480 | C. Thomas Tyler |
Released SDP 2019.3.26478 (2020/04/12). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #37 | 26470 | C. Thomas Tyler |
Released SDP 2019.3.26468 (2020/04/10). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #36 | 26403 | C. Thomas Tyler |
Released SDP 2019.3.26400 (2020/03/28). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #35 | 26161 | C. Thomas Tyler |
Released SDP 2019.3.26159 (2019/11/06). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #34 | 25596 | C. Thomas Tyler |
Released SDP 2019.2.25594 (2019/05/02). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #33 | 25483 | C. Thomas Tyler |
Released SDP 2019.1.25480 (2019/04/11). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #32 | 25279 | C. Thomas Tyler |
Released SDP 2019.1.25276 (2019/03/06). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #31 | 25245 | C. Thomas Tyler |
Released SDP 2019.1.25238 (2019/03/02). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #30 | 23331 | C. Thomas Tyler |
Released SDP 2017.4.23329 (2017/12/05). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #29 | 22685 | Russell C. Jackson (Rusty) | Update main with current changes from dev. | ||
| #28 | 22185 | C. Thomas Tyler |
Released SDP 2017.2.22177 (2017/05/17). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #27 | 21338 | C. Thomas Tyler |
Released SDP 2016.2.21328 (2016/12/16). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #26 | 20974 | C. Thomas Tyler |
Released SDP 2016.2.20972 (2016/11/01). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #25 | 20481 | C. Thomas Tyler |
Released SDP 2016.1.20460. Copy Up using 'p4 copy -r -b perforce_software-sdp-dev', with selective removal of changes related to work-in-progress files. |
||
| #24 | 19898 | C. Thomas Tyler |
Released SDP/MultiArch/2016.1/19888 (2016/07/07). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #23 | 19694 | C. Thomas Tyler |
Released SDP/MultiArch/2016.1/19661 (2016/06/08). Copy Up using 'p4 copy -r -b perforce_software-sdp-dev'. |
||
| #22 | 19414 | C. Thomas Tyler | Released SDP/MultiArch/2016.1/19410 (2016/05/17). | ||
| #21 | 18961 | C. Thomas Tyler | Released: SDP/MultiArch/2016.1/18958 (2016/04/08). | ||
| #20 | 18625 | Russell C. Jackson (Rusty) | Copy dev to main to pick up corrected test script. | ||
| #19 | 18619 | Russell C. Jackson (Rusty) | Updating main with current changes. | ||
| #18 | 16783 | Robert Cowham | Doc reference to test. | ||
| #17 | 16555 | C. Thomas Tyler |
Updated assertions in the test suite due to changes in @16554 (moved 'tmp' to logs volume). |
||
| #16 | 16372 | C. Thomas Tyler |
Follow on changes related to changing default admin user from 'p4admin' to 'perforce' in change in change 16370. //guest/perforce_software/sdp/main/Server/Unix/p4/common/bin/triggers/command_block.py # edit //guest/perforce_software/sdp/main/Server/Unix/p4/common/bin/triggers/keep_group_unset.py # edit //guest/perforce_software/sdp/main/Server/Unix/p4/common/lib/MinP4.pm # edit //guest/perforce_software/sdp/main/Server/Windows/setup/sdp_master_config.ini # edit //guest/perforce_software/sdp/main/Server/test/test_SDP.py # edit //guest/perforce_software/sdp/main/Server/test/test_SDPEnv.py # edit |
||
| #15 | 16265 | Robert Cowham | Fix tests broken by previous few changes. | ||
| #14 | 14045 | Robert Cowham |
Move some stuff from instance_vars to p4_vars (or at least for defaults). Clarify user messages. |
||
| #13 | 13912 | C. Thomas Tyler | Pushing SDP 2015.1.13910. | ||
| #12 | 12172 | Russell C. Jackson (Rusty) | Removed p4master_run | ||
| #11 | 12115 | Russell C. Jackson (Rusty) | Dropped /p4/ssl | ||
| #10 | 12114 | Russell C. Jackson (Rusty) | Updated location of ssl directory. | ||
| #9 | 11923 | Russell C. Jackson (Rusty) | Correct the name/location of the instance vars file. | ||
| #8 | 11878 | Russell C. Jackson (Rusty) |
Corrected crontab name to p4.crontab in output when crontab's exist. Corrected test to remove p4.crontab and p4.crontab.replica - All Reb's fault. |
||
| #7 | 11830 | Russell C. Jackson (Rusty) | Updated to set MASTERINSTANCE to instance. | ||
| #6 | 11732 | Robert Cowham | Fix tests to remove crontab entries when resetting | ||
| #5 | 11707 | Robert Cowham |
Refactored sending of mail to a common function. Make the setting of "MAILFROM" work for Ubuntu (GNU Mailutils) as well as CentOS |
||
| #4 | 11545 | Robert Cowham | Fix failure to create link under ubuntu | ||
| #3 | 11544 | Robert Cowham | Tests now work with Rusty's new version | ||
| #2 | 11539 | Robert Cowham | Make tests run with alphanumeric instance names (even if with old version of SDP). | ||
| #1 | 10148 | C. Thomas Tyler | Promoted the Perforce Server Deployment Package to The Workshop. |