test_SDP.py #74

Depot path: //guest/perforce_software/sdp/dev/Server/test/test_SDP.py
Page actions: View | Commits | Open | Download .zip | Download (34 KB)
#!/usr/bin/env python3
# -*- encoding: UTF8 -*-

# test_SDP.py
# Tests SDP (Server Deployment Package) on Linux VMs
# Intended to be run from with a Docker container.
# See documentation and run_tests.sh in /sdp/main/test/README.md

from __future__ import print_function

import argparse
import fileinput
import glob
import logging
import os
import pwd
import re
import socket
import stat
import subprocess
import sys
import time
import unittest

import P4

LOGGER_NAME = 'SDPTest'
mkdirs_script = '/hxdepots/sdp/Server/Unix/setup/mkdirs.sh'
mkdirs_config = '/hxdepots/sdp/Server/Unix/setup/mkdirs.cfg'

MAILTO = 'mailto-admin@example.com'
MAILFROM = 'mailfrom-admin@example.com'

logger = logging.getLogger(LOGGER_NAME)

options = None

class NotSudo(Exception):
    """Raised when the harness lacks working (passwordless) sudo access."""

def get_host_ipaddress():
    """Return this host's primary (non-loopback) IPv4 address as a string.

    Tries a hostname lookup first; if that fails or yields a loopback
    address, determines the outbound interface address by "connecting" a
    UDP socket to a public IP (no packets are actually sent for UDP).
    """
    try:
        address = socket.gethostbyname(socket.gethostname())
        # On my system, this always gives me 127.0.0.1. Hence...
    except OSError:
        # Narrowed from a bare except: name-resolution failures raise
        # OSError subclasses (socket.gaierror etc).
        address = ''
    if not address or address.startswith('127.'):
        # ...the hard way: a connected UDP socket reveals the local
        # address the kernel would route from.
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            s.connect(('4.2.2.1', 0))
            address = s.getsockname()[0]
        finally:
            # Bug fix: the original called s.detach(), which returns the
            # raw fd WITHOUT closing it, leaking a descriptor per call.
            s.close()
    logger.debug('IPAddress: %s' % address)
    return address

def init_logging():
    """Configure the module logger to write DEBUG output to /tmp/<name>.log.

    The log file is truncated (mode='w') on each run. Safe to call once at
    startup before any tests execute.
    """
    # No 'global' statement needed: the existing logger object is only
    # mutated (setLevel/addHandler), never rebound.
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s:%(name)s:%(levelname)s: %(message)s')
    fh = logging.FileHandler('/tmp/%s.log' % LOGGER_NAME, mode='w')
    fh.setLevel(logging.DEBUG)
    fh.setFormatter(formatter)
    logger.addHandler(fh)

def do_unlink(filename):
    """Remove filename (including dangling symlinks) if present; no-op otherwise."""
    if not os.path.lexists(filename):
        return
    os.unlink(filename)

def substitute_unix_vars(line, instance, port):
    """Filter one line of an SDP shell config file for test use.

    Selected 'export VAR=' settings are rewritten to test-friendly values;
    every other line is echoed unchanged (rstripped). Output goes to
    stdout, which fileinput's inplace mode redirects into the file.
    The 'instance' parameter is currently unused but kept for callers.
    """
    stripped = line.rstrip()
    # (prefix, replacement) pairs; prefixes are mutually exclusive so
    # the scan order does not matter.
    rewrites = [
        ('export MAILTO=', "export MAILTO=%s" % MAILTO),
        ('export SSL_PREFIX=ssl:', "export SSL_PREFIX="),
        ('export MAILFROM=', "export MAILFROM=%s" % MAILFROM),
        ('export P4PORTNUM=', "export P4PORTNUM=%s" % port),
        ('export KEEPLOGS=', "export KEEPLOGS=3"),
        ('export KEEPCKPS=', "export KEEPCKPS=3"),
        ('export VERIFY_SDP_SKIP_TEST_LIST=', "export VERIFY_SDP_SKIP_TEST_LIST=crontab"),
        ('export KEEPJNLS=', "export KEEPJNLS=3"),
    ]
    for prefix, replacement in rewrites:
        if stripped.startswith(prefix):
            print(replacement)
            break
    else:
        print(stripped)
    
def configure_p4_vars(instance, port):
    """Rewrite /p4/common/bin/p4_vars in place with test-friendly settings."""
    with fileinput.input('/p4/common/bin/p4_vars', inplace=True) as config:
        for config_line in config:
            substitute_unix_vars(config_line, instance, port)

def configure_instance_vars(instance, port):
    """Rewrite /p4/common/config/p4_<instance>.vars in place with test settings."""
    vars_path = '/p4/common/config/p4_%s.vars' % instance
    with fileinput.input(vars_path, inplace=True) as config:
        for config_line in config:
            substitute_unix_vars(config_line, instance, port)

class SDPTest_base(unittest.TestCase):
    """Generic test base class for SDP test cases to inherit.

    Provides regex assertions over command output, shell/sudo command
    helpers, mkdirs.cfg configuration, and helpers for running the SDP
    mkdirs.sh installer and reading instance log files.
    """

    def assertLinePresent(self, line, output):
        "Asserts regex line present in output"
        re_line = re.compile(line, re.MULTILINE)
        self.assertTrue(re_line.search(output), "%s not found in:\n%s" % (line, output))

    def assertLineNotPresent(self, line, output):
        "Asserts regex line NOT present in output"
        re_line = re.compile(line, re.MULTILINE)
        self.assertFalse(re_line.search(output), "%s found in:\n%s" % (line, output))

    def setup_everything(self):
        """Precondition checks: must run as user 'perforce' with working sudo.

        Raises NotSudo when sudo is unavailable (or would prompt and time out).
        """
        if 'perforce' != pwd.getpwuid(os.getuid())[0]:
            raise Exception("This test harness should be run as user 'perforce'")
        try:
            # Cheap probe that passwordless sudo works.
            subprocess.check_call("sudo ls > /dev/null", shell=True, timeout=20)
        except Exception:
            raise NotSudo("This test harness must be run as user perforce with sudo privileges or it will not work")

    def setUp(self):
        self.setup_everything()

    def run_test(self):
        # Placeholder test method so the base class itself is instantiable.
        pass

    def run_cmd(self, cmd, dir=".", get_output=True, timeout=35, stop_on_error=True):
        """Run cmd in a shell, logging the command and its output.

        :param cmd: shell command line to run
        :param dir: working directory for the command
        :param get_output: capture and return combined stdout+stderr when True
        :param timeout: seconds before the command is aborted
        :param stop_on_error: fail the current test on any command error
        :return: captured output ("" when get_output is False or on error)
        """
        output = ""
        try:
            logger.debug("Running: %s" % cmd)
            if get_output:
                p = subprocess.Popen(cmd, cwd=dir, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                                     universal_newlines=True, shell=True)
                output, _ = p.communicate(timeout=timeout)
                logger.debug("Output:\n%s" % output)
            else:
                result = subprocess.check_call(cmd, stderr=subprocess.STDOUT, shell=True, timeout=timeout)
                logger.debug('Result: %d' % result)
        except subprocess.CalledProcessError as e:
            logger.debug("Output: %s" % e.output)
            if stop_on_error:
                # Bug fix: message previously claimed 'Failed sudo_cmd' even
                # when this method was called directly.
                msg = 'Failed run_cmd: %d %s' % (e.returncode, str(e))
                logger.debug(msg)
                self.fail(msg)
        except Exception as e:
            logger.debug("Output: %s" % output)
            if stop_on_error:
                msg = 'Failed run_cmd: %s' % str(e)
                logger.debug(msg)
                self.fail(msg)
        return output

    def sudo_cmd(self, cmd, dir=".", get_output=True, stop_on_error=True):
        "Run cmd with sudo (thin wrapper around run_cmd)"
        output = self.run_cmd("sudo %s" % cmd, dir=dir, get_output=get_output, stop_on_error=stop_on_error)
        return output

    def configure_mkdirs(self, instance):
        """Configure mkdirs.cfg with a couple of key variables.

        Rewrites the config in place, pointing the master host/IP at this
        machine and collapsing both metadata volumes onto /hxmetadata.
        """
        ipaddr = get_host_ipaddress()
        for line in fileinput.input(mkdirs_config, inplace=True):
            line = line.rstrip()
            if line.startswith('P4MASTERHOST'):
                print("P4MASTERHOST=%s" % ipaddr)
            elif line.startswith('P4MASTER='):
                print("P4MASTER=%s" % ipaddr)
            elif line.startswith('P4ADMINPASS'):
                print("P4ADMINPASS=Password1")
            elif line.startswith('MASTERINSTANCE'):
                print("MASTERINSTANCE=%s" % instance)
            elif line.startswith('HOST_IP'):
                print("HOST_IP=%s" % ipaddr)
            elif line.startswith('DB1'):
                print("DB1=hxmetadata")
            elif line.startswith('DB2'):
                print("DB2=hxmetadata")
            else:
                print(line)

    def download_binaries(self):
        """Download Perforce Helix binaries.

        :return: True when the download script reports success, else False.
        """
        output = self.sudo_cmd("./get_helix_binaries.sh", dir="/hxdepots/sdp/helix_binaries")
        re_line = re.compile("Downloading of Helix binaries completed OK", re.MULTILINE)
        return re_line.search(output) is not None

    def disable_email(self):
        "Disable SDPMAIL setting as test environment has no email server."
        self.sudo_cmd("mv /p4/common/bin/p4_vars /p4/common/bin/p4_vars.bak")
        self.sudo_cmd("sed s:SDPMAIL=:SDPMAIL='echo mail':g /p4/common/bin/p4_vars.bak > /p4/common/bin/p4_vars")
        self.sudo_cmd("chown perforce:perforce /p4/common/bin/p4_vars")

    def run_mkdirs(self, instance, testArg=""):
        """Runs the mkdirs.sh installer script for the given instance.

        testArg may be "-test" to install into /tmp/p4 instead of live dirs.
        """
        self.sudo_cmd("mkdir /hxmetadata")  # don't use /hxmetadata1 (or 2)
        cmd = "%s %s %s" % (mkdirs_script, instance, testArg)
        output = self.sudo_cmd(cmd, dir="/hxdepots/sdp/Server/Unix/setup")
        # Raw strings throughout so regex escapes like \. and \- are not
        # treated as (invalid) Python string escapes.
        valid_lines = [r"Verified: Running as root",
                    r"Warning: \(line: \d+\) No p4p in /hxdepots/sdp/Server/Unix/p4/common/bin",
                    r"Warning: \(line: \d+\) No p4broker in /hxdepots/sdp/Server/Unix/p4/common/bin",
                    r"Appending configuration section .*Maintenance/maintenance.cfg",
                    r"Verified: Preflight checks passed.",
                    r"Setting permissions on depot files - this may take some time \.\.\.",
                    r"Setting ownership on depot files - this may take some time \.\.\.",
                    r"It is recommended that the perforce's umask be changed to 0026 to block world access to Perforce files\.",
                    r"Add umask 0026 to perforce's \.bash_profile to make this change\.",
                    r"\*+  \-test specified \- will install to /tmp/p4  \*+",
                    r"This was done in TEST mode \- please run the following command to see any changes should be",
                    r"applied to your live environment \(manually\):",
                    r"diff \-r /p4/1/bin /tmp/p4/1/bin",
                    r"diff \-r /p4/master/bin /tmp/p4/master/bin",
                    r"diff \-r /p4/common /tmp/p4/common",
                    r"If upgrading an older SDP version then be careful to ensure files in /p4/common/config are correct",
                    r"Log is: .*",
                    r"Started mkdirs.sh v.*",
                    r".*/mkdirs.sh",
                    r"Loading config file: .*/mkdirs.cfg",
                    r"Overriding SDP setting in test mode to.*",
                    r"and update that /p4/common/bin/p4_vars is appropriate\."]
        re_lines = [re.compile(x) for x in valid_lines]
        for line in output.split('\n'):
            line = line.strip()
            if not line:
                continue
            found = False
            for re_line in re_lines:
                if re_line.search(line):
                    found = True
                    break
            # mkdirs.sh is now more pedantic, making maintenance of unexpected extra lines
            # impractical - so unknown lines are no longer fatal.
            ###if not found:
            ###    self.fail('Unexpected line in mkdirs output:\n%s\noutput:\n%s' % (line, output))

    def readLog(self, log_name, instance):
        "Read and return the contents of the named log for the instance"
        with open('/p4/%s/logs/%s' % (instance, log_name), 'r') as fh:
            log_contents = fh.read()
        return log_contents

#--- Test Cases

class configure_master(SDPTest_base):
    """Full lifecycle test of an SDP master instance.

    Installs the SDP via mkdirs.sh, configures and starts a p4d instance,
    then exercises the maintenance scripts: live/daily checkpoints,
    offline_db recreation, p4verify, journal rotation and corruption
    detection, P4ROOT refresh and checkpoint loading.
    """

    def check_dirs(self, rootdir, dirlist):
        "Checks specified directories are present"
        found_dirs = self.run_cmd("find %s -type d" % rootdir, stop_on_error=False).split()
        for d in [x.strip() for x in dirlist.split()]:
            self.assertIn(d, found_dirs)

    def check_links(self, rootdir, linklist):
        "Checks specified links are present"
        found_links = self.run_cmd("find %s -type l" % rootdir, stop_on_error=False).split()
        for link in [x.strip() for x in linklist.split()]:
            self.assertIn(link, found_links)

    def check_dirs_exactly(self, rootdir, dirlist):
        "Checks specified directories and only those are present"
        found_dirs = self.run_cmd("find %s -type d" % rootdir, stop_on_error=False).split()
        dirs = [x.strip() for x in dirlist.split()]
        for d in dirs:
            self.assertIn(d, found_dirs)
        # Reverse check: nothing beyond the expected directories exists.
        for d in found_dirs:
            self.assertIn(d, dirs)

    def p4service(self, cmd, instance, stop_on_error=True):
        "Start or stop service via the instance init script"
        self.run_cmd("/p4/%s/bin/p4d_%s_init %s" % (instance, instance, cmd), get_output=False, stop_on_error=stop_on_error)

    def remove_test_dirs(self, instances):
        "Remove all appropriate directories created"
        # NOTE(review): 'instances' is expected to be an iterable of
        # instance names; a bare string would be iterated per-character -
        # confirm callers always pass lists/dict keys.
        dirs_to_remove = "/hxdepots/sdp /hxdepots/p4 /hxmetadata/p4 /hxmetadata1/p4 /hxmetadata2/p4 /hxlogs/p4".split()
        for instance in instances:
            dirs_to_remove.append("/p4/%s" % instance)
        for d in dirs_to_remove:
            if os.path.exists(d):
                self.sudo_cmd("rm -rf %s" % d)
        for instance in instances:
            for f in ["/p4/common"]:
                if os.path.lexists(f):
                    self.sudo_cmd("unlink %s" % f)

    def liveCheckpointTest(self, instance):
        "Test live checkpoint script"
        self.assertFalse(os.path.exists('/p4/%s/offline_db/db.domain' % instance))
        self.run_cmd('/p4/common/bin/live_checkpoint.sh %s' % instance)
        # Quick check on checkpoint log file contents
        log_contents = self.readLog('checkpoint.log', instance)
        self.assertRegex(log_contents, "Checkpointing to /p4/%s/checkpoints/p4_%s.ckp" % (instance, instance))
        self.assertRegex(log_contents, "journal")
        # Make sure offline db is present
        self.assertTrue(os.path.exists('/p4/%s/offline_db/db.domain' % instance))

    def recreateOfflineDBTest(self, instance):
        "Test recreate_offline_db script"
        self.assertTrue(os.path.exists('/p4/%s/offline_db/db.domain' % instance))
        self.sudo_cmd("rm -rf /p4/%s/offline_db/db.*" % instance)
        self.run_cmd('/p4/common/bin/recreate_offline_db.sh %s' % instance)
        # Quick check on log file contents
        logPattern = '/p4/%s/logs/recreate_offline_db.log.*' % instance
        logfiles = glob.glob(logPattern)
        self.assertEqual(1, len(logfiles))
        log_contents = self.readLog(os.path.basename(logfiles[0]), instance)
        self.assertRegex(log_contents, "Start p4_%s recreate of offline db" % (instance))
        self.assertRegex(log_contents, "Recovering from /p4/%s/checkpoints/p4_%s.ckp" % (
            instance, instance))
        # Make sure offline db is present
        self.assertTrue(os.path.exists('/p4/%s/offline_db/db.domain' % instance))

    def failedDailyBackupTest(self, instance):
        "Test daily backup script - expected to fail due to lack of offline db"
        logger.debug("failedDailyBackupTest")
        jnl_counter = self.p4run('counter', 'journal')[0]['value']
        self.run_cmd('/p4/common/bin/daily_checkpoint.sh %s' % instance, stop_on_error=False)
        # Quick check on checkpoint log file contents
        log_contents = self.readLog('checkpoint.log', instance)
        self.assertRegex(log_contents, "Offline database not in a usable state")
        # Journal counter must NOT have advanced on a failed backup.
        new_jnl_counter = self.p4run('counter', 'journal')[0]['value']
        self.assertEqual(int(new_jnl_counter), int(jnl_counter))

    def dailyBackupTest(self, instance):
        "Test daily backup script"
        jnl_counter = self.p4run('counter', 'journal')[0]['value']
        logger.debug("dailyBackupTest")
        self.run_cmd('/p4/common/bin/daily_checkpoint.sh %s' % instance)
        # Quick check on log file contents
        log_contents = self.readLog('checkpoint.log', instance)
        self.assertRegex(log_contents, "Dumping to /p4/%s/checkpoints/p4_%s.ckp" % (instance, instance))
        self.assertRegex(log_contents, "journal")
        # A successful daily backup rotates the journal exactly once.
        new_jnl_counter = self.p4run('counter', 'journal')[0]['value']
        self.assertEqual(int(new_jnl_counter), int(jnl_counter) + 1)

    def rotateJournalTest(self, instance):
        "Test rotate_journal.sh script"
        self.run_cmd('/p4/common/bin/rotate_journal.sh %s' % instance)
        # Quick check on log file contents
        # NOTE(review): 'checkpiont.log' looks like a typo for
        # 'checkpoint.log' - readLog would raise FileNotFoundError.
        # This method does not appear to be called anywhere; confirm.
        log_contents = self.readLog('checkpiont.log', instance)
        self.assertRegex(log_contents, "End p4_%s journal rotation" % instance)

    def loadCheckpointTest(self, instance):
        "Test load_checkpoint.sh script"
        logger.debug("loadCheckpointTest")
        # Checkpoint number 8 is expected from the sequence of prior
        # checkpoint/rotation operations run by instanceTest.
        self.run_cmd('/p4/common/bin/load_checkpoint.sh /p4/%s/checkpoints/p4_%s.ckp.8.gz -i %s -y -l -si -L /p4/%s/logs/load_checkpoint.log' % (instance, instance, instance, instance))
        # Quick check on log file contents
        log_contents = self.readLog('load_checkpoint.log', instance)
        self.assertRegex(log_contents, "Checkpoint load processing took")

    def refreshP4ROOTFromOfflineDBTest(self, instance):
        "Test refresh_P4ROOT_from_offline_db.sh script"
        logger.debug("refreshP4ROOTFromOfflineDBTest")
        self.run_cmd('/p4/common/bin/refresh_P4ROOT_from_offline_db.sh %s' % instance)
        # Quick check on log file contents
        log_contents = self.readLog('refresh_P4ROOT_from_offline_db.log', instance)
        self.assertRegex(log_contents, "End p4_%s Refresh P4ROOT from offline_db" % instance)

    def verifyTest(self, instance):
        "Test verify script, including deliberately-induced verify errors"
        logger.debug("verifyTest")
        verify_cmd = '/p4/common/bin/p4verify.sh %s' % instance
        verify_recent_cmd = '/p4/common/bin/p4verify.sh %s -recent' % instance
        self.run_cmd(verify_cmd)
        log_contents = self.readLog('p4verify.log', instance)

        for depot in ["depot", "specs"]:
            verify_ok = re.compile("verify -qz //%s/...\nexit: 0" % depot, re.MULTILINE)
            self.assertRegex(log_contents, verify_ok)

        # Make sure we check for shelves in the 'depot' depot.
        # NOTE(review): this verify_ok value is overwritten below before
        # being asserted - the shelf check appears not to be exercised.
        verify_ok = re.compile("verify -qS //depot/...\nexit: 0", re.MULTILINE)

        # Check that we use '-U' for unload depot, and do not do -qS (as there are no shelves
        # in an unload depot). Also, we expect errors about their not being any files in the
        # unload depot.  Note that, depending on the p4d version, this may show as 'error:'
        # or 'warning:'; we accept either here.
        verify_ok = re.compile(r"verify -U -q //unload/...\n(error|warning): //unload/... - no such unloaded.*\nexit: 0", re.MULTILINE)
        self.assertRegex(log_contents, verify_ok)

        # Streams depot doesn't have any files so gives an error - we just search for it
        verify_ok = re.compile(r"verify -qz //streams/...\n(error|warning): //streams/... - no such.*\nexit: 0", re.MULTILINE)
        self.assertRegex(log_contents, verify_ok)

        # Now create verify errors and make sure we see them
        orig_depot_name = '/p4/%s/depots/depot' % instance
        new_depot_name = orig_depot_name + '.new'
        os.rename(orig_depot_name, new_depot_name)

        self.run_cmd(verify_cmd, stop_on_error=False)
        log_contents = self.readLog('p4verify.log', instance)

        for depot in ["depot"]:
            verify_ok = re.compile("verify -qz //%s/...\nerror: [^\n]*MISSING!\nexit: 1" % depot, re.MULTILINE)
            self.assertRegex(log_contents, verify_ok)
        # Rename things back again and all should be well!
        os.rename(new_depot_name, orig_depot_name)
        time.sleep(1)    # Make sure verify log is OK.

        self.run_cmd(verify_cmd, stop_on_error=True)
        log_contents = self.readLog('p4verify.log', instance)
        for depot in ["depot", "specs"]:
            verify_ok = re.compile("verify -qz //%s/...\nexit: 0" % depot, re.MULTILINE)
            self.assertRegex(log_contents, verify_ok)

        time.sleep(1)    # Make sure verify log is OK.
        self.run_cmd(verify_recent_cmd, stop_on_error=False)
        log_contents = self.readLog('p4verify.log', instance)
        verify_ok = re.compile("changelists per depot due to -recent", re.MULTILINE)
        self.assertRegex(log_contents, verify_ok)

    def verifyVerify(self, instance):
        "Test verify_sdp.sh script"
        logger.debug("verifyVerify")
        verify_cmd = '/p4/common/bin/verify_sdp.sh %s -skip cron' % instance
        self.run_cmd(verify_cmd)
        # Quick check on log file contents
        log_contents = self.readLog('verify_sdp.log', instance)
        self.assertRegex(log_contents, "NO ERRORS: \d+ verifications completed, with 1 warnings detected.")

    def verifyJournalCorruption(self, instance):
        "Test server restart detects journal corruption"
        logger.debug("verifyJournalCorruption")
        # Baseline: a clean restart must not report corruption.
        self.p4service("stop", instance)
        self.p4service("start", instance)
        self.p4.disconnect()
        self.connectP4(self.p4)
        jnl_counter = self.p4run('counter', 'journal')[0]['value']
        log_contents = self.readLog('p4d_init.log', instance)
        self.assertNotRegex(log_contents, "Error: possible corruption at end of journal detected")
        fname = '/p4/%s/logs/journal' % instance

        # Journal with an invalid initial line - testing for when tail -10000 happens to chop a record in half
        self.p4service("stop", instance)
        with open(fname, 'w') as fh:
            fh.write("""@ @//some/path@ @@ 1 
@ex@ 31884 1605660325
@rv@ 7 @db.user@ @z_build@ @z_build@@example.com@ @@ 1511013611 1605660070 @z_build@ @C2999B31D3A83F4F6651DAB32FAB0861@ 1 @99C5A122A727E54C327E0B3286346F00@ 2147483647 0 1511072211 0 0 0 
@ex@ 31910 1605660327
""")
        self.p4service("start", instance)
        self.p4.disconnect()
        self.connectP4(self.p4)
        jnl_counter = self.p4run('counter', 'journal')[0]['value']
        log_contents = self.readLog('p4d_init.log', instance)
        # A chopped-but-parseable journal must NOT be flagged as corrupt.
        self.assertNotRegex(log_contents, "Error: possible corruption at end of journal detected")

        # Totally invalid journal test
        self.p4service("stop", instance)
        with open(fname, 'a') as fh:
            fh.write('corruption journal data\n')
        self.p4service("start", instance)
        self.p4.disconnect()
        self.connectP4(self.p4)
        # Quick check on log file contents
        log_contents = self.readLog('p4d_init.log', instance)
        self.assertRegex(log_contents, "Error: possible corruption at end of journal detected")
        # Corruption handling rotates the journal, bumping the counter by 1.
        new_jnl_counter = self.p4run('counter', 'journal')[0]['value']
        self.assertEqual(int(new_jnl_counter), int(jnl_counter) + 1)

    def configureServer(self, instance):
        "Set various configurables for master"
        configurables = """
            security=3
            auth.id=p4_auth
            run.users.authorize=1
            db.peeking=2
            dm.user.noautocreate=2
            dm.user.resetpassword=1
            filesys.P4ROOT.min=1G
            filesys.depot.min=1G
            filesys.P4JOURNAL.min=1G
            server=3
            net.tcpsize=256k
            lbr.bufsize=256k
            server.commandlimits=2
            serverlog.retain.3=7
            serverlog.retain.7=7
            serverlog.retain.8=7""".split("\n")
        instance_configurables = """
            journalPrefix=/p4/SDP_INSTANCE/checkpoints/p4_SDP_INSTANCE
            server.depot.root=/p4/SDP_INSTANCE/depots
            serverlog.file.3=/p4/SDP_INSTANCE/logs/errors.csv
            serverlog.file.7=/p4/SDP_INSTANCE/logs/events.csv
            serverlog.file.8=/p4/SDP_INSTANCE/logs/integrity.csv""".split("\n")
        for c in [x.strip() for x in configurables]:
            if c:
                self.p4run("configure", "set", c)
        for ic in instance_configurables:
            ic = ic.strip()
            if ic:
                self.p4run("configure", "set", ic.replace("SDP_INSTANCE", instance))

    def configureReplication(self):
        "Configures stuff required for replication"
        # Currently a placeholder - no replication setup is performed.

    def p4run(self, *args):
        "Run the p4 command via P4Python, logging command and result"
        logger.debug('p4 cmd: %s' % ",".join([str(x) for x in args]))
        result = self.p4.run(args)
        logger.debug('result: %s' % str(result))
        return result

    def resetTest(self, instances):
        """Kill stray p4d processes and restore a pristine SDP tree.

        NOTE(review): mkdirsTest/configureInstance pass a single instance
        name (a string), so the loops below iterate over its characters -
        harmless for instance "1" but confirm intent for "master".
        """
        for instance in instances:
            self.sudo_cmd("ps -ef | grep p4d_%s | awk '{print $2}' | xargs kill > /dev/null 2>&1" % instance, stop_on_error=False)
        self.remove_test_dirs(instances)
        self.sudo_cmd("cp -R /sdp /hxdepots/sdp")
        self.sudo_cmd("rm -rf /tmp/p4")
        self.sudo_cmd("sudo chown -R perforce:perforce /hxdepots/sdp")
        for f in ["/p4/p4.crontab", "/p4/p4.crontab.replica", "/p4/p4.crontab.edge",
                  "/tmp/p4/p4.crontab", "/tmp/p4/p4.crontab.replica", "/tmp/p4/p4.crontab.edge"]:
            if os.path.exists(f):
                os.remove(f)
        for instance in instances:
            d = "/p4/%s" % instance
            if os.path.exists(d):
                self.sudo_cmd("rm -rf %s" % d)
            if os.path.exists(d.lower()):
                self.sudo_cmd("rm -rf %s" % d.lower())

    def mkdirsTest(self, instance):
        "Runs mkdirs with -test option and makes sure all is OK"
        # Stop the Perforce service if currently running from a previous run in case it is accessing dirs
        self.resetTest(instance)
        self.configure_mkdirs(instance)
        self.download_binaries()
        self.run_mkdirs(instance, "-test")
        self.disable_email()
        # Check dirs are empty
        self.check_dirs('/tmp/hxmounts/hxdepots', '/tmp/hxmounts/hxdepots')
        self.check_dirs('/tmp/hxmounts/hxlogs', '/tmp/hxmounts/hxlogs')
        self.check_dirs('/tmp/hxmounts/hxmetadata', '/tmp/hxmounts/hxmetadata')

        link_list = """
            /tmp/p4/common"""
        self.check_links('/tmp/p4', link_list)

        self.check_dirs('/tmp/p4', '/tmp/p4')

        dir_list = """
            /tmp/hxmounts/hxdepots/p4/common/bin
            /tmp/hxmounts/hxdepots/p4/common/bin/triggers
            /tmp/hxmounts/hxdepots/p4/common/lib
            /tmp/hxmounts/hxdepots/p4/SDP_INSTANCE/checkpoints
            /tmp/hxmounts/hxdepots/p4/SDP_INSTANCE/depots""".replace("SDP_INSTANCE", instance)
        self.check_dirs('/tmp/hxmounts/hxdepots', dir_list)
        dir_list = """
            /tmp/p4/SDP_INSTANCE/bin""".replace("SDP_INSTANCE", instance)
        self.check_dirs('/tmp/p4/SDP_INSTANCE/'.replace("SDP_INSTANCE", instance), dir_list)

    def configureInstance(self, instance, port):
        "Configure the master instance"
        # Stop the Perforce service if currently running from a previous run in case it is accessing dirs
        self.resetTest(instance)
        self.configure_mkdirs(instance)
        self.download_binaries()
        self.run_mkdirs(instance)
        self.disable_email()
        depotdata_dir_list = """
            /hxdepots/p4
            /hxdepots/p4/common
            /hxdepots/p4/common/bin
            /hxdepots/p4/common/bin/triggers
            /hxdepots/p4/common/lib
            /hxdepots/p4/SDP_INSTANCE
            /hxdepots/p4/SDP_INSTANCE/depots
            /hxdepots/p4/SDP_INSTANCE/checkpoints""".replace("SDP_INSTANCE", instance)
        logdata_dir_list = """
            /hxlogs
            /hxlogs/p4
            /hxlogs/p4/SDP_INSTANCE
            /hxlogs/p4/SDP_INSTANCE/tmp
            /hxlogs/p4/SDP_INSTANCE/logs""".replace("SDP_INSTANCE", instance)
        metadata_dir_list = """
            /hxmetadata
            /hxmetadata/p4
            /hxmetadata/p4/SDP_INSTANCE
            /hxmetadata/p4/SDP_INSTANCE/db1
            /hxmetadata/p4/SDP_INSTANCE/db1/save
            /hxmetadata/p4/SDP_INSTANCE/db2
            /hxmetadata/p4/SDP_INSTANCE/db2/save""".replace("SDP_INSTANCE", instance)
        p4_link_list = """
            /p4/SDP_INSTANCE/root
            /p4/SDP_INSTANCE/offline_db
            /p4/SDP_INSTANCE/checkpoints
            /p4/SDP_INSTANCE/depots
            /p4/SDP_INSTANCE/logs
            /p4/SDP_INSTANCE/tmp""".replace("SDP_INSTANCE", instance)
        p4_dir_list = """
            /p4/SDP_INSTANCE/bin""".replace("SDP_INSTANCE", instance)
        self.check_dirs('/hxdepots', depotdata_dir_list)
        self.check_dirs('/hxlogs', logdata_dir_list)
        self.check_dirs('/hxmetadata', metadata_dir_list)
        self.check_dirs('/p4', p4_dir_list)
        self.check_links('/p4/SDP_INSTANCE'.replace("SDP_INSTANCE", instance), p4_link_list)
        configure_instance_vars(instance, port)
        configure_p4_vars(instance, port)

    def assertLogCount(self, expected, logPattern):
        "Assert the number of log files matching logPattern"
        # NOTE(review): the 'expected' parameter is ignored - the count is
        # hard-coded to 3 (all current callers pass 3). Confirm intent.
        logger.debug("Looking for logs: %s" % logPattern)
        logs = glob.glob(logPattern)
        self.assertEqual(3, len(logs))

    def configure_master_p4d_instance(self, p4, instance):
        """Configure the running p4d: superuser, depots, test workspace/file.

        NOTE(review): user creation/login here duplicates work already done
        in instanceSetup - presumably harmless idempotent setup; confirm.
        """
        # Create our user and set password
        logger.debug('Creating user and setting password')

        # Create our user and set password
        user = p4.fetch_user('perforce')
        p4.save_user(user)
        p4.run_password('Password1', 'Password1')
        p4.password = 'Password1'
        p4.run_login()
        # Make him superuser
        prot = p4.fetch_protect()
        p4.save_protect(prot)

        # Things to setup
        # - create spec depot
        # - create a workspace and add at least one file
        # - configure the various tunables
        # - create server definitions - master and replica
        # - create service user for replica
        p4.run('configure', 'set', 'server.depot.root=/p4/%s/depots' % instance)
        p4.run('admin', 'restart')
        p4.disconnect() # New depot won't show up unless we do this 
        time.sleep(1)
        self.connectP4(p4)
        if instance == 'master':
            # Provide the conventional lower-case alias for the instance.
            if not os.path.lexists("/p4/%s" % instance.lower()):
                self.run_cmd("ln -s /p4/%s /p4/%s" % (instance, instance.lower()))

        depot = p4.fetch_depot('specs')
        self.assertEqual(depot['Map'], 'specs/...')
        depot['Type'] = 'spec'
        p4.save_depot(depot)

        depot = p4.fetch_depot('unload')
        self.assertEqual(depot['Map'], 'unload/...')
        depot['Type'] = 'unload'
        p4.save_depot(depot)

        depot = p4.fetch_depot('archive')
        self.assertEqual(depot['Map'], 'archive/...')
        depot['Type'] = 'archive'
        p4.save_depot(depot)

        depot = p4.fetch_depot('streams')
        self.assertEqual(depot['Map'], 'streams/...')
        depot['Type'] = 'stream'
        p4.save_depot(depot)

        p4.disconnect() # New depot won't show up unless we do this 
        self.connectP4(p4)

        # 4 created above plus the default 'depot'.
        depots = p4.run_depots()
        self.assertEqual(5, len(depots))

        ws_name = 'test_ws'
        ws = p4.fetch_client(ws_name)
        ws['Root'] = '/tmp/test_ws'
        ws['View'] = ['//depot/main/... //%s/...' % ws_name]
        p4.save_client(ws)
        p4.client = ws_name

        if not os.path.exists(ws['Root']):
            os.mkdir(ws['Root'])
        fname = '/tmp/%s/file1' % ws_name
        if os.path.exists(fname):
            os.chmod(fname, stat.S_IWRITE)
            os.unlink(fname)
        with open(fname, 'w') as fh:
            fh.write('test data\n')
        p4.run_add(fname)
        chg = p4.fetch_change()
        chg['Description'] = 'Initial file'
        p4.save_submit(chg)

        changes = p4.run_changes()
        self.assertEqual(1, len(changes))

    def connectP4(self, p4):
        # Connect with retries - the server may still be starting up.
        retries = 10
        for i in range(retries):
            try:
                p4.connect()
                break
            except:
                logger.debug("sleeping to connect - retry %d" % i)
                time.sleep(1)
        if not p4.connected():
            self.fail("failed to connect to server after %d retries" % retries)

    def instanceSetup(self, instance, port, test_daily=False):
        "Start the instance's p4d and perform basic setup and checkpoint tests"
        # Start instance and do the basics
        self.p4service("start", instance)

        p4 = P4.P4()
        self.p4 = p4
        p4.port = 'localhost:%s' % port
        p4.user = 'perforce'
        self.connectP4(p4)

        # Create our user and set password
        user = p4.fetch_user('perforce')
        p4.save_user(user)
        p4.run_password('', 'Password1')
        p4.password = 'Password1'
        p4.run_login()
        # Make him superuser
        prot = p4.fetch_protect()
        p4.save_protect(prot)

        self.configure_master_p4d_instance(p4, instance)
        self.configureServer(instance)

        if test_daily:
            # Following 2 tests should fail due to lack of offline_db
            self.failedDailyBackupTest(instance)
            time.sleep(1)
            # self.recreateDBFromOfflineTest(instance)
            time.sleep(1)
        self.liveCheckpointTest(instance)
        time.sleep(1)
        self.verifyVerify(instance)

    def instanceTest(self, instance, port):
        "Perforce complete tests"

        ckpLogPattern = '/p4/%s/logs/checkpoint.log*' % instance
        logPattern = '/p4/%s/logs/log*' % instance

        self.instanceSetup(instance, port, test_daily=True)

        # Now the offline_db should exist
        # Run enough times to ensure logs get removed - (KEEPCKPS + 1)
        self.dailyBackupTest(instance)
        self.assertLogCount(3, ckpLogPattern)
        time.sleep(1)
        self.dailyBackupTest(instance)
        self.assertLogCount(3, ckpLogPattern)
        time.sleep(1)
        self.dailyBackupTest(instance)
        self.assertLogCount(3, ckpLogPattern)
        time.sleep(1)
        # Manually rotate journals again and ensure daily backup handles that
        self.p4run('admin', 'journal', '/p4/%s/checkpoints/p4_%s' % (instance, instance))
        self.dailyBackupTest(instance)
        self.assertLogCount(3, ckpLogPattern)
        time.sleep(1)

        self.verifyTest(instance)

        print('\n\nAbout to run recreate db from offline which sleeps for 30 seconds, so be patient...!')
        self.dailyBackupTest(instance)
        self.assertLogCount(3, ckpLogPattern)
        self.assertLogCount(3, logPattern)

        # Delete offline_db and check we can recreate
        # self.recreateOfflineDBTest(instance)
        self.assertLogCount(3, ckpLogPattern)
        self.assertLogCount(3, logPattern)
        time.sleep(1)
        self.dailyBackupTest(instance)
        # Note Daily doesn't increase the journal number so there are 2 with latest
        self.assertLogCount(3, ckpLogPattern)
        self.assertLogCount(3, logPattern)

        self.verifyVerify(instance)

        self.refreshP4ROOTFromOfflineDBTest(instance)

        self.verifyJournalCorruption(instance)
        # print(p4.run_admin('stop'))

        self.loadCheckpointTest(instance)

    def runTest(self):
        "Run the full install/configure/test sequence for each selected instance"
        all_instances = {"1": "1667",
                         "master": "2667"}
        if options.instance and options.instance in all_instances:
            instances = {options.instance: all_instances[options.instance]}
        else:
            instances = all_instances
        for instance, port in instances.items():
            self.resetTest(all_instances.keys())
            self.mkdirsTest(instance)
            self.configureInstance(instance, port)
            if options.setup:
                self.instanceSetup(instance, port)
            else:
                self.instanceTest(instance, port)

if __name__ == "__main__":
    init_logging()
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument('--instance', default=None)
    parser.add_argument('--setup', action='store_true')
    options, args = parser.parse_known_args()

    testrunner = None
    unittest.main(testRunner=testrunner, argv=sys.argv[:1] + args)
# Change User Description Committed
#109 31037 Robert Cowham Fix failing tests due to lack of helix binaries.
#108 31036 Robert Cowham Fix passing of default version to run
#107 31032 C. Thomas Tyler Re-added the assertion in test_Upgrade.sh.
Added version parameter to test_SDP.py; current default is 24.2.
#106 30905 Robert Cowham Increase timeout to get_helix_binaries to 2mins
#105 30879 Robert Cowham Update tests to use systemctl (remove hack) and fix some env problems.
#104 30859 C. Thomas Tyler Adapted test suite to changes in expected script logs.

Added setting filesys.P4LOG.min=10M to increase useful lifespan of
minimally provisioned Docker/Podman test instances.
#103 30841 C. Thomas Tyler Updated test for parallel checkpoints.
#102 30356 C. Thomas Tyler Added Rocky 9 and Ubuntu 22 to test suite.

Dropped CentOS 6.

Changed default OS for tests from CentOS 7 to Rocky 9.

Adapted to using podman for the test suite.

Reduced minimum disk space requirement for testing
from 1G to 10M.

To be revisited:
* Disabled OOM killer defense feature and systemd due to
compatibility issues with the test environment.
* Disabled systemd by moving systemctl aside

Enhanced build_docker_image.sh; added '-clean' option and added
support for more platforms.

Updated OS-specific packages as needed for all platforms, e.g.
adding 'file' and 'rsync' utils as needed in Docker definitions.

Added minimum performance recommendations for test containers in
config.

Updated env.sh utilities.

This change was tested on:
[X] centos7
[X] rocky8
[X] rocky9
[X] ubuntu20
[X] ubuntu22

#review-30357 @robert_cowham

TO DO:
* This builds using the official SDP test suite run manually on
Tom's laptop. Next step: Get it working on our shiny new Jenkins
Rocky 9 server machine.
#101 29431 C. Thomas Tyler In test_SDP.py, adjusted expected output for parallel checkpoints.
#100 29315 C. Thomas Tyler Fixed the mkrepTest test.
#99 29314 C. Thomas Tyler More test suite fixes.
#98 29313 C. Thomas Tyler Fixed another syntax issue in test_SDP.py.
#97 29310 C. Thomas Tyler Fixed bug in test suite.
#96 29303 C. Thomas Tyler mkrep.sh v3.1.0:
* Added infer_type_tag() function with logic to determine if P4TARGET is
filtered or not.
* Discouraged use of 'ro' and 'rom' types (as they don't support 'p4 failover').
* Clarified usage to indicate daisy chaining is acceptable from
forwarding replicas.
* Added preflight check to avoid accidental overwrite of existing server spec.
* Added '-os' option to overwrite existing server spec intentionally.
* Re-added test for mkrep.sh to default test suite after fixing the test.
* Added '-N' option to fully support server spec naming standard that allows
for things like 'p4d_edge2_syd' and 'p4d_edge3_syd'. Added docs for same.

#review-29304
#95 29249 C. Thomas Tyler Enhanced regression tests for parallel checkpoint.
#94 29232 C. Thomas Tyler Added regression test for new parallel checkpoint feature.

#review-29220
#93 29228 C. Thomas Tyler Enhanced reliability of p4verify tests, addressing timing issues
related to log rotation.
#92 29201 C. Thomas Tyler Test suite order-of-operations tweaks.
#91 29200 C. Thomas Tyler Adjusted log path.
#90 29199 C. Thomas Tyler Test suite tweaks.
#89 29198 C. Thomas Tyler Moved new load_checkpoint.sh test which tries to load the journal so
it runs BEFORE the journal corruption test that corrupts the journal.

D'oh!

Also corrected outdated output in test suite comments.
#88 29197 C. Thomas Tyler Test suite adjustments.

Attempted fix of a gnarly Python test suite timing issue with p4verify.log indicating:
sys:1: ResourceWarning: unclosed file <_io.TextIOWrapper name=5 encoding='ANSI_X3.4-1968'>
(Seeing this in my local Docker environment only; the official test suite gets
past this with no issues).

Add an explicit file close() call in the function that reads log contents, as the log
is rotated too quickly for it to close naturally.

Adjusted checkpoint number to try to fix journal sequence issue in test for
load_checkpoint.sh.
#87 29196 C. Thomas Tyler Corrected displayed function name in error output.

Attempted fix of out-of-sequence error by changing order of
tests.
#86 29194 C. Thomas Tyler Fixed typo in test suite code.
#85 29193 C. Thomas Tyler load_checkpoint.sh v2.7.0:

* If live P4JOURNAL is specified to replay, don't move it.
* Added '-k' option to keep rather than delete db.* files.

Updated test case to exercise new new options.

With these changes, this script, originally focused on
loading checkpoints on replicas/edge servers, is now
also suited to support recovery.

See also: SDP-582, for a separate script focused
on recovery scenarios, which can call this script as needed.

#review-29164 @robert_cowham
#84 28842 C. Thomas Tyler Test suite: Adjusted expected output of tests related to p4verify.sh.
#83 28417 C. Thomas Tyler Added new tests functions mkrepTest() and edgeDumpTest()
to test mkrep.sh and edge_dump.sh, respectively.

These new functions need to be called and may require
further revision.
#82 28213 C. Thomas Tyler Fixed broken test.
#81 27991 C. Thomas Tyler Fixed typo.
#80 27990 C. Thomas Tyler In test_SDP.py, adjusted disable_p4p() and disable_email() to
account for scenario where 'mkdirs.sh' was run with '-test'.

Also added a failsafe so if files to be affected are not found,
simply return to avoid impacting tests, as these functions are
mere support functions and not actual tests.
#79 27722 C. Thomas Tyler Refinements to @27712:
* Resolved one out-of-date file (verify_sdp.sh).
* Added missing adoc file for which HTML file had a change (WorkflowEnforcementTriggers.adoc).
* Updated revdate/revnumber in *.adoc files.
* Additional content updates in Server/Unix/p4/common/etc/cron.d/ReadMe.md.
* Bumped version numbers on scripts with Version= def'n.
* Generated HTML, PDF, and doc/gen files:
  - Most HTML and all PDF are generated using Makefiles that call an AsciiDoc utility.
  - HTML for Perl scripts is generated with pod2html.
  - doc/gen/*.man.txt files are generated with .../tools/gen_script_man_pages.sh.

#review-27712
#78 27270 C. Thomas Tyler int/str tweak.
#77 27269 C. Thomas Tyler Tweaked test suite to define required broker port number.
#76 27267 C. Thomas Tyler Test suite tweaks to pass verify_sdp.sh preflight checks:
* Changed ServerID from test_master to master.test
* Disabled proxy so /p4/N/cache is not verified.
#75 27061 Robert Cowham mkdirs.sh now respects the MASTER_ID value from mkdirs.cfg (or mkdirs.N.cfg).
#74 27022 C. Thomas Tyler Changed 'exes' to 'helix_binaries' in folder and script name, per review.
#73 26982 C. Thomas Tyler mkdirs.sh v4.1.0:
* Accounted for directory structure change of Maintenance to Unsupported.
* Added standard command line processing with '-h' and '-man' doc flags,
and other flags (all documented).
* Added in-code docs and updated AsciiDoc.
* Enhanced '-test' mode to simulate /hx* mounts.
* Enhanced preflight testing, and fixed '-test' mode installs.
* Added support for installing to an alternate root directory.
* Added '-s <ServerID>' option to override REPLICA_ID.
* Added '-S <TargetServerID>' used for replicas of edge servers.
* Added '-t <server_type>' option to override SERVER_TYPE.
* Added '-M' option to override mount points.
* Added '-f' fast option to skip big chown/chmod commands, and
moved those commands near the end as well.

verify_sdp.sh v5.9.0:
* Added check for /p4/Version file, and checked that other legacy
SDP methods of checking version
* Added sanity check for crontab.
* Added 'test skip' mechanism to skip certain tests:
 - crontab: Skip crontab check. Use this if you do not expect crontab to be configured, perhaps if a different scheduler is used.
 - license: Skip license related checks.
 - version: Skip version checks.
 - excess: Skip checks for excess copies of p4d/p4p/p4broker in PATH.
* Added VERIFY_SDP_SKIP_TEST_LIST setting to instance_vars.template,
to define a standard way to have verify_sdp.sh always skip certain
tests for a site.
* Extended '-online' checks to check for bogus P4MASTERPORT, a common
config error.

Update test_SDP.py:
* Adjusted test suite to account for various changes in mkdirs.sh.
* Added 'dir' parameter to run_cmd() and sudo_cmd(), to run a
command from a specified directory (as required to test new
mkdirs.sh)
* Added check_links() similar to existing check_dirs() function.

=== Upgrade Process Changes ===

Made /p4/common/bin/p4d/p4/p4broker/p4p shell script rather than binary.

This changes the way SDP new binaries are staged for upgrade.  For
safety, exes are now staged to a directory outside the PATH, the
/p4/sdp/exes folder. A new 'get_latest_exes.sh' script simplifies
the task of pulling executables from the Perforce FTP server. This
can be used 'as is' for environments with outbound internet access,
and is useful in any case to describe how to acquire binaries.

This addresses an issue where a p4d binary staged for a future
upgrade might be called before the actual upgrade is performed.

upgrade.sh v4.0.0:
* All preflight checks are now done first. Added '-p' to abort after preflight.
* Added '-n' to show what would be done before anything is executed.
* Minimalist logic to start/stop only servers that are upgrade, and apply
upgrades only as needed.
* Staging of exes for upgrade is now separate from /p4/common/bin
* Improved in-code docs, added '-h' and '-man' options.
* Retained pre/post P4D 2019.1 upgrade logic.
#72 26928 Robert Cowham Fix problem with line breaks when testing for journal corruption.
Also softened the error message to avoid unnecessary alarm for users!
Observed "cd -" not working on AWS Linux box. Changed to pushd/popd.
#71 26819 C. Thomas Tyler Added test for '-recent' flag to p4verify.sh.
#70 26734 Robert Cowham Fix failing tests with DB1/DB2 settings
#69 26718 Robert Cowham Rename P4MASTER to P4MASTERHOST for clarity with comments in:
- mkdirs.cfg/mkdirs.sh
- p4_<instance>.vars
- other files which reference
Remove unnecessary sed for p4p.template
#68 26477 C. Thomas Tyler Patch to rotate_journal.sh.
Added coverage for rotate_journal.sh to test suite.
#67 26464 C. Thomas Tyler Adjusted test suite order-of-tests issue.
 The DB refresh must come
before the journal corruption test, and the load_checkpoint.sh test
must come after it.
#66 26463 C. Thomas Tyler Corrected call to readLog().
in test suite.
#65 26462 C. Thomas Tyler Test tweaks.
#64 26461 C. Thomas Tyler Added new test for refresh_P4ROOT_from_offline_db.sh.
#63 26460 C. Thomas Tyler Tweaked test parameters, and add call to actually execute the new test.
#62 26459 C. Thomas Tyler test tweak.
#61 26458 C. Thomas Tyler Added test for load_checkpoint.sh, doomed to initial failure.
#60 26389 C. Thomas Tyler Adjusted expectations of verify_sdp.sh check.
 Instead of 'ALL CLEAN',
we now expect 'NO ERRORS' and an indication of 1 warning, due to the
addition of a check for a license file that does not exist.  A missing
license file is considered a warning rather than an error.

The test suite doesn't exercise the new 'so many days left until your
license expires' check, but that can't be reliably tested without
a license file generator.
#59 26080 Robert Cowham Fix typo and failing test
#58 25970 Robert Cowham Detect and warn on journal corruption
#57 25575 Robert Cowham Updates to SDP and tests for 19.1 upgrades

#review @tom_tyler
#56 25569 Robert Cowham Fix string format error
#55 25558 Robert Cowham Add retries to all attempts to connect()
#54 25556 Robert Cowham Add a retry to avoid transient test failures
#53 25555 Robert Cowham Handle new 19.1 upgrade.sh requirements
#52 25554 Robert Cowham Automated tests for calling upgrade.sh
#51 25457 Robert Cowham Call verify_sdp.sh (with fixes to make that work)

#review @tom_tyler
#50 25252 C. Thomas Tyler Attempted fix of test suite with: s/Master/master/g
#49 25231 Robert Cowham Output line numbers on mkdirs errors
Tweak tests to cope when parsing output
#48 25190 Robert Cowham Change to make instance dir a directory under /p4 (so on root volume).

This avoids potential problems where /hxdepots is shared on multiple instances.
It also improves performance often.
Left the option to put /p4/common on a shared volume if required.
#47 25148 Robert Cowham Submit test updates
#46 25147 Robert Cowham Made -test parameter work again
and included tests to detect future breakage!
#45 25043 C. Thomas Tyler Prepared patch SDP/MultiArch/2018.1/23583.p2

Changes:
* In README, moved 'Supported Open Source Software' section
to the top of the page, to make Support position more visible.
* Copyright updated to 2019.
* Cleanup of excess script recreate_db_sync_replica.sh
* Re-removal of previously deleted script recreate_db_checkpoint.sh,
and corresponding removal from test suite.
* This patch tarball will also contain some Docker test suite updates
already staged in main.

By-passing review to trigger an automated test.
#44 24407 C. Thomas Tyler Recovering recreate_db_checkpoint.sh and undoing other changes done in
change @24393 as a quick patch/fix for SDP-341.

Also uncommenting the test for this script as the script is being
re-added.

Bypassing pre-commit review for this, as I need to commit this before
making the next set of more review-worthy changes, including renaming
this script per discussion in review:
https://swarm.workshop.perforce.com/reviews/24394/#comments
#43 24405 C. Thomas Tyler Commented out tests related to recreate_db_checkpoint.sh for now.
#42 23792 C. Thomas Tyler Removed test assertion for p4verify.sh expecting to see '//archive' depot
referenced.  The verification logic avoids depots of type archive entirely.
#41 23763 C. Thomas Tyler Updated test assertions.
#40 23305 C. Thomas Tyler Adjusted test for running mkdirs.sh to accept more verbose
mkdirs.sh output.
#39 22387 Robert Cowham Fix journal rotation off-by-one error
Also make sure that -jj rotation specifies prefix.
#38 22342 Robert Cowham Fixed hang when spawning subprocess - introduced by previous attempt to get output
#37 22249 Robert Cowham Improve error messages when tests fail for emails etc.
#36 22151 Robert Cowham Fix failing tests with new mkdirs.cfg
#35 21308 C. Thomas Tyler Attempted to fix failing tests.
#34 21267 Robert Cowham Fix failing tests.
Changed default filesystem names to:
DB1=hxmetadata1
DB2=hxmetadata2
DD=hxdepots
LG=hxlogs

hx=Helix. Updated docs to reflect this
#33 20946 Robert Cowham Fix tests
#32 20878 Robert Cowham Refactor - on the way to possible cross platform script.
#31 20435 C. Thomas Tyler Tweaked the test so it now recognizes as valid some new
output from mkdirs.sh.
#30 19851 Robert Cowham Check for usable offline_db before creating checkpoint work file.
This avoids an error right at the start locking out the utility which
will fix said error!
#29 19527 Russell C. Jackson (Rusty) Added KEEPJNLS substitution.
#28 19410 Russell C. Jackson (Rusty) Made out on setting permissions consistent in mkdirs.
Added the new output as valid in test_SDP.
#27 19114 Russell C. Jackson (Rusty) Updated names for checkpoint scripts.
#26 19111 Russell C. Jackson (Rusty) Removed instance in journal check.
#25 19109 Russell C. Jackson (Rusty) just check for text: journal
#24 19108 Russell C. Jackson (Rusty) Fix live_checkpoint test output.
#23 19107 Russell C. Jackson (Rusty) Put fix in correct test for new journal rotation.
#22 19106 Russell C. Jackson (Rusty) Change to handle different output on journal rotation with new command.
#21 18936 C. Thomas Tyler De-emphasizing changeType=restricted, including deprecating
associated triggers.
#20 18623 Russell C. Jackson (Rusty) Account for new crontab file in cleanup.
#19 18600 Russell C. Jackson (Rusty) Change number of expected logs to 3.
#18 18595 Russell C. Jackson (Rusty) Fixed a log rotation bug that has been around for a long time.
If you rotated the journal
more times than KEEPCKPS and KEEPLOGS, the old method would remove all of your logs and
checkpoints because it didn't actually look at how many were on disk. Found the bug
while reviewing the test harness with Robert.

Adjusted the test harness to account for the change. (Stole from Robert's shelf.)
#17 18590 Robert Cowham Fix failing tests.
Change log filename format to use - instead of : as separator for date/time component
#16 18589 Russell C. Jackson (Rusty) Changed number of expected logs to 7 since I removed the lines that block the delete of 0-7 from backup_functions.sh
#15 18539 Robert Cowham Fix timing issue and test assertions for log cleaning.
#14 18537 Robert Cowham Tests for recreate_offline_db.sh
Refactor log file assertions.
#13 18536 Robert Cowham Check that log files are getting deleted as appropriate.
#12 18535 Robert Cowham Add option to specify single instance to run - useful for testing in some cases.
#11 16784 C. Thomas Tyler Routine Merge Down to dev from main using:
p4 -s merge -n -b perforce_software-sdp-dev
#10 16563 C. Thomas Tyler Routine Merge Down to dev from main using:
p4 merge -b perforce_software-sdp-dev
p4 resolve -as
#9 16373 C. Thomas Tyler Routine Merge Down to dev from main using:
p4 merge -b perforce_software-sdp-dev
#8 16335 C. Thomas Tyler Routine Merge Down to dev from main using:
p4 merge -b perforce_software-sdp-dev
#7 14136 C. Thomas Tyler Routine merge down to dev from main for SDP
using perforce_software-sdp-dev.
#6 13910 C. Thomas Tyler Updated test suite.
#5 12245 C. Thomas Tyler Routine merge down from main.
#4 12116 Russell C. Jackson (Rusty) Update dev from main.
#3 12028 C. Thomas Tyler Refreshed SDP dev branch, merging down from main.
#2 11541 Russell C. Jackson (Rusty) Keeping dev up to date.
#1 10638 C. Thomas Tyler Populate perforce_software-sdp-dev.
//guest/perforce_software/sdp/main/Server/test/test_SDP.py
#1 10148 C. Thomas Tyler Promoted the Perforce Server Deployment Package to The Workshop.