#!/usr/bin/env python3 ################################################################################ # # Copyright (c) 2017, Perforce Software, Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL PERFORCE SOFTWARE, INC. BE LIABLE FOR ANY # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF # THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # DATE # # $Date: 2024/08/14 $ # # SYNOPSIS # # IntegHistoryRebuild.py # # DESCRIPTION # # This script creates a Helix server (metadata and dummy depot files) from # data collected by the IntegHistory.py script (stored in an # IntegHistory.log.gz file). # # The script was originally designed as a support tool to recreate quickly # integration issues without the need of a checkpoint. # # The Helix server is created in the IntegHistoryRebuildRoot directory # under the current directory (automatically removed if it exists). 
# The created Helix server can be accessed directly via rsh using the # P4CONFIG file created in the P4ROOT directory (P4CONFIG must be set): # P4PORT = rsh:p4d -r IntegHistoryRebuildRoot -L log -vserver=3 -i # P4USER = perforce # P4CLIENT = IntegHistoryRebuild # # REQUIREMENTS # # * Python (tested with 2.7 and 3.4) # * P4Python # * Helix p4d executable in your path # * IntegHistory.py in your current directory or in your PYTHONPATH # or in your path (Linux only) # ################################################################################ from __future__ import print_function from P4 import P4, P4Exception import re import sys import traceback import os import shutil import time import mimetypes import gzip import difflib import binascii from subprocess import Popen, PIPE, STDOUT import shlex sys.path.append('.') import argparse import subprocess try: import IntegHistory except ImportError: pass python3 = sys.version_info[0] >= 3 class DbSchema: def __init__(self, schema): self.table = schema["table"] self.version = schema["version"] self.name = schema["name"] self.type = schema["type"] class DbRecord: def __init__(self, schema): self.schema = schema self.row = {} for i in range(len(self.schema.name)): if self.schema.type[i] == "key" or self.schema.type[i] == "text": self.row[self.schema.name[i]] = "" else: self.row[self.schema.name[i]] = 0 def patch(self, jnl, how): if how == "put": jnl.write("@pv@ ") elif how == "replace": jnl.write("@rv@ ") elif how == "remove": jnl.write("@dv@ ") jnl.write(self.schema.version + " @" + self.schema.table + "@ ") for i in range(len(self.schema.name)): if self.schema.type[i] == "key" or self.schema.type[i] == "text": jnl.write("@") jnl.write(str(self.row[self.schema.name[i]]).replace("@","@@")) if self.schema.type[i] == "key" or self.schema.type[i] == "text": jnl.write("@ ") else: jnl.write(" ") jnl.write("\n") class DbConfig: def __init__(self, schema): self.dbConfigDbSchema = DbSchema(schema) self.dbConfigList = [] def 
class DbCounters:
    """In-memory db.counters table; tracks the highest 'change' counter."""

    def __init__(self, schema):
        self.dbCountersDbSchema = DbSchema(schema)
        self.dbCountersList = []
        self.maxChangelist = 0

    def add(self, name, value):
        """Add a counter unless one with the same name exists.

        NOTE(fix): the original tested membership of the name string in a
        list of DbRecord objects, which never matches; the dedupe guard was
        a no-op.  First add now wins.
        """
        if not any(r.row["COname"] == name for r in self.dbCountersList):
            record = DbRecord(self.dbCountersDbSchema)
            record.row["COname"] = name
            record.row["COvalue"] = value
            self.dbCountersList.append(record)
            if name == "change":
                # Remember the changelist high-water mark for later use.
                self.maxChangelist = value

    def patch(self, jnl):
        """Write all counters to the journal as @pv@ records."""
        for counter in self.dbCountersList:
            counter.patch(jnl, "put")


class DbUser:
    """In-memory db.user table."""

    def __init__(self, schema):
        self.dbUserDbSchema = DbSchema(schema)
        self.dbUserList = []

    def add(self, user, email, update, access, fullname):
        """Add a user unless one with the same name exists.

        NOTE(fix): original membership test compared the user string
        against DbRecord objects and never matched; first add now wins.
        """
        if not any(r.row["USuser"] == user for r in self.dbUserList):
            record = DbRecord(self.dbUserDbSchema)
            record.row["USuser"] = user
            record.row["USemail"] = email
            record.row["USupdate"] = update
            record.row["USaccess"] = access
            record.row["USfullname"] = fullname
            self.dbUserList.append(record)

    def patch(self, jnl):
        """Write all users to the journal as @pv@ records."""
        for user in self.dbUserList:
            user.patch(jnl, "put")


class DbDepot:
    """In-memory db.depot table; knows how to derive stream roots."""

    def __init__(self, schema):
        self.dbDepotDbSchema = DbSchema(schema)
        self.dbDepotList = []

    def add(self, name, type, depth, map):
        """Add a depot unless one with the same name exists.

        NOTE(fix): original membership test compared the name string
        against DbRecord objects and never matched; first add now wins.
        """
        if not any(r.row["DPname"] == name for r in self.dbDepotList):
            record = DbRecord(self.dbDepotDbSchema)
            record.row["DPname"] = name
            record.row["DPtype"] = type
            record.row["DPextra"] = depth
            record.row["DPmap"] = map
            self.dbDepotList.append(record)

    def getStreamPath(self, depotFile):
        """Return the stream root of `depotFile` (e.g. "//depot/stream"),
        or "" when its depot is not a stream depot (no DPextra depth).

        NOTE(fix): the original compared ``char == 47`` under Python 3, but
        iterating a str yields 1-character strings (never ints), so no
        slash was ever counted and the function always fell through on
        Python 3.  Comparing against '/' works on both Python 2 and 3.
        The original also initialised streamPath to "" and then immediately
        overwrote it with 0; the dead assignment is dropped so the
        not-found result is consistently the empty string.
        """
        streamPath = ""
        streamDepth = 0
        depot = depotFile.split('/')[2]
        for d in self.dbDepotList:
            if d.row["DPname"] == depot:
                if d.row["DPextra"]:
                    streamDepth = int(d.row["DPextra"])
                break
        if streamDepth > 0:
            # The stream root ends just before slash number depth+3
            # ("//depot/" accounts for the first two, plus `depth` path
            # components for the stream name itself).
            nbSlash = 0
            for i, char in enumerate(depotFile):
                if char == '/':
                    nbSlash += 1
                    if nbSlash > streamDepth + 2:
                        streamPath = depotFile[:i]
                        break
        return(streamPath)

    def patch(self, jnl):
        """Write all depots to the journal as @pv@ records."""
        for depot in self.dbDepotList:
            depot.patch(jnl, "put")
int(d.row["DPextra"]) break if streamDepth > 0: nbSlash = 0 for i, char in enumerate(depotFile): if (not python3 and char == '/') or (python3 and char == 47): nbSlash += 1 if nbSlash > streamDepth + 2: streamPath = depotFile[:i] break return(streamPath) def patch(self, jnl): for depot in self.dbDepotList: depot.patch(jnl, "put") class DbStream: def __init__(self, schema): self.dbStreamDbSchema = DbSchema(schema) self.dbStreamDict = {} def add(self, STstream, STparent, STtitle, STtype, STpreview, STparentview): if STstream not in self.dbStreamDict: record = DbRecord(self.dbStreamDbSchema) record.row["STstream"] = STstream record.row["STparent"] = STparent record.row["STtitle"] = STtitle record.row["STtype"] = STtype record.row["STpreview"] = STpreview if "STparentview" in self.dbStreamDbSchema.name: record.row["STparentview"] = STparentview self.dbStreamDict[STstream] = record def update(self, STstream, STchange, STcopychg, STmergechg, SThighchg, SThash, STstatus): self.dbStreamDict[STstream].row["STchange"] = STchange.replace("default","0") self.dbStreamDict[STstream].row["STcopychg"] = STcopychg.replace("default","0") self.dbStreamDict[STstream].row["STmergechg"] = STmergechg.replace("default","0") self.dbStreamDict[STstream].row["SThighchg"] = SThighchg.replace("default","0") self.dbStreamDict[STstream].row["SThash"] = SThash self.dbStreamDict[STstream].row["STstatus"] = STstatus def isTaskStream(self, stream): isTask = False if stream in self.dbStreamDict: type = self.dbStreamDict[stream].row["STtype"] if type == 4 or type == 5: isTask = True return(isTask) def patch(self, jnl): for stream in self.dbStreamDict: self.dbStreamDict[stream].patch(jnl, "put") class DbDomain: def __init__(self, schema): self.dbDomainDbSchema = DbSchema(schema) self.dbDomainList = [] def add(self, name, type, mount, owner, update, access, options, desc): if name not in self.dbDomainList: record = DbRecord(self.dbDomainDbSchema) record.row["DOname"] = name record.row["DOtype"] = type 
record.row["DOmount"] = mount record.row["DOowner"] = owner record.row["DOupdate"] = update record.row["DOaccess"] = access record.row["DOoptions"] = options record.row["DOdesc"] = desc self.dbDomainList.append(record) def patch(self, jnl): for domain in self.dbDomainList: domain.patch(jnl, "put") class DbTemplate: def __init__(self, schema): self.dbTemplateDbSchema = DbSchema(schema) self.dbTemplateList = [] self.seq = 0 def add(self, name, change, parent, type, path, vfile, dfile, cmap): record = DbRecord(self.dbTemplateDbSchema) record.row["TVname"] = name record.row["TVchange"] = change record.row["TVseq"] = self.seq record.row["TVparent"] = parent record.row["TVtype"] = type record.row["TVpath"] = path record.row["TVvfile"] = vfile record.row["TVdfile"] = dfile if "TVcmap" in self.dbTemplateDbSchema.name: if self.dbTemplateDbSchema.type[self.dbTemplateDbSchema.name.index("TVcmap")] == "int" and cmap == "": record.row["TVcmap"] = -1 else: record.row["TVcmap"] = cmap self.dbTemplateList.append(record) self.seq = self.seq + 1 def patch(self, jnl): for template in self.dbTemplateList: template.patch(jnl, "put") class DbView: def __init__(self, schema): self.dbViewDbSchema = DbSchema(schema) self.dbViewList = [] def add(self, name, seq, mapflag, vfile, dfile): record = DbRecord(self.dbViewDbSchema) record.row["VIname"] = name record.row["VIseq"] = seq record.row["VImapflag"] = mapflag record.row["VIvfile"] = vfile record.row["VIdfile"] = dfile self.dbViewList.append(record) def patch(self, jnl): for view in self.dbViewList: view.patch(jnl, "put") class DbInteged: def __init__(self, schema): self.dbIntegedDbSchema = DbSchema(schema) self.dbIntegedList = [] def add(self, tfile, ffile, sfrev, efrev, strev, etrev, how, change): record = DbRecord(self.dbIntegedDbSchema) record.row["INtfile"] = tfile record.row["INffile"] = ffile record.row["INsfrev"] = sfrev record.row["INefrev"] = efrev record.row["INstrev"] = strev record.row["INetrev"] = etrev record.row["INhow"] = 
how record.row["INchange"] = change self.dbIntegedList.append(record) def patch(self, jnl): for integed in self.dbIntegedList: integed.patch(jnl, "put") class DbIntegtx: def __init__(self, schema): self.dbIntegtxDbSchema = DbSchema(schema) self.dbIntegtxList = [] def add(self, tfile, ffile, sfrev, efrev, strev, etrev, how, change): record = DbRecord(self.dbIntegtxDbSchema) record.row["INtfile"] = tfile record.row["INffile"] = ffile record.row["INsfrev"] = sfrev record.row["INefrev"] = efrev record.row["INstrev"] = strev record.row["INetrev"] = etrev record.row["INhow"] = how record.row["INchange"] = change self.dbIntegtxList.append(record) def patch(self, jnl): for integed in self.dbIntegtxList: integed.patch(jnl, "put") class DbRev: def __init__(self, schema): self.dbRevDbSchema = DbSchema(schema) self.dbRevList = [] def isIncluded(self, other): included = False for rev in self.dbRevList: if (rev.row["REdfile"] == other.row["REdfile"] and rev.row["RErev"] == other.row["RErev"]): included = True break return(included) def add(self, dfile, rev, type, action, change, date, modtime, digest, size, traitlot, alazy, afile, arev, atype): record = DbRecord(self.dbRevDbSchema) record.row["REdfile"] = dfile record.row["RErev"] = rev record.row["REtype"] = type record.row["REaction"] = action record.row["REchange"] = change record.row["REdate"] = date record.row["REmodtime"] = modtime record.row["REdigest"] = digest record.row["REsize"] = size record.row["REtraitlot"] = traitlot record.row["REalazy"] = alazy record.row["REafile"] = afile record.row["REarev"] = arev record.row["REatype"] = atype if not self.isIncluded(record): self.dbRevList.append(record) def patch(self, jnl): for rev in self.dbRevList: rev.patch(jnl, "put") def getAction(self, dfile, revnum): action = -1 for rev in self.dbRevList: if rev.row["REdfile"] == dfile and rev.row["RErev"] == revnum: action = rev.row["REaction"] break return(action) def getREafile(self, dfile, revnum): afile = "" for rev in 
class DbRevtx:
    """In-memory db.revtx table (task-stream revision records)."""

    def __init__(self, schema):
        self.dbRevtxDbSchema = DbSchema(schema)
        self.dbRevtxList = []

    def isIncluded(self, other):
        """True if a record with the same REdfile/RErev is already stored."""
        target = (other.row["REdfile"], other.row["RErev"])
        for rec in self.dbRevtxList:
            if (rec.row["REdfile"], rec.row["RErev"]) == target:
                return(True)
        return(False)

    def add(self, dfile, rev, type, action, change, date, modtime, digest,
            size, traitlot, alazy, afile, arev, atype):
        """Add one revtx record, skipping duplicates (same dfile+rev)."""
        rec = DbRecord(self.dbRevtxDbSchema)
        rec.row.update({
            "REdfile": dfile,
            "RErev": rev,
            "REtype": type,
            "REaction": action,
            "REchange": change,
            "REdate": date,
            "REmodtime": modtime,
            "REdigest": digest,
            "REsize": size,
            "REtraitlot": traitlot,
            "REalazy": alazy,
            "REafile": afile,
            "REarev": arev,
            "REatype": atype,
        })
        if not self.isIncluded(rec):
            self.dbRevtxList.append(rec)

    def patch(self, jnl):
        """Write all revtx records to the journal as @pv@ records."""
        for rec in self.dbRevtxList:
            rec.patch(jnl, "put")

    def getREalazy(self, dfile, revnum):
        """Return the REalazy flag of dfile#revnum, or -1 when not found."""
        for rec in self.dbRevtxList:
            if rec.row["REdfile"] == dfile and rec.row["RErev"] == revnum:
                return(rec.row["REalazy"])
        return(-1)
def getREalazy(self, dfile, revnum): lazy = -1 for rev in self.dbRevtxList: if rev.row["REdfile"] == dfile and rev.row["RErev"] == revnum: lazy = rev.row["REalazy"] break return(lazy) class DbTraits: def __init__(self, schema): self.dbTraitsDbSchema = DbSchema(schema) self.traitList = [] def add(self, traitlot, name, type, value): record = DbRecord(self.dbTraitsDbSchema) record.row["TTtraitlot"] = traitlot record.row["TTname"] = name record.row["TTtype"] = type record.row["TTvalue"] = str(int(len(value) / 2)) + " " + value self.traitList.append(record) def patch(self, jnl): for trait in self.traitList: trait.patch(jnl, "put") class DbChange: def __init__(self, schema): self.dbChangeDbSchema = DbSchema(schema) self.dbChangeList = [] def add(self, change, key, client, user, date, status, desc, root, importedBy): if not change in self.dbChangeList: record = DbRecord(self.dbChangeDbSchema) record.row["CHchange"] = change record.row["CHkey"] = key record.row["CHclient"] = client record.row["CHuser"] = user record.row["CHdate"] = date record.row["CHstatus"] = status record.row["CHdesc"] = desc record.row["CHroot"] = root record.row["CHimporter"] = importedBy self.dbChangeList.append(record) def patch(self, jnl): for change in self.dbChangeList: change.patch(jnl, "put") class DbDesc: def __init__(self, schema): self.dbDescDbSchema = DbSchema(schema) self.dbDescList = [] def add(self, key, desc): if not key in self.dbDescList: record = DbRecord(self.dbDescDbSchema) record.row["DEkey"] = key record.row["DEdesc"] = desc self.dbDescList.append(record) def patch(self, jnl): for desc in self.dbDescList: desc.patch(jnl, "put") class DbProtect: def __init__(self, schema): self.dbProtectDbSchema = DbSchema(schema) self.dbProtectList = [] def add(self, seq, group, user, host, perm, mapflag, dfile): if not seq in self.dbProtectList: record = DbRecord(self.dbProtectDbSchema) record.row["PRseq"] = seq record.row["PRgroup"] = group record.row["PRuser"] = user record.row["PRhost"] = 
host record.row["PRperm"] = perm record.row["PRmapflag"] = mapflag record.row["PRdfile"] = dfile self.dbProtectList.append(record) def patch(self, jnl): for protect in self.dbProtectList: protect.patch(jnl, "put") class IntegHistoryRebuild(): def __init__(self, logName): self.logPath = os.getcwd() if logName: if not os.path.dirname(logName): self.logName = os.path.abspath(self.logPath + "/" + logName) else: self.logName = logName else: self.logName = os.path.abspath(self.logPath + "/IntegHistory.log.gz") try: if mimetypes.guess_type(self.logName)[1] == 'gzip': self.log = gzip.open(self.logName, "rt") else: self.log = open(self.logName) except IOError: sys.exit("Error: can\'t open " + self.logName) self.serverVersion = self.getP4dVersion() self.jnlPatch = os.path.abspath(self.logPath + "/" + "jnl.patch") self.serverRoot = os.path.abspath(os.getcwd() + "/" + "IntegHistoryRebuildRoot") self.files = [] self.createServerRoot() os.system("p4d -r \"" + self.serverRoot + "\" -L log -xu >> log") self.p4 = P4() if not self.p4.env('P4CONFIG'): sys.exit("Error: P4CONFIG variable is needed and must be set!") self.p4.port = "rsh:p4d -r \""+ self.serverRoot + "\" " + "-L log -vserver=3 -i" self.p4.user = "super" self.p4.client = "IntegHistoryRebuild" self.p4.charset = "none" try: self.p4.connect() self.dbConfig = DbConfig(self.p4.run_dbschema("db.config")[0]) self.dbCounters = DbCounters(self.p4.run_dbschema("db.counters")[0]) self.dbUser = DbUser(self.p4.run_dbschema("db.user")[0]) self.dbDepot = DbDepot(self.p4.run_dbschema("db.depot")[0]) self.dbStream = DbStream(self.p4.run_dbschema("db.stream")[0]) self.dbDomain = DbDomain(self.p4.run_dbschema("db.domain")[0]) self.dbTemplate = DbTemplate(self.p4.run_dbschema("db.template")[0]) self.dbView = DbView(self.p4.run_dbschema("db.view")[0]) self.dbInteged = DbInteged(self.p4.run_dbschema("db.integed")[0]) self.dbIntegtx = DbIntegtx(self.p4.run_dbschema("db.integtx")[0]) self.dbRev = DbRev(self.p4.run_dbschema("db.rev")[0]) 
self.dbRevtx = DbRevtx(self.p4.run_dbschema("db.revtx")[0]) self.dbTraits = DbTraits(self.p4.run_dbschema("db.traits")[0]) self.dbChange = DbChange(self.p4.run_dbschema("db.change")[0]) self.dbDesc = DbDesc(self.p4.run_dbschema("db.desc")[0]) self.dbProtect = DbProtect(self.p4.run_dbschema("db.protect")[0]) self.p4.disconnect() except P4Exception: sys.exit(traceback.format_exc()) self.info = {} self.branchName = "" def getP4dVersion(self): cmd = b"p4d -V" if sys.platform == 'win32': process = subprocess.Popen(cmd.decode(), shell = True, stdout = subprocess.PIPE, stderr = subprocess.PIPE) else: process = subprocess.Popen(cmd, shell = True, stdout = subprocess.PIPE, stderr = subprocess.PIPE) (output, err) = process.communicate() exitCode = process.wait() result = re.findall(b'Rev. P4D/.*?/(.*?)/.* .*', output) if not result: sys.exit("Error: no p4d executable found") return(result[0].decode("utf-8")) def toEpoch(self, date): return(str(int(time.mktime(time.strptime(date,'%Y/%m/%d %H:%M:%S')) - time.timezone))) def parseInfo(self, output): for result in output: match = re.match(r'\.\.\. configurable(.*?)\d+ (.*)', result, re.DOTALL) if match: if match.group(1) == "Name": name = match.group(2) elif match.group(1) == "Value": value = match.group(2).rstrip() self.addConfigurable(name, value) else: match = re.match(r'\.\.\. (.*?) (.*)', result, re.DOTALL) if match: self.info[match.group(1)] = match.group(2) def parseSet(self, output): for result in output: match = re.match(r'(.*?)=(.*?) .*', result, re.DOTALL) if match: self.info[match.group(1)] = match.group(2) def parseConfigure(self, output): type = name = value = "" for result in output: match = re.match(r'\.\.\. (.*?) 
(.*)', result, re.DOTALL) if match: if match.group(1) == "Type": type = match.group(2) elif match.group(1) == "Name": name = match.group(2) elif match.group(1) == "Value": value = match.group(2).rstrip() if type == "configure" or type == "tunable (configure)": self.addConfigurable(name, value) def addConfigurable(self, name, value): if not ("P4" in name or \ "serverlog" in name or \ "auth" in name or \ "replication" in name or \ "pull" in name or \ name == "security" or \ name == "journalPrefix" or \ name == "server.depot.root" or \ name == "template.client" or \ name == "serviceUser" or \ name == "serverid" or \ name == "submit.identity" or \ name == "client.readonly.dir" or \ name == "server.locks.dir"): self.dbConfig.add("any", name, value) def parseCounters(self, output): for result in output: match = re.match(r'(.*) = (.*)', result, re.DOTALL) if match: name = match.group(1) if not (name == "monitor" or name == "unicode" or name == "upgrade" or name == "security"): value = match.group(2).rstrip() self.dbCounters.add(name, value) def parseClient(self, output): for result in output: match = re.match(r'\.\.\. 
Stream (.*)', result, re.DOTALL) if match: self.info["Stream"] = match.group(1) def integHow(self, how, fromAction, toAction): value = 0 if how == "merge from": # integration with other changes value = 0 elif how == "merge into": # reverse merge value = 1 elif how == "branch from" and fromAction != 2: # integration was branch of file value = 2 elif how == "branch into": # reverse branch value = 3 elif how == "copy from": # integration took source file whole value = 4 elif how == "copy into": # reverse copy value = 5 elif how == "ignored" and fromAction != 2 and fromAction != 7 and toAction != 2 and toAction != 7: # integration ignored source changes value = 6 elif how == "ignored by" and fromAction != 2 and fromAction != 7 and toAction != 2 and toAction != 7: # reverse ignored value = 7 elif how == "delete from": # integration of delete value = 8 elif how == "delete into" and fromAction == 2 and (toAction == 2 or toAction == 7): # reverse delete value = 9 elif how == "edit into": # reverse of integration downgraded to edit value = 10 elif how == "add into": # reverse of branch downgraded to add value = 11 elif how == "edit from": # merge that the user edited value = 12 elif how == "add from": # branch downgraded to add value = 13 elif how == "moved from": # reverse of renamed file value = 14 elif how == "moved into": # file was renamed value = 15 elif how == "ignored" and (toAction == 2 or toAction == 7): # 'delete' target rev ignoring non-deleted source value = 16 elif how == "ignored by" and (fromAction == 2 or fromAction == 7): # non-deleted source ignored by 'delete' target rev value = 17 elif how == "ignored" and (fromAction == 2 or fromAction == 7): # 'integrate' target rev ignoring deleted source value = 18 elif how == "ignored by" and (toAction == 2 or toAction == 7): # deleted source ignored by 'integrate' target rev value = 19 elif how == "branch from": # deleted branch: branch from value = 20 elif how == "delete into": # deleted branch: delete into value 
= 21 elif how == "undid": # opposite of merge value = 22 elif how == "undone by": # undo reverse value = 23 elif how == "moved from/undid": # move + undo as one record value = 24 elif how == "moved into/undone": # move undo reverse value = 25 return(value) def addInteged(self, dict): startToRev = dict["startToRev"].replace("#","").replace("none", "0") endToRev = dict["endToRev"].replace("#","").replace("none", "0") toAction = self.dbRev.getAction(dict["toFile"], endToRev) startFromRev = dict["startFromRev"].replace("#","").replace("none", "0") endFromRev = dict["endFromRev"].replace("#","").replace("none", "0") fromAction = self.dbRev.getAction(dict["fromFile"], endFromRev) # Need to patch lbr values for branched move/delete db.rev records as "p4 fstat -Oc" does't provide these data if dict["how"] == "delete into" and fromAction == 7: self.dbRev.updateDeleteLbr(dict["toFile"], str(int(endToRev) - 1), dict["fromFile"], endFromRev) lazy = self.dbRevtx.getREalazy(dict["toFile"], endToRev) isTask = isSparse = False if lazy == -1: lazy = self.dbRevtx.getREalazy(dict["fromFile"], endFromRev) if not lazy == -1: if (lazy & 0x0002) == 0x0002: isSparse = True if (lazy & 0x0004) == 0x0004: isTask = True how = self.integHow(dict["how"], fromAction, toAction) if isTask: self.dbIntegtx.add(dict["toFile"], dict["fromFile"], startFromRev, endFromRev, startToRev, endToRev, how, dict["change"]) # need to create an extra dbIntegtx because "p4 integed" does not report it if not isSparse: self.dbIntegtx.add(dict["fromFile"], dict["toFile"], startToRev, endToRev, startFromRev, endFromRev, how + 1, dict["change"]) if not isTask or isSparse: self.dbInteged.add(dict["toFile"], dict["fromFile"], startFromRev, endFromRev, startToRev, endToRev, how, dict["change"]) def parseInteged(self, output): dict = {} for result in output: match = re.match(r'\.\.\. 
(toFile) (.*)', result, re.DOTALL) if match: if dict: self.addInteged(dict) dict = {} dict[match.group(1)] = match.group(2) else: match = re.search('\.\.\. (.*?) (.*)', result) if match: dict[match.group(1)] = match.group(2) if dict: self.addInteged(dict) def fileType(self, type): value = 0 if re.search('.*text.*', type): value = 0x0000000 type = type.replace("text","") elif re.search('.*binary.*', type): if re.search('.*F.*', type): value = 0x0010000 else: value = 0x0010003 type = type.replace("binary","") elif re.search('.*unicode.*', type): value = 0x0080000 type = type.replace("unicode","") elif re.search('.*symlink.*', type): value = 0x0040000 type = type.replace("symlink","") elif re.search('.*apple.*', type): value = 0x000C0000 type = type.replace("apple","") elif re.search('.*resource.*', type): value = 0x00050000 type = type.replace("resource","") elif re.search('.*utf16.*', type): value = 0x1080000 type = type.replace("utf16","") elif re.search('.*utf8.*', type): value = 0x1040000 type = type.replace("utf8","") if re.search('.*x.*', type): value = value | 0x0020000 if re.search('.*ko.*', type): value = value | 0x0010 elif re.search('.*k.*', type): value = value | 0x0020 if re.search('.*\+.*l.*', type): value = value | 0x0040 if re.search('.*w.*', type): value = value | 0x0100000 if re.search('.*m.*', type): value = value | 0x0200000 if re.search('.*C.*|.*c.*', type): value = value | 0x0000003 if re.search('.*u.*', type): value = value | 0x0000001 if re.search('.*F.*', type): value = value | 0x0000001 if re.search('^l.*', type): value = value | 0x0000001 if re.search('.*D.*', type): value = value & 0x10D0000 if re.search('.*X.*', type): value = value | 0x0000008 match = re.search('.*S(\d+).*|.*S.*', type) if match: value = value | 0x0080 if match.group(1) == "2": value = value | 0x0100 if match.group(1) == "3": value = value | 0x0200 if match.group(1) == "4": value = value | 0x0300 if match.group(1) == "5": value = value | 0x0400 if match.group(1) == "6": 
value = value | 0x0500 if match.group(1) == "7": value = value | 0x0600 if match.group(1) == "8": value = value | 0x0700 if match.group(1) == "9": value = value | 0x0800 if match.group(1) == "10": value = value | 0x0900 if match.group(1) == "16": value = value | 0x0A00 if match.group(1) == "32": value = value | 0x0B00 if match.group(1) == "64": value = value | 0x0C00 if match.group(1) == "128": value = value | 0x0D00 if match.group(1) == "256": value = value | 0x0E00 if match.group(1) == "512": value = value | 0x0F00 return(value) def actionType(self, action): value = 0 if action == "add": value = 0 elif action == "edit": value = 1 elif action == "delete": value = 2 elif action == "branch": value = 3 elif action == "integrate": value = 4 elif action == "import": value = 5 elif action == "purge": value = 6 elif action == "move/delete": value = 7 elif action == "move/add": value = 8 elif action == "archive": value = 9 return(value) def getRevStatus(self, lazy, sparse, task, charset): value = 0 if charset == "none": value = 0 elif charset == "utf8": value = 1 elif charset == "iso8859-1": value = 2 elif charset == "utf16-nobom": value = 3 elif charset == "shiftjis": value = 4 elif charset == "eucjp": value = 5 elif charset == "winansi": value = 6 elif charset == "cp850": value = 7 elif charset == "macosroman": value = 8 elif charset == "iso8859-15": value = 9 elif charset == "iso8859-5": value = 10 elif charset == "koi8-r": value = 11 elif charset == "cp1251": value = 12 elif charset == "utf16le": value = 13 elif charset == "utf16be": value = 14 elif charset == "utf16le-bom": value = 15 elif charset == "utf16be-bom": value = 16 elif charset == "utf16-bom": value = 17 elif charset == "utf8-bom": value = 18 elif charset == "utf32-nobom": value = 19 elif charset == "utf32le": value = 20 elif charset == "utf32be": value = 21 elif charset == "utf32le-bom": value = 22 elif charset == "utf32be-bom": value = 23 elif charset == "utf32": value = 24 elif charset == 
"UTF_8_UNCHECKED": value = 25 elif charset == "UTF_8_UNCHECKED_BOM": value = 26 elif charset == "cp949": value = 27 elif charset == "cp936": value = 28 elif charset == "cp950": value = 29 elif charset == "cp850": value = 30 elif charset == "cp858": value = 31 elif charset == "cp1253": value = 32 elif charset == "iso8859-7": value = 33 value = value << 24 if lazy == "1": value = value | 0x0001 if sparse == True: value = value | 0x0002 if task == True: value = value | 0x0004 return(value) def addRev(self, dict, traitsdict): if "depotFile" in dict: if traitsdict: for traitLot in traitsdict: traitType = traitsdict[traitLot]["type"] traitName = traitsdict[traitLot]["name"] if python3: traitValue = binascii.b2a_hex(bytes(traitsdict[traitLot]["value"],'ascii')).decode('ascii').upper() refsValue = binascii.b2a_hex(bytes(traitsdict[traitLot]["refs"],'ascii')).decode('ascii').upper() else: traitValue = binascii.b2a_hex(bytes(traitsdict[traitLot]["value"])).upper() refsValue = binascii.b2a_hex(bytes(traitsdict[traitLot]["refs"])).upper() self.dbTraits.add(traitLot, traitName, traitType, traitValue) self.dbTraits.add(traitLot, "refs", 0, refsValue) if "lbrFile" not in dict: if dict["headAction"] == "move/delete" and re.search('.*\+.*S(\d+).*|.*\+.*S.*', dict["headType"]): dict["lbrFile"] = self.dbRev.getREafile(dict["depotFile"], str(int(dict["headRev"])-1)) dict["lbrRev"] = self.dbRev.getREarev(dict["depotFile"], str(int(dict["headRev"])-1)) else: dict["lbrFile"] = dict["depotFile"] dict["lbrRev"] = "1." 
+ dict["headChange"] dict["lbrType"] = dict["headType"] lazy = self.getRevStatus(dict["lbrIsLazy"], dict["isSparse"], dict["isTask"], dict["headCharset"]) streamPath = self.dbDepot.getStreamPath(dict["depotFile"]) isTask = self.dbStream.isTaskStream(streamPath) if isTask: self.dbRevtx.add(dict["depotFile"], dict["headRev"], self.fileType(dict["headType"]), self.actionType(dict["headAction"]), dict["headChange"], dict["headTime"], dict["headModTime"], dict["digest"], dict["fileSize"], dict["traitLot"], lazy, dict["lbrFile"], dict["lbrRev"], self.fileType(dict["lbrType"])) if not isTask or dict["isSparse"]: self.dbRev.add(dict["depotFile"], dict["headRev"], self.fileType(dict["headType"]), self.actionType(dict["headAction"]), dict["headChange"], dict["headTime"], dict["headModTime"], dict["digest"], dict["fileSize"], dict["traitLot"], lazy, dict["lbrFile"], dict["lbrRev"], self.fileType(dict["lbrType"])) def parseFstat(self, output): dict = {} traitsdict = {} for result in output: match = re.match(r'\.\.\. depotFile (.*)', result, re.DOTALL) if match: self.addRev(dict, traitsdict) dict = {} traitsdict = {} dict["depotFile"] = match.group(1) dict["traitLot"] = 0 dict["digest"] = "00000000000000000000000000000000" dict["fileSize"] = "-1" dict["lbrIsLazy"] = 0 dict["isTask"] = False dict["isSparse"] = False dict["headCharset"] = "none" else: match = re.match(r'\.\.\. attr(\d+?)-refs (.*)', result, re.DOTALL) if match: if not match.group(1) in traitsdict: dict["traitLot"] = match.group(1) traitsdict[match.group(1)] = {} traitsdict[match.group(1)]["refs"] = match.group(2) else: match = re.match(r'\.\.\. attr(\d+?)-(.*) (.*)', result, re.DOTALL) if match: if not match.group(1) in traitsdict: dict["traitLot"] = match.group(1) traitsdict[match.group(1)] = {} traitsdict[match.group(1)]["type"] = 1 traitsdict[match.group(1)]["name"] = match.group(2) traitsdict[match.group(1)]["value"] = match.group(3) else: match = re.match(r'\.\.\. 
attrProp(\d+?)-(.*) (.*)', result, re.DOTALL) if match: if not match.group(1) in traitsdict: dict["traitLot"] = match.group(1) traitsdict[match.group(1)] = {} traitsdict[match.group(1)]["type"] = 2 traitsdict[match.group(1)]["name"] = match.group(2) traitsdict[match.group(1)]["value"] = match.group(3) else: match = re.match(r'\.\.\. (.*?) (.*)', result, re.DOTALL) if match: if not match.group(2): dict[match.group(1)] = True else: dict[match.group(1)] = match.group(2) if dict: self.addRev(dict, traitsdict) def getStatus(self, status, type): value = 0 if status == "pending" and type == "public": value = 0 elif status == "submitted" and type == "public": value = 1 elif status == "shelved" and type == "public": value = 2 elif status == "pending" and type == "restricted": value = 4 elif status == "submitted" and type == "restricted": value = 5 elif status == "shelved" and type == "restricted": value = 8 elif status == "shelved" and type == "promoted": value = 16 return(value) def parseChanges(self, output): dict = {} for result in output: match = re.match(r'\.\.\. (desc)(\n.*)', result, re.DOTALL) if match: dict[match.group(1)] = match.group(2) else: match = re.match(r'\.\.\. (.*?) 
(.*)', result, re.DOTALL) if match: dict[match.group(1)] = match.group(2) if dict: if not "oldChange" in dict: dict["oldChange"] = dict["change"] if not "desc" in dict: dict["desc"] = "" if not "changeImportedBy" in dict: dict["changeImportedBy"] = "" self.dbChange.add(dict["change"], dict["oldChange"], dict["client"], dict["user"], dict["time"], self.getStatus(dict["status"], dict["changeType"]), dict["desc"][:31], dict["path"], dict["changeImportedBy"]) self.dbDesc.add(dict["oldChange"], dict["desc"]) def depotType(self, type): value = 0 if type == "local": value = 0 elif type == "remote": value = 1 elif type == "spec": value = 2 elif type == "stream": value = 3 elif type == "archive": value = 4 elif type == "unload": value = 5 elif type == "tangent": value = 6 elif type == "graph": value = 6 return(value) def parseDepot(self, output): dict = {} for result in output: match = re.match(r'\.\.\. (Description)(\n.*)', result, re.DOTALL) if match: dict[match.group(1)] = match.group(2) else: match = re.match(r'\.\.\. (StreamDepth) .*/(.*)', result, re.DOTALL) if match: dict[match.group(1)] = match.group(2) else: match = re.match(r'\.\.\. (.*?) 
(.*)', result, re.DOTALL) if match: dict[match.group(1)] = match.group(2) if dict: if not "Owner" in dict: dict["Owner"] = "" if not "StreamDepth" in dict: dict["StreamDepth"] = "" if not "Description" in dict: dict["Description"] = "" self.dbDepot.add(dict["Depot"], self.depotType(dict["Type"]), dict["StreamDepth"], dict["Depot"] + "/...") self.dbDomain.add(dict["Depot"], self.getDomainType("depot"), "", dict["Owner"], self.toEpoch(dict["Date"]), self.toEpoch(dict["Date"]), 0, dict["Description"]) def streamType(self, type): value = 0 if type == "mainline": value = 0 elif type == "release": value = 1 elif type == "development": value = 2 elif type == "virtual": value = 3 elif type == "task": value = 4 elif type == "task - unloaded": value = 5 return(value) def streamPathType(self, type): value = 0 if type == "share": value = 0 elif type == "isolate": value = 1 elif type == "import": value = 2 elif type == "exclude": value = 3 elif type == "Remapped": value = 4 elif type == "Ignored": value = 5 elif type == "exclude": value = 6 elif type == "import+": value = 7 elif type == "inclusive+": value = 8 elif type == "import&": value = 15 elif type == "inclusive&": value = 16 return(value) def streamOptions(self, options): value = 0 if re.search('.*ownersubmit.*', options): value = value | 0x0001 if re.search('.* locked.*', options): value = value | 0x0002 if re.search('.* toparent.*', options): value = value | 0x0004 if re.search('.* fromparent.*', options): value = value | 0x0008 if re.search('.*mergeany.*', options): value = value | 0x0010 return(value) def parseStream(self, output): dict = {} mappings = {} for result in output: match = re.match(r'\.\.\. (\w+?)(\d+) (.*)', result, re.DOTALL) if match: key = match.group(1) index = int(match.group(2)) args = match.group(3) if not key in mappings: mappings[key] = {} match = re.match(r'"(.*?)" "(.*)"', args, re.DOTALL) if match: mappings[key][index] = [match.group(1), match.group(2)] else: match = re.match(r'(.*?) 
"(.*?)" "(.*)"', args, re.DOTALL) if match: mappings[key][index] = [match.group(1), match.group(2), match.group(3)] else: match = re.match(r'(.*?) "(.*?)" (.*)', args, re.DOTALL) if match: mappings[key][index] = [match.group(1), match.group(2), match.group(3)] else: match = re.match(r'(.*?) (.*?) "(.*)"', args, re.DOTALL) if match: mappings[key][index] = [match.group(1), match.group(2), match.group(3)] else: match = re.match(r'(.*?) "(.*)"', args, re.DOTALL) if match: mappings[key][index] = [match.group(1), match.group(2)] else: match = re.match(r'"(.*?)"', args, re.DOTALL) if match: mappings[key][index] = [match.group(1), ""] else: match = re.match(r'(.*?) (.*?) (.*)', args, re.DOTALL) if match: mappings[key][index] = [match.group(1), match.group(2), match.group(3)] else: match = re.match(r'(.*?) (.*)', args, re.DOTALL) if match: mappings[key][index] = [match.group(1), match.group(2)] else: match = re.match(r'(.*)', args, re.DOTALL) if match: mappings[key][index] = [match.group(1)] else: print("Please, report to support no match for ", result) else: match = re.match(r'\.\.\. (Description)(\n.*)', result, re.DOTALL) if match: dict[match.group(1)] = match.group(2) else: match = re.match(r'\.\.\. (\w+?) 
(.*)', result, re.DOTALL) if match: dict[match.group(1)] = match.group(2) if dict: update = access = 0 if "Update" in dict: update = self.toEpoch(dict["Update"]) if "Access" in dict: access = self.toEpoch(dict["Access"]) if not "Description" in dict: dict["Description"] = "" parentview = 1 if "ParentView" in dict: if dict["ParentView"] == "noinherit": parentview = 0 description = dict["Description"] self.dbStream.add(dict["Stream"], dict["Parent"], dict["Name"], self.streamType(dict["Type"]), update, parentview) self.dbDomain.add(dict["Stream"], self.getDomainType("stream"), "", dict["Owner"], update, access, self.streamOptions(dict["Options"]), description) for key in mappings: for seq in sorted(mappings[key]): cmap = "" if key == "Paths": vfile = dfile = "" list = mappings[key][seq] if len(list) > 1: vfile = list[1] if len(list) > 2: if list[2].find("@") == -1: dfile = list[2] else: dfile = list[2].split("@")[0] cmap = list[2].split("@")[1] self.dbTemplate.add(dict["Stream"], self.dbCounters.maxChangelist, dict["Parent"], self.streamType(dict["Type"]), self.streamPathType(list[0]), vfile, dfile, cmap) elif key == "Remapped": list = mappings[key][seq] vfile = list[1] dfile = list[0] self.dbTemplate.add(dict["Stream"], self.dbCounters.maxChangelist, dict["Parent"], self.streamType(dict["Type"]), self.streamPathType(key), vfile, dfile, cmap) elif key == "Ignored": list = mappings[key][seq] dfile = "..." + list[0] self.dbTemplate.add(dict["Stream"], self.dbCounters.maxChangelist, dict["Parent"], self.streamType(dict["Type"]), self.streamPathType(key), "", dfile, cmap) def parseIstat(self, output): dict = {} for result in output: match = re.match(r'\.\.\. (.*?) 
(.*)', result, re.DOTALL) if match: dict[match.group(1)] = match.group(2) if dict: self.dbStream.update(dict["stream"], dict["change"], dict["copyParent"], dict["mergeParent"], dict["mergeHighVal"], dict["branchHash"], dict["status"]) def getDomainType(self, type): value = 0 if type == "unloaded client": value = 67 elif type == "unloaded label": value = 76 elif type == "unloaded task stream": value = 83 elif type == "branch": value = 98 elif type == "client": value = 99 elif type == "depot": value = 100 elif type == "label": value = 108 elif type == "stream": value = 115 elif type == "typemap": value = 116 return value def getMapFlag(self, flag): value = 0 if flag == '-': value = 1 elif flag == '+': value = 2 elif flag == '$': value = 3 elif flag == '@': value = 4 elif flag == '&': value = 5 return value def parseBranch(self, output): dict = {} for result in output: match = re.match(r'\.\.\. (Description)(\n.*)', result, re.DOTALL) if match: dict[match.group(1)] = match.group(2) else: match = re.match(r'\.\.\. View(\d+) (.*) (.*)', result, re.DOTALL) if match: if "view" not in dict: dict["view"] = {} dict["view"][match.group(1)] = [match.group(2), match.group(3)] else: match = re.match(r'\.\.\. (.*?) 
(.*)', result, re.DOTALL) if match: dict[match.group(1)] = match.group(2) if dict: self.branchName = dict["Branch"] if not "Owner" in dict: dict["Owner"] = "" if not "Description" in dict: dict["Description"] = "" self.dbDomain.add(self.branchName, self.getDomainType("branch"), "", dict["Owner"], self.toEpoch(dict["Update"]), self.toEpoch(dict["Access"]), dict["Options"], dict["Description"]) for seq in dict["view"]: mapflag = self.getMapFlag(dict["view"][seq][0][0]) if mapflag > 0: dict["view"][seq][0] = dict["view"][seq][0][1:] self.dbView.add(dict["Branch"], seq, mapflag, dict["view"][seq][1], dict["view"][seq][0]) def parse(self): configure = False try: for line in self.log: list = eval(str(line)) cmd = list[0] output = list[1:] if cmd == "files": for file in output: self.files.append(file) else: match = re.search('^p4.*? (.*)', cmd) if match: if re.search(" info", cmd): self.parseInfo(output) elif re.search(" set", cmd): self.parseSet(output) elif re.search(" configure ", cmd): configure = True self.parseConfigure(output) elif re.search(" integed ", cmd): self.parseInteged(output) elif re.search(" fstat ", cmd): self.parseFstat(output) elif re.search(" depot ", cmd): self.parseDepot(output) elif re.search(" stream ", cmd): self.parseStream(output) elif re.search(" istat ", cmd): self.parseIstat(output) elif re.search(" changes ", cmd): self.parseChanges(output) elif re.search(" counters", cmd): self.parseCounters(output) elif re.search(" protects -m", cmd): self.parseInfo(output) elif re.search(" client ", cmd): self.parseClient(output) elif re.search(" branch ", cmd): self.parseBranch(output) except Exception as err: if type(err).__name__ == "UnicodeDecodeError": print("Error detected: need a Python 3 fix to deal with non-asii characters, run IntegHistoryRebuild.py with Python 2") else: print(traceback.format_exc()) exit() self.log.close() now = self.toEpoch(time.strftime('%Y/%m/%d %H:%M:%S',time.localtime())) self.dbUser.add("perforce", "perforce@perforce" , 
now, now, "write") self.dbProtect.add(1, 0, "perforce", "*", 31, 0, "//...") self.dbUser.add("super", "super@super" , now, now, "super") self.dbProtect.add(2, 0, "super", "*", 255, 0, "//...") self.dbConfig.add("any", "lbr.verify.out", 0) if not configure: if "unicode" in self.info: self.dbConfig.add("any", "unicode", "1") if "integEngine" in self.info: self.dbConfig.add("any", "dm.integ.engine", self.info["integEngine"]) if "integTweaks" in self.info: self.dbConfig.add("any", "dm.integ.tweaks", self.info["integTweaks"]) def createClient(self, serverRoot): clientRoot = serverRoot + "/workspace" os.mkdir(clientRoot) client = self.p4.fetch_client() client["Root"] = clientRoot if "Stream" in self.info: client["Stream"] = self.info["Stream"] self.p4.save_client(client) def createJnlPatch(self): jnlFile = open(self.jnlPatch, "wt") self.dbConfig.patch(jnlFile) self.dbCounters.patch(jnlFile) self.dbDepot.patch(jnlFile) self.dbUser.patch(jnlFile) self.dbStream.patch(jnlFile) self.dbDomain.patch(jnlFile) self.dbTemplate.patch(jnlFile) self.dbView.patch(jnlFile) self.dbInteged.patch(jnlFile) self.dbIntegtx.patch(jnlFile) self.dbRev.patch(jnlFile) self.dbRevtx.patch(jnlFile) self.dbTraits.patch(jnlFile) self.dbChange.patch(jnlFile) self.dbDesc.patch(jnlFile) self.dbProtect.patch(jnlFile) jnlFile.close() def getP4CONFIG(self): return(self.p4.env('P4CONFIG')) def getServerRoot(self): return(self.serverRoot) def createServerRoot(self): shutil.rmtree(self.serverRoot, ignore_errors = True) if not os.path.exists(self.serverRoot): os.mkdir(self.serverRoot) os.chdir(self.serverRoot) def createServer(self): caseFlag = "-C0" if self.info["caseHandling"] == "insensitive": caseFlag = "-C1" self.createServerRoot() shutil.move(self.jnlPatch, self.serverRoot) os.system("p4d -r \"" + self.serverRoot + "\" " + caseFlag + " -L log -jr jnl.patch > log 2>&1") os.system("p4d -r \"" + self.serverRoot + "\" " + caseFlag + " -L log -xu >> log 2>&1") os.system("p4d -r \"" + self.serverRoot + "\" " + 
caseFlag + " -L log -xx >> log 2>&1") os.system("p4d -r \"" + self.serverRoot + "\" " + caseFlag + " -L log -jr jnl.fix >> log 2>&1") os.system("p4d -r \"" + self.serverRoot + "\" " + caseFlag + " -L log -jc >> log 2>&1") self.port = "rsh:p4d -r \""+ self.serverRoot + "\" " + caseFlag + " -L log -vserver=3 -i" p4config = os.path.abspath(self.serverRoot + "/" + self.getP4CONFIG()) configFile = open(p4config, "wt") print("P4PORT="+ self.p4.port, file=configFile) if "permMax" in self.info and self.info["permMax"] == "super": user = "super" else: user = "perforce" print("P4USER=" + user, file=configFile) print("P4CLIENT=" + self.p4.client, file=configFile) if "unicode" in self.info: os.system("p4d -r \"" + self.serverRoot + "\" -L log -xi >> log") self.p4.charset = "utf8" if "P4CHARSET" in self.info: self.p4.charset = self.info["P4CHARSET"] print("P4CHARSET=" + self.p4.charset, file=configFile) configFile.close() p4version = os.path.abspath(self.serverRoot + "/.p4version") versionFile = open(p4version, "wt") print("r" + self.serverVersion[2:], file=versionFile) versionFile.close() if float(self.serverVersion) >= 2019.1: os.system("p4d -r \"" + self.serverRoot + "\" -L log -xU BuildStorage191 >> log") self.p4.connect() if float(self.serverVersion) >= 2019.1: try: self.p4.run_storage("-r") except P4Exception: pass self.createClient(self.serverRoot) def createFullFile(self, fullFile, digest, fileSize): fullDir = os.path.dirname(fullFile) if not os.path.exists(fullDir): os.makedirs(fullDir) full = open(fullFile, "wt") print(digest + " " + fileSize, file=full) full.close() def createGzipFile(self, gzipFile, digest, fileSize): gzipDir = os.path.dirname(gzipFile) if not os.path.exists(gzipDir): os.makedirs(gzipDir) gz = gzip.open(gzipFile, "wt") print(digest + " " + fileSize, file=gz) gz.close() def createRCSFile(self, rcsFile, lbrRev, date, digest, fileSize): rcsDir = os.path.dirname(rcsFile) if not os.path.exists(rcsDir): os.makedirs(rcsDir) rcs = open(rcsFile, "wt") 
print("head " + lbrRev + ";", file=rcs) print("access ;", file=rcs) print("symbols ;", file=rcs) print("locks ;comment @@;", file=rcs) print("", file=rcs) print("", file=rcs) print(lbrRev, file=rcs) print("date " + date + "; author p4; state Exp;", file=rcs) print("branches ;", file=rcs) print("next ;", file=rcs) print("", file=rcs) print("", file=rcs) print("desc", file=rcs) print("@@", file=rcs) print("", file=rcs) print("", file=rcs) print(lbrRev, file=rcs) print("log", file=rcs) print("@@", file=rcs) print("text", file=rcs) print("@" + digest + " " + fileSize, file=rcs) print("@", file=rcs) rcs.close() def updateRCSFile(self, rcsFile, lbrRev, date, digest, fileSize): if not os.path.exists(rcsFile): self.createRCSFile(rcsFile, lbrRev, date, digest, fileSize) else: rcs = open(rcsFile, "rt") rcstmp = open(rcsFile+".tmp", "wt") for line in rcs.read().splitlines(): match = re.search('^next ;', line) if match: print("next "+ lbrRev + ";", file=rcstmp) print("", file=rcstmp) print(lbrRev, file=rcstmp) print("date " + date + "; author p4; state Exp;", file=rcstmp) print("branches ;", file=rcstmp) print("next ;", file=rcstmp) else: print(line, file=rcstmp); print("", file=rcstmp) print("", file=rcstmp) print(lbrRev, file=rcstmp) print("log", file=rcstmp) print("@@", file=rcstmp) print("text", file=rcstmp) print("@d1 1", file=rcstmp) print("a1 1", file=rcstmp) print(digest + " " + fileSize, file=rcstmp) print("@", file=rcstmp) rcs.close() rcstmp.close() os.remove(rcsFile) os.rename(rcsFile+".tmp", rcsFile) def createServerDepotFiles(self): for rev in self.dbRev.getList(): if not int(rev.row["REalazy"]) & 0x0001 == 1 and not (rev.row["REaction"] == 2 or rev.row["REaction"] == 7): serverFileType = rev.row["REatype"] & 0x000F reafile = rev.row["REafile"] if self.info["caseHandling"] == "insensitive": reafile = reafile.lower() if serverFileType == 0: rcsFile = os.path.abspath(reafile.replace("//", self.serverRoot + "/") + ",v") self.updateRCSFile(rcsFile, rev.row["REarev"], 
time.strftime("%Y.%m.%d.%H.%M.%S", time.localtime(int(rev.row["REdate"]))), rev.row["REdigest"], rev.row["REsize"]) elif serverFileType == 1: fullFile = os.path.abspath(reafile.replace("//", self.serverRoot + "/") + ",d/" + rev.row["REarev"]) self.createFullFile(fullFile, rev.row["REdigest"], rev.row["REsize"]) elif serverFileType == 3: gzipFile = os.path.abspath(reafile.replace("//", self.serverRoot + "/") + ",d/" + rev.row["REarev"] + ".gz") self.createGzipFile(gzipFile, rev.row["REdigest"], rev.row["REsize"]) try: self.p4.run_verify("-v", "//...") except P4Exception as e: print(e) pass def filetolist(self, file): alist = [] if mimetypes.guess_type(file)[1] == 'gzip': f = gzip.open(file, "rt") else: f = open(file) lines = f.readlines() lines.sort() for line in lines: line = line.strip() match = re.search(r'\'... (serverVersion .*?\')', line) if match: line = match.group(1) line = re.sub(r', \'\.\.\. Map .*?\'', '', line) line = re.sub(r', \'\.\.\. movedFile .*?\'', '', line) line = re.sub(r', \'\.\.\. clientFile .*?\'', '', line) line = re.sub(r', \'\.\.\. isMapped \'', '', line) line = re.sub(r', \'\.\.\. haveRev .*?\'', '', line) line = re.sub(r', \'\.\.\. action .*?\'', '', line) line = re.sub(r', \'\.\.\. actionOwner .*?\'', '', line) line = re.sub(r', \'\.\.\. resolved \'', '', line) line = re.sub(r', \'\.\.\. unresolved \'', '', line) line = re.sub(r', \'\.\.\. reresolvable \'', '', line) line = re.sub(r', \'\.\.\. change .*?\'', '', line) line = re.sub(r', \'\.\.\. type .*?\'', '', line) line = re.sub(r', \'\.\.\. charset .*?\'', '', line) line = re.sub(r', \'\.\.\. Type .*?\'', '', line) line = re.sub(r', \'\.\.\. Date .*?\'', '', line) line = re.sub(r', \'\.\.\. time .*?\'', '', line) line = re.sub(r', \'\.\.\. headTime .*?\'', '', line) line = re.sub(r', \'\.\.\. headModTime .*?\'', '', line) line = re.sub(r', \'\.\.\. Update .*?\'', '', line) line = re.sub(r', \'\.\.\. Access .*?\'', '', line) line = re.sub(r', \'\.\.\. 
workRev .*?\'', '', line) line = re.sub(r', \'\.\.\. ourLock .*?\'', '', line) line = re.sub(r', \'\.\.\. \.\.\. other.*?\'', '', line) line = re.sub(r', \'\.\.\. digest .*?\'', '', line) line = re.sub(r', \'\.\.\. fileSize .*?\'', '', line) line = re.sub(r', \'\.\.\. lbrRefCount .*?\'', '', line) line = re.sub(r', \'\.\.\. lbrPath .*?\'', '', line) line = re.sub(r', \'\.\.\. lbrRelPath .*?\'', '', line) line = re.sub(r', \'\.\.\. lbrRelTo .*?\'', '', line) line = re.sub(r', \'\.\.\. lbrRelToPath .*?\'', '', line) line = re.sub(r', \'\.\.\. status .*?\'', '', line) line = re.sub(r', \'\.\.\. extraTag.*? digest\'', '', line) line = re.sub(r', \'\.\.\. extraTag.*? streamSpecDigest\'', '', line) line = re.sub(r', \'\.\.\. streamSpecDigest .*?\'', '', line) line = re.sub(r', \'\.\.\. Description .*?\'', '', line) line = re.sub(r', \'\.\.\. changeIdentity .*?\'', '', line) line = re.sub(r', \'journal = .*?\'', '', line) line = re.sub(r', \'lastCheckpointAction = .*?\'', '', line) line = re.sub(r', \'security = .*?\'', '', line) line = re.sub(r', \'monitor = .*?\'', '', line) if (not re.match(r'\[\'p4 set\'', line) and not re.match(r'\[\'p4 \-ztag user', line) and not re.match(r'\[\'p4 \-ztag client', line) and not re.match(r'\[\'p4 \-ztag protects', line) and not re.match(r'\[\'p4 \-ztag configure show', line) and not re.match(r'\[\'p4 \-ztag branch', line)): alist.append(line) f.close() return(alist) def validation(self): logNameRebuild = os.path.abspath(self.serverRoot + "/IntegHistory.log") try: # first try to run IntegHistory as a module # therefore IntegHistory.py must be in the current directory history = IntegHistory.IntegHistory(self.files, logNameRebuild, self.branchName) logNameRebuild = history.getData() except: # On Linux, the "IntegHistory.py" script can be in the user PATH cmd = "IntegHistory.py " if self.branchName: cmd += "-b " + self.branchName + " " cmd += ' '.join([str(f) for f in self.files]) process = Popen(cmd, shell = True, stdout = PIPE) (output, 
err) = process.communicate() exitCode = process.wait() if not os.path.exists(logNameRebuild): logNameRebuild = os.path.abspath(self.serverRoot + "/IntegHistory.log.gz") if not os.path.exists(logNameRebuild): print(" ==> Validation failed: Cannot find the IntegHistory.py script") else: fromlines = self.filetolist(self.logName) tolines = self.filetolist(logNameRebuild) validationFilename = os.path.abspath(self.serverRoot + "/validation.log") validationFile = open(validationFilename, "w") errors = False for diff in difflib.unified_diff(fromlines, tolines, fromfile=self.logName, tofile=logNameRebuild, n=0): validationFile.write(diff.strip() + "\n") if not re.match(r'\-\-\-', diff) and not re.match(r'\+\+\+', diff): errors = True validationFile.close() if errors: print(" ==> problems detected during validation, review differences against original IntegHistory log in " + validationFilename) def main(): parser = argparse.ArgumentParser(prog='IntegHistoryRebuild', usage='%(prog)s [IntegHistory-output-filename]') parser.add_argument("logName", nargs="?", help="IntegHistory output filename", default="IntegHistory.log.gz") args = parser.parse_args() logName = args.logName history = IntegHistoryRebuild(logName) print("Reading server data from " + logName + "...") history.parse() print("Creating rebuilt server...") history.createJnlPatch() history.createServer() print("Creating dummy server depot files...") history.createServerDepotFiles() print("Validating rebuilt server...") history.validation() print("Completed!") print("Server root=" + history.getServerRoot()) print("P4CONFIG file=" + os.path.abspath(history.getServerRoot() + "/" + history.getP4CONFIG())) if __name__ == "__main__": main()
# | Change | User | Description | Committed | |
---|---|---|---|---|---|
#66 | 30541 | Pascal Soccard |
Add exception in case "p4 verify -v //..." fails It could fail if there is no data about the parent of a lazy copy. |
||
#65 | 30499 | Pascal Soccard |
The self.p4.run_storage("-r") method could fail if: 'There is no storage upgrade in progress to restart.' |
||
#64 | 30220 | Pascal Soccard | Stream path like "(.*?)" "(.*)" was incorrectly parsed | ||
#63 | 30219 | Pascal Soccard | Reworked previous change that was still not working properly with Python3 | ||
#62 | 30218 | Pascal Soccard | Fixed Python 3 issue only introduced by previous change (change 30185) | ||
#61 | 30185 | Pascal Soccard | Reworked "p4d -V" for Python 2.7 compatibility. | ||
#60 | 30180 | Pascal Soccard | Added ditto mapping stream path type | ||
#59 | 30179 | Pascal Soccard |
Reworked the p4d version detection. Added creation of a .p4version files used by own scripts. |
||
#58 | 30165 | Pascal Soccard |
Reworked how self.p4.user is used Setting P4LOG (-L) for all p4d commands |
||
#57 | 30157 | Pascal Soccard |
Restructured how the p4 instance is used (connection now in the init method). The "p4 verify -v" is now run as part of the script to update the digests of the dummy server depot files. Customer digests are still present in the file content, so "p4 diff"/"p4 copy" behaviour should not be affected by this change. |
||
#56 | 30060 | Pascal Soccard | Fixed creation of the branch spec when provided (mappings were reverse) | ||
#55 | 29855 | Pascal Soccard | Fixed ParentView parsing | ||
#54 | 29854 | Pascal Soccard | Follow up on IntegHistory.py "p4 fstat -Ob" change | ||
#53 | 29777 | Pascal Soccard | Handled better confusion with P4CHARSET when set in P4CONFIG locally | ||
#52 | 29688 | Pascal Soccard | Reworked how multiple files argument is passed to IntegHistory.py | ||
#51 | 29674 | Pascal Soccard | Fixed parsing issue for Ignored paths with spaces in the path | ||
#50 | 29089 | Pascal Soccard |
Fixed bad initialisation of parentview Added inclusive+ in Type of stream path |
||
#49 | 29060 | Pascal Soccard |
Changed error reporting on failed connection like for example "Segmentation fault (core dumped)" when running corrupted p4d |
||
#48 | 28978 | Pascal Soccard |
Initialisation of Owner and Description for a branch spec as a customer did not have them set for a branch. When an exception is reported, I now report the traceback for easier debugging. |
||
#47 | 28877 | Pascal Soccard | Added feature for collecting a branch spec | ||
#46 | 28689 | Pascal Soccard |
Added support for "p4 -ztag -zconfigurables info". The -zconfigurables option was introduced with 2021.2 and exposes some configurables without having super privileges. This is useful to get the dm.integ configurables which are not exposed by "p4 -ztag info" like the dm.integ.undo configurable. |
||
#45 | 28483 | Pascal Soccard | Perforce user was not added in the protection table | ||
#44 | 28482 | Pascal Soccard | Fixed fstat parsing method; attributes could have more than one - in their name | ||
#43 | 28444 | Pascal Soccard | Undone previous fix and reworked how to deal with change identity | ||
#42 | 28443 | Pascal Soccard | Added CHidentity for changes | ||
#41 | 28115 | Pascal Soccard |
Added exception to better report UnicodeDecodeError which requires to run the script with Python 2 until I found a fix for this issue. |
||
#40 | 28114 | Pascal Soccard | Fixed wrong flag for "p4 storage" (-r instead of -R) | ||
#39 | 28113 | Pascal Soccard | Removed extra keywords in validation | ||
#38 | 27927 | Pascal Soccard |
Fixed a bug for lbrRev which is not necessary 1.X but could be 1.X.Y.Z for files submitted with a very old server release. |
||
#37 | 27904 | Pascal Soccard | Strip newline being added when parsing configurables | ||
#36 | 27889 | Pascal Soccard |
Fixed db.storage records population (maxCommitChange needs to be set) and use wrong "p4 storage" flag. |
||
#35 | 27815 | Pascal Soccard | Some replication configurables should be ignored | ||
#34 | 27552 | Pascal Soccard | Added support for Stream ParentView field | ||
#33 | 27551 | Pascal Soccard |
Improve "p4 configure show" parsing Support for db.integ.tweaks |
||
#32 | 26503 | Pascal Soccard |
Undoing previous fix (missing 2019.1 db.storage population done by phase 2) Replacing it with a better fix |
||
#31 | 26502 | Pascal Soccard |
From 2019.1, background upgrade is done when the server starts and it is currently not supported by rsh hack. Without this fix, some commands like "p4 obliterate" will fail as they are waiting for the upgrade to be completed. Untested on Windows platform. |
||
#30 | 26300 | Pascal Soccard |
Added extra exclusion rules for the validation procedure (more to do for stream) |
||
#29 | 26298 | Pascal Soccard | Fixed parsing issue in Path stream when quote is used (e.g.: share "a file") | ||
#28 | 26261 | Pascal Soccard | Fixed highest privilege level detection | ||
#27 | 25690 | Pascal Soccard |
Fixed "p4 streams" filtering issue when a stream name includes the following characters: ( ) & These characters needs to be escaped in the filter string. |
||
#26 | 24998 | Pascal Soccard |
Fixed IntegHow field for db.integed records involving deleted revisions Added db.change changeImportedBy to clear validation errors |
||
#25 | 24579 | Pascal Soccard |
updateDeleteLbr() method must be called only for move/delete revision Changed default workspace root location (IntegHistoryRebuildRoot/workspace) |
||
#24 | 24573 | Pascal Soccard | Fixed resolve action value for "branch into" | ||
#23 | 24180 | Pascal Soccard |
Reworked task stream test when adding files to db.revtx due to a server bug for converted task streams (task flag is not updated). Improved the validation check by sorting IntegHistory.log first before comparing |
||
#22 | 24037 | Pascal Soccard | Fixed binary+F filetype | ||
#21 | 23215 | Pascal Soccard | Fixed filetype binary+D | ||
#20 | 22964 | Pascal Soccard |
Added new options (verbosity and P4 path) Fixed StreamDepth check for older version Escaped \ for Windows path |
||
#19 | 22909 | Pascal Soccard | Fixed some validation rules which were missing an end space | ||
#18 | 22469 | Pascal Soccard |
Added support for task stream Added support for undo actions |
||
#17 | 22323 | Pascal Soccard |
Reworked script to handle lbr values for branched move/delete db.rev records as "p4 fstat -Oc" doesn't provide these data. Fixed use of stream depth which was not taken into account. |
||
#16 | 22291 | Pascal Soccard | Added new P4 environment variables for command options | ||
#15 | 21218 | Pascal Soccard | Added mergeany/mergedown stream options | ||
#14 | 21041 | Pascal Soccard |
Fixed quote issue with file arguments Matching closely user protection level |
||
#13 | 19869 | Pascal Soccard |
Fixed missing quotes in list file argument which is required due to possible spaces in filename. |
||
#12 | 19698 | Pascal Soccard |
Fixed file revision attributes that were failing for Python 2.7 due to a method compatibility issue. |
||
#11 | 19326 | Pascal Soccard | Missed a log filename change | ||
#10 | 19323 | Pascal Soccard |
Previous IntegHistory.py commpressed code change now requires a filename without a .gz extension. |
||
#9 | 19320 | Pascal Soccard | Added new validation rules | ||
#8 | 19064 | Pascal Soccard |
Checked if the description fields are empty Added extra validation rules to exclude specific client fields |
||
#7 | 19025 | Pascal Soccard | Fixed Stream Remapped that was omitted | ||
#6 | 18993 | Pascal Soccard | Added depot StreamDepth that was introduced in 2015.2 | ||
#5 | 18992 | Pascal Soccard |
Found out that cmap initialisation I tried to fix was due to a scheme data type change. So fixed it accordingly. Fixed description field when starting with a \n character |
||
#4 | 18957 | Pascal Soccard | Fixed more cmap initialisation that got broken with the cmap fix | ||
#3 | 18709 | Pascal Soccard | Fixed cmap initialisation that got broken with previous cmap fix | ||
#2 | 18660 | Pascal Soccard |
Fixed changeMap value (stream template) that was not correctly set. Added support for utf8 filetype. |
||
#1 | 18490 | Pascal Soccard |
This tool recreates a partial Helix server (metadata and dummy depot files) using data collected from a Helix server using p4 commands. |