#!/usr/bin/env python3
"""Build a SQLite database of content digests from a Perforce depot path.

For every revision of every file under the given depot path, store the
digest, file size, depot path and revision number in a ``digest`` table.
The digest is the table's PRIMARY KEY, so the same content submitted more
than once triggers an IntegrityError and is reported as a duplicate --
that is how identical re-submissions are detected.
"""

import sqlite3
import P4
import argparse
from pprint import pprint

DATABASE_NAME = "hash.db"


class CreateHashDatabase:
    """Populate a SQLite digest database from ``p4 filelog`` output."""

    def __init__(self, name, path):
        """Open the database, connect to Perforce, and load all hashes.

        name -- SQLite database file name
        path -- depot path (e.g. //depot/...) whose filelog is scanned
        """
        self.path = path
        self.conn = sqlite3.connect(name)
        self.conn.execute(
            "Create Table IF NOT EXISTS digest"
            "(hash TEXT PRIMARY KEY, filesize INTEGER,"
            " depotfile TEXT, revision INTEGER)")
        self.p4 = P4.P4()
        self.p4.prog = "CreateHashDatabase"
        self.p4.connect()
        try:
            self.insert_hashes()
        finally:
            # Always release the server connection and the database handle,
            # even if insert_hashes raises (e.g. a bad depot path).
            self.p4.disconnect()
            self.conn.close()

    def insert_hashes(self):
        """Run filelog on self.path and insert one row per real revision."""

        class Handler(P4.OutputHandler):
            """Streams filelog records into the database as they arrive."""

            def __init__(self, conn):
                self.conn = conn

            def outputStat(self, stat):
                depotFile = stat['depotFile']
                values = []
                for n, rev in enumerate(stat['rev']):
                    digest = stat['digest'][n]
                    fileSize = stat['fileSize'][n]
                    # Skip lazy copies: a revision created purely by a
                    # branch/copy integration shares its content with the
                    # source revision, so it is not an independent
                    # submission.  (The original code's `continue` only
                    # advanced the inner how-loop and never skipped the
                    # append, so the filter was dead code.)
                    # NOTE(review): assumes stat['how'][n] holds the
                    # integration actions for revision n -- confirm against
                    # P4Python's tagged filelog output format.
                    if 'how' in stat and n < len(stat['how']):
                        hows = stat['how'][n] or []
                        if any(how in ("branch from", "copy from")
                               for how in hows):
                            continue
                    values.append((digest, fileSize, depotFile, rev))
                try:
                    with self.conn:
                        self.conn.executemany(
                            "INSERT INTO digest VALUES (?, ?, ?, ?)", values)
                except sqlite3.IntegrityError:
                    # The batch was rolled back because at least one digest
                    # already exists.  Retry row by row so the unique rows
                    # are still stored and each duplicate is reported
                    # (executemany alone would silently drop them all).
                    for row in values:
                        try:
                            with self.conn:
                                self.conn.execute(
                                    "INSERT INTO digest VALUES (?, ?, ?, ?)",
                                    row)
                        except sqlite3.IntegrityError:
                            print("Duplicate HASH key for ", depotFile)
                return P4.OutputHandler.HANDLED

        self.p4.run_filelog(self.path, handler=Handler(self.conn))


if __name__ == '__main__':
    parser = argparse.ArgumentParser("Hash checker")
    parser.add_argument("-d", "--database", default=DATABASE_NAME)
    parser.add_argument("-p", "--path", required=True)
    args = parser.parse_args()
    prog = CreateHashDatabase(args.database, args.path)
# | Change | User | Description | Committed | |
---|---|---|---|---|---|
#2 | 8287 | Sven Erik Knop | Updated copyright and no warranty notice. | ||
#1 | 8283 | Sven Erik Knop |
Set of useful scripts and triggers in P4Python. Most require Python 3.x to run, but can be adapted to Python 2.7 if necessary. The scripts also need P4Python 2012.2+ to work. changes_month_depot analyses the usage, in changes and bytes, of a Perforce server. directory_sizes lists the sizes of files within a directory of Perforce. hash_database attempts to find identical files submitted several times (excluding lazy copies). open_view_clients lists all clients with open views. Run the first three scripts against a replica (a db-only replica is sufficient); they can be very heavy on a server. The last file is a simple client form-in trigger to prevent open views. |