#!/usr/bin/env python3 # -*- coding: utf-8 -*- # ============================================================================== # Copyright and license info is available in the LICENSE file included with # the Server Deployment Package (SDP), and also available online: # https://swarm.workshop.perforce.com/projects/perforce-software-sdp/view/main/LICENSE # ------------------------------------------------------------------------------ """ NAME: SwarmReviews.py DESCRIPTION: This script prints Swarm reviews as a csv file. Change id, Review id, Review state, Submitter ID, Submitter Name, Date submitted, Date Review Created, Date Review Last Updated, No of Up Votes, No of Down Votes, Test status, Num Comments, Num Open Tasks, Num Closed Tasks, Job ID, Jira Ticket ID (via Job), Jira Ticket Type, Jira Ticket State, Review Type, Scope of Review, Supporting Docs, Deadline for Review, Actual Review Effort, Jira Ticket Summary (via Job), Review Description You can run like this: p4 login -ap [note the output ticket and use with -t flag in next command] for /f "usebackq" %p in (`p4 dirs //depot/*`) do python.exe swarm_reviews.py -t 1234567ABDBD %p/... >> rep.csv """ # Python 2.7/3.3 compatibility. from __future__ import print_function import sys import requests import os import textwrap import argparse import logging import P4 import time import json import re from collections import defaultdict, OrderedDict from six.moves import range import six script_name = os.path.basename(os.path.splitext(__file__)[0]) LOGDIR = os.getenv('LOGS', '/p4/1/logs') DEFAULT_LOG_FILE = "log-%s.log" % script_name if os.path.exists(LOGDIR): DEFAULT_LOG_FILE = os.path.join(LOGDIR, "%s.log" % script_name) DEFAULT_VERBOSITY = 'DEBUG' LOGGER_NAME = 'P4Triggers' def formatdate(epoch): if isinstance(epoch, six.string_types): epoch = int(epoch) t = time.localtime(epoch) return time.strftime("%Y-%m-%d %H:%M:%S", t) def csv_escape(field): result = str(field).replace("\n", " | ") if "," in result: return '"%s"' % result return result class Review(object): "Wrapper around Swarm JSON" def __init__(self, json): if 'review' in json: self.json = json['review'] else: self.json = json self.comments = [] def addComments(self, comments): self.comments = comments def numComments(self): return len([c for c in self.comments if c['taskState'] == 'comment']) def numOpenTasks(self): return len([c for c in self.comments if c['taskState'] == 'open']) def numClosedTasks(self): return len([c for c in self.comments if c['taskState'] == 'addressed']) def getval(self, k): try: if k in self.json: return str(self.json[k]) except: pass return "" def id(self): return self.getval('id') def state(self): return self.getval('state') def author(self): return self.getval('author') def updated(self): return self.getval('updated') def numVotes(self, value): count = 0 try: if 'participants' in self.json: for p in six.iteritems(self.json['participants']): if p and p[1] and 'vote' in p[1]: if 'value' in p[1]['vote'] and p[1]['vote']['value'] == value: count += 1 except: pass return count def numVotesUp(self): return self.numVotes(1) def numVotesDown(self): return self.numVotes(-1) def testStatus(self): return self.getval('testStatus') def description(self): return self.getval('description') def getField(self, f): if 'description' not in self.json: return "" refield = re.compile("^\s*%s:\s*(.*)" % re.escape(f), re.MULTILINE) m = refield.search(self.json['description']) if m: return m.group(1) def updateValuesFromTemplateDescription(self, rec): """Parse following: Review Type: [Screening | WalkThrough | Inspection] Scope of Review: [Delta Review | Full Review] Supporting Documents: [list of documents with versions] Deadline for the Review: [date] Review Meeting (required for walkthrough or inspection): Planned: [date, participants] Performed: [date, participants] Actual Review Effort: [hours] reviewType,reviewScope,supportingDocs,reviewDeadline,reviewEffort, """ rec['reviewType'] = self.getField('Review Type') rec['reviewScope'] = self.getField('Scope of Review') rec['supportingDocs'] = self.getField('Supporting Documents') rec['reviewDeadline'] = self.getField('Deadline for the Review') rec['reviewEffort'] = self.getField('Actual Review Effort') class SwarmComments(object): """Parses all swarm comment objects - directly from p4 keys output to avoid overhead of individual API calls """ def __init__(self, comments): "Comments is output from p4.run_keys('-e', 'swarm-comment-*')" self.review_comments = defaultdict(list) for c in comments: j = json.loads(c['value']) if 'topic' in j and j['topic'].startswith('review'): id = j['topic'].split('/')[1] self.review_comments[id].append(j) def getReviewComments(self, id): if id in self.review_comments: return self.review_comments[id] return [] class SwarmReviews(object): """See module doc string for details""" def __init__(self, *args, **kwargs): self.parse_args(__doc__, args) def parse_args(self, doc, args): """Common parsing and setting up of args""" desc = textwrap.dedent(doc) parser = argparse.ArgumentParser( formatter_class=argparse.RawDescriptionHelpFormatter, description=desc, epilog="Copyright (c) 2008-2018 Perforce Software, Inc." ) self.add_parse_args(parser) # Should be implemented by subclass self.options = parser.parse_args(args=args) self.init_logger() self.logger.debug("Command Line Options: %s\n" % self.options) def add_parse_args(self, parser, default_log_file=None, default_verbosity=None): """Default trigger arguments - common to all triggers :param default_verbosity: :param default_log_file: :param parser: """ if not default_log_file: default_log_file = DEFAULT_LOG_FILE if not default_verbosity: default_verbosity = DEFAULT_VERBOSITY parser.add_argument('-p', '--port', default=None, help="Perforce server port - set using %%serverport%%. Default: $P4PORT") parser.add_argument('-u', '--user', default=None, help="Perforce user. Default: $P4USER") parser.add_argument('-L', '--log', default=default_log_file, help="Default: " + default_log_file) parser.add_argument('-i', '--input', help="Name of a file with a list of paths") parser.add_argument('-t', '--ticket', required=True, default=None, help="Perforce ticket for use with Swarm. Output of 'p4 login -ap' for specified or current user") parser.add_argument('path', nargs='*', help="Perforce depot path(s) - required if no input file specified") parser.add_argument('-v', '--verbosity', nargs='?', const="INFO", default=default_verbosity, choices=('DEBUG', 'WARNING', 'INFO', 'ERROR', 'FATAL'), help="Output verbosity level. Default is: " + default_verbosity) def init_logger(self, logger_name=None): if not logger_name: logger_name = LOGGER_NAME self.logger = logging.getLogger(logger_name) self.logger.setLevel(self.options.verbosity) logformat = '%(levelname)s %(asctime)s %(filename)s %(lineno)d: %(message)s' logging.basicConfig(format=logformat, filename=self.options.log, level=self.options.verbosity) formatter = logging.Formatter('%(message)s') ch = logging.StreamHandler(sys.stderr) ch.setLevel(logging.INFO) ch.setFormatter(formatter) self.logger.addHandler(ch) def get_swarm_base_url(self, p4): p = p4.run_property("-l", "-n", "P4.Swarm.URL") url = p[0]['value'] if url[-1] == '/': url = url[:-1] r = requests.get("%s/api/version" % url) j = r.json() if 'apiVersions' in j: return "%s/api/v%d" % (url, j['apiVersions'][-1]) return "%s/api/v4" % url def get_swarm_reviews(self, p4, changes): auth = (p4.user, self.options.ticket) base_url = self.get_swarm_base_url(p4) reviews = [] chunk_size = 40 chunks = [changes[i:i + chunk_size] for i in range(0, len(changes), chunk_size)] self.logger.info("Chunks to process: %d" % len(chunks)) for chunk in chunks: self.logger.info("Processing Swarm chunk") chg_search = ["change[]=%s" % c['change'] for c in chunk] url = '%s/reviews?%s' % (base_url, "&".join(chg_search)) self.logger.debug("Get: %s" % (url)) try: resp = requests.get(url, auth=auth) if resp.status_code == 200: json = resp.json() self.logger.debug("Result: %s" % (json)) if 'reviews' in json: for r in json['reviews']: reviews.append(r) else: self.logger.warn("%d: %s" % (resp.status_code, resp.reason)) except Exception as e: self.logger.exception(e) return reviews def run(self): """Runs script""" try: self.logger.debug("%s: starting" % script_name) p4 = P4.P4() if self.options.port: p4.port = self.options.port if self.options.user: p4.user = self.options.user p4.connect() users = {} for u in p4.run_users(): users[u['User']] = u swarm_comments = SwarmComments(p4.run_keys("-e", "swarm-comment-*")) fields = "path,commit,authorID,authorName,dateSubmitted,reviewID,reviewState,dateUpdated,numVotesUp,numVotesDown," \ "testStatus,numComments,numOpenTasks,numClosedTasks,job,JiraID,JiraStatus,JiraType," \ "reviewType,reviewScope,supportingDocs,reviewDeadline,reviewEffort,JiraSummary,reviewDescription".split(",") print(",".join(fields)) paths = [] if len(self.options.path) > 0: paths.extend(self.options.path) if self.options.input: with open(self.options.input, "r") as f: paths.extend([p.rstrip() for p in f.readlines()]) for path in paths: self.logger.info("Processing %s" % path) # if not path.endswith("/..."): # if path[-1] == "/": # path += "..." # else: # path += "/..." changes = p4.run_changes(path) if not changes: self.logger.warning("No changes found for: %s" % path) rec = OrderedDict() for f in fields: # Have fields in correct order rec[f] = "" rec['path'] = path rec['commit'] = "No changes" msg = ",".join([csv_escape(m) for m in six.itervalues(rec)]) print(msg) continue reviews = self.get_swarm_reviews(p4, changes) reviews_by_chg = {} for r in reviews: for c in r['commits']: reviews_by_chg[c] = r fixes = defaultdict(list) for f in p4.run_fixes(path): fixes[f['Change']].append(f) jobs = {} for j in p4.run_jobs(path): jobs[j['Job']] = j for c in changes: rec = OrderedDict() for f in fields: # Have fields in correct order rec[f] = "" rec['path'] = path rec['commit'] = c['change'] rec['authorID'] = c['user'] rec['authorName'] = users[c['user']]['FullName'] rec['dateSubmitted'] = formatdate(c['time']) try: r = Review(reviews_by_chg[int(c['change'])]) r.addComments(swarm_comments.getReviewComments(r.id())) r.updateValuesFromTemplateDescription(rec) rec['reviewDescription'] = r.description() rec['reviewID'] = r.id() rec['reviewState'] = r.state() rec['dateUpdated'] = formatdate(r.updated()) rec['numVotesUp'] = str(r.numVotesUp()) rec['numVotesDown'] = str(r.numVotesDown()) rec['testStatus'] = r.testStatus() rec['numComments'] = r.numComments() rec['numOpenTasks'] = r.numOpenTasks() rec['numClosedTasks'] = r.numClosedTasks() except Exception as e: pass if c['change'] in fixes: j = fixes[c['change']][0]['Job'] if j in jobs: rec['job'] = j if 'JiraIssue' in jobs[j]: rec['JiraID'] = jobs[j]['JiraIssue'] if 'JiraStatus' in jobs[j]: rec['JiraStatus'] = jobs[j]['JiraStatus'] if 'JiraType' in jobs[j]: rec['JiraType'] = jobs[j]['JiraType'] rec['JiraSummary'] = jobs[j]['Description'].rstrip() try: msg = ",".join([csv_escape(m) for m in six.itervalues(rec)]) print(msg) except Exception as e: msg = ",".join([csv_escape(m).decode('cp1252').encode('utf-8') for m in six.itervalues(rec)]) print(msg) except Exception as e: print(str(e)) if __name__ == '__main__': """ Main Program""" obj = SwarmReviews(*sys.argv[1:]) sys.exit(obj.run())
# | Change | User | Description | Committed | |
---|---|---|---|---|---|
#8 | 26932 | C. Thomas Tyler |
Repurposed the /p4/common/site directory. This directory will exist on a a fresh SDP install, but will be empty save for a ReadMe.txt file explaining that it is to be used to make local extensions to the SDP, and that anything in here is not supported. The coming automated SDP upgrade procedure will know to ignore /p4/common/site directory tree. The p4_vars ensures that /p4/common/site/bin is in the PATH. |
||
#7 | 25793 | Robert Cowham | Avoid utf encoding issues when printing results | ||
#6 | 25789 | Robert Cowham |
Add retry options for http errors. Extra flag for common extension (e.g. for wildcards such as /...) - helps with scripting |
||
#5 | 24491 | Robert Cowham | Version tested on site | ||
#4 | 24490 | Robert Cowham | Extract text from swarm review description | ||
#3 | 24449 | Robert Cowham | Added various new fields. | ||
#2 | 24204 | Robert Cowham | Added more columns. | ||
#1 | 24197 | Robert Cowham | Swarm report - basics |