#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ==============================================================================
# Copyright and license info is available in the LICENSE file included with
# the Server Deployment Package (SDP), and also available online:
# https://swarm.workshop.perforce.com/projects/perforce-software-sdp/view/main/LICENSE
# ------------------------------------------------------------------------------

"""
NAME:
    SwarmReviews.py

DESCRIPTION:
    This script prints Swarm reviews as a csv file.

    Change id, Review id, Review state, Submitter ID, Submitter Name,
    Date submitted, Date Review Last Updated, No of Votes, Test status,
    Job ID, Jira Ticket ID (via Job), Jira Ticket Summary (via Job)
"""

# Python 2.7/3.3 compatibility.
from __future__ import print_function

import sys
import requests
import os
import textwrap
import argparse
import logging
import P4
import time
from collections import defaultdict
from six.moves import range
import six

script_name = os.path.basename(os.path.splitext(__file__)[0])
LOGDIR = os.getenv('LOGS', '/p4/1/logs')

DEFAULT_LOG_FILE = "log-%s.log" % script_name
if os.path.exists(LOGDIR):
    DEFAULT_LOG_FILE = os.path.join(LOGDIR, "%s.log" % script_name)
DEFAULT_VERBOSITY = 'DEBUG'
LOGGER_NAME = 'P4Triggers'


def formatdate(epoch):
    if isinstance(epoch, six.string_types):
        epoch = int(epoch)
    t = time.localtime(epoch)
    return time.strftime("%Y-%m-%d %H:%M:%S", t)


class Review(object):
    "Wrapper around Swarm JSON"

    def __init__(self, json):
        self.json = json

    def getval(self, k):
        try:
            if k in self.json:
                return str(self.json[k])
        except:
            pass
        return ""

    def id(self):
        return self.getval('id')

    def state(self):
        return self.getval('state')

    def author(self):
        return self.getval('author')

    def updated(self):
        return self.getval('updated')

    def numVotes(self):
        count = 0
        try:
            if 'participants' in self.json:
                for p in self.json['participants']:
                    if p and 'vote' in p:
                        count += 1
        except:
            pass
        return count

    def testStatus(self):
        return self.getval('testStatus')


class SwarmReviews(object):
    """See module doc string for details"""

    def __init__(self, *args, **kwargs):
        self.parse_args(__doc__, args)

    def parse_args(self, doc, args):
        """Common parsing and setting up of args"""
        desc = textwrap.dedent(doc)
        parser = argparse.ArgumentParser(
            formatter_class=argparse.RawDescriptionHelpFormatter,
            description=desc,
            epilog="Copyright (c) 2008-2018 Perforce Software, Inc."
        )
        self.add_parse_args(parser)  # Should be implemented by subclass
        self.options = parser.parse_args(args=args)
        self.init_logger()
        self.logger.debug("Command Line Options: %s\n" % self.options)

    def add_parse_args(self, parser, default_log_file=None, default_verbosity=None):
        """Default trigger arguments - common to all triggers
        :param default_verbosity:
        :param default_log_file:
        :param parser:
        """
        if not default_log_file:
            default_log_file = DEFAULT_LOG_FILE
        if not default_verbosity:
            default_verbosity = DEFAULT_VERBOSITY
        parser.add_argument('-p', '--port', default=None,
                            help="Perforce server port - set using %%serverport%%. Default: $P4PORT")
        parser.add_argument('-u', '--user', default=None,
                            help="Perforce user. Default: $P4USER")
        parser.add_argument('-L', '--log', default=default_log_file,
                            help="Default: " + default_log_file)
        parser.add_argument('-t', '--ticket', help="Perforce ticket")
        parser.add_argument('path', nargs='+', help="Perforce depot path")
        parser.add_argument('-v', '--verbosity',
                            nargs='?',
                            const="INFO",
                            default=default_verbosity,
                            choices=('DEBUG', 'WARNING', 'INFO', 'ERROR', 'FATAL'),
                            help="Output verbosity level. Default is: " + default_verbosity)

    def init_logger(self, logger_name=None):
        if not logger_name:
            logger_name = LOGGER_NAME
        self.logger = logging.getLogger(logger_name)
        self.logger.setLevel(self.options.verbosity)
        logformat = '%(levelname)s %(asctime)s %(filename)s %(lineno)d: %(message)s'
        logging.basicConfig(format=logformat, filename=self.options.log,
                            level=self.options.verbosity)
        formatter = logging.Formatter('%(message)s')
        ch = logging.StreamHandler(sys.stderr)
        ch.setLevel(logging.INFO)
        ch.setFormatter(formatter)
        self.logger.addHandler(ch)

    def get_swarm_base_url(self, p4):
        p = p4.run_property("-l", "-n", "P4.Swarm.URL")
        url = p[0]['value']
        r = requests.get("%s/api/version" % url)
        j = r.json()
        if 'apiVersions' in j:
            return "%s/api/v%d" % (url, j['apiVersions'][-1])
        return "%s/api/v4" % url

    def get_swarm_reviews(self, p4, changes):
        base_url = self.get_swarm_base_url(p4)
        auth = (p4.user, self.options.ticket)
        reviews = []
        chunk_size = 40
        chunks = [changes[i:i + chunk_size] for i in range(0, len(changes), chunk_size)]
        self.logger.info("Chunks to process: %d" % len(chunks))
        for chunk in chunks:
            self.logger.info("Processing Swarm chunk")
            chg_search = ["change[]=%s" % c['change'] for c in chunk]
            url = '%s/reviews?%s' % (base_url, "&".join(chg_search))
            self.logger.debug("Get: %s" % (url))
            try:
                resp = requests.get(url, auth=auth)
                if resp.status_code == 200:
                    json = resp.json()
                    self.logger.debug("Result: %s" % (json))
                    if 'reviews' in json:
                        for r in json['reviews']:
                            reviews.append(r)
                else:
                    self.logger.warn("%d: %s" % (resp.status_code, resp.reason))
            except Exception as e:
                self.logger.exception(e)
        return reviews

    def run(self):
        """Runs script"""
        try:
            self.logger.debug("%s: starting" % script_name)
            p4 = P4.P4()
            if self.options.port:
                p4.port = self.options.port
            if self.options.user:
                p4.user = self.options.user
            p4.connect()
            users = {}
            for u in p4.run_users():
                users[u['User']] = u
            print("path,commit,authorID,authorName,dateSubmitted,reviewID,state,dateUpdated,numVotes,testStatus,job,JiraID,JiraSummary")
            for path in self.options.path:
                if not path.endswith("/..."):
                    if path[-1] == "/":
                        path += "..."
                    else:
                        path += "/..."
                changes = p4.run_changes(path)
                if not changes:
                    self.logger.warn("No changes found for: %s" % path)
                    continue
                reviews = self.get_swarm_reviews(p4, changes)
                reviews_by_chg = {}
                for r in reviews:
                    for c in r['commits']:
                        reviews_by_chg[c] = r
                fixes = defaultdict(list)
                for f in p4.run_fixes(path):
                    fixes[f['Change']].append(f)
                jobs = {}
                for j in p4.run_jobs(path):
                    jobs[j['Job']] = j
                # Change id, Review id, Review state, Submitter ID, Submitter Name, Date submitted, Date Review Last Updated,
                # No of Votes, Test status, Job ID, Jira Ticket ID (via Job), Jira Ticket Summary (via Job)
                for c in changes:
                    parts = [path, c['change'], c['user'],
                             '"%s"' % users[c['user']]['FullName'],
                             formatdate(c['time'])]
                    try:
                        r = Review(reviews_by_chg[int(c['change'])])
                        parts.extend([r.id(), r.state(), formatdate(r.updated()),
                                      str(r.numVotes()), r.testStatus()])
                    except Exception as e:
                        parts.extend(["", "", "", "", ""])
                    job_info = ["", "", ""]
                    if c['change'] in fixes:
                        j = fixes[c['change']][0]['Job']
                        job_info[0] = j
                        if j in jobs:
                            job_info = [j, jobs[j]['JiraIssue'],
                                        '"%s"' % jobs[j]['Description'].rstrip()]
                    parts.extend(job_info)
                    try:
                        msg = ",".join(parts)
                        print(msg)
                    except:
                        msg = ",".join([m.decode('cp1252').encode('utf-8') for m in parts])
                        print(msg)
        except Exception as e:
            print(str(e))


if __name__ == '__main__':
    """ Main Program"""
    obj = SwarmReviews(*sys.argv[1:])
    sys.exit(obj.run())
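# ------------------------------------------------------------------------------
# Example invocation and output (a sketch only: the server address, user name,
# ticket value and depot path below are hypothetical placeholders, as are the
# values in the sample CSV row).
#
#   python3 SwarmReviews.py -p perforce:1666 -u swarm_user -t <ticket> //depot/dev/...
#
# Each submitted change under the given path produces one CSV row matching the
# header printed by run(), e.g.:
#
#   //depot/dev/...,12345,jsmith,"J Smith",2018-06-01 09:30:00,678,approved,2018-06-02 14:05:00,2,pass,job000042,PRJ-17,"Fix login crash"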