"""PSLA - Perforce Server Log Analyzer
"""
import os
from flask import Flask, request, redirect, url_for, render_template, flash
from flask_wtf.csrf import CSRFProtect
from werkzeug.utils import secure_filename
from config import Config
from forms import UploadForm
import logging
import pandas as pd
import sqlite3
import traceback
from datetime import datetime, timedelta
import json
from altair import Chart, X, Y, Axis, Data, DataFormat
from app import app
from log2sql import Log2sql
UPLOAD_FOLDER = '/logs'
ALLOWED_EXTENSIONS = {'txt'}
app.config.from_object(Config)
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
csrf = CSRFProtect(app)
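
# Report definitions used by analyzeLog(): each entry supplies a title, an
# explanation shown with the results, and the SQL run against the database
# that Log2sql builds from the uploaded log. Entries in `graphs` are rendered
# as Altair charts and entries in `queries` as HTML tables; new reports can be
# added by appending dicts with the same keys.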
graphs = [{'title': 'Cmds per minute',
           'explanation': 'Maximum number of concurrent commands each minute',
           'sql': """
               SELECT SUBSTR(startTime, 0, 17) as 'Minute', MAX(running) as 'MaxCommands'
               FROM process
               GROUP BY Minute;
               """}]
queries = [{'title': 'Cmd Totals',
            'explanation': 'How many commands of each type',
            'sql': """
                SELECT cmd, COUNT(cmd) FROM process
                GROUP BY cmd;
                """},
           {'title': 'Average wait time',
            'explanation': 'Average wait time',
            'sql': """
                SELECT AVG(totalreadWait + totalwriteWait) as wait
                FROM tableUse;
                """},
           {'title': 'Worst lock offenders',
            'explanation': 'Users whose commands hold locks',
            'sql': """
                SELECT user, SUM(maxreadHeld + maxwriteHeld) as held
                FROM tableUse JOIN process USING (processKey)
                GROUP BY user ORDER BY held DESC LIMIT 25;
                """},
           {'title': 'Blocking Commands',
            'explanation': 'Commands that blocked others',
            'sql': """
                SELECT startTime, endTime, running, cmd, pid,
                    tablename, maxReadHeld, maxWriteHeld,
                    totalReadWait, totalWriteWait
                FROM tableUse JOIN process USING (processKey)
                WHERE totalReadHeld > 10000 OR totalWriteHeld > 10000
                ORDER BY startTime, endTime;
                """},
           {'title': 'Blocked commands - victims of the above',
            'explanation': 'Commands that waited on locks held by the blocking commands',
            'sql': """
                SELECT startTime, endTime, computedLapse, running, cmd, pid,
                    tablename, maxReadHeld, maxWriteHeld,
                    totalReadWait, totalWriteWait
                FROM tableUse JOIN process USING (processKey)
                WHERE totalReadWait > 10000 OR totalWriteWait > 10000
                ORDER BY startTime, endTime;
                """},
           # {'title': '',
           #  'explanation': '',
           #  'sql': """
           #      """},
           ]
def allowed_file(filename):
    # NOTE: the extension check is currently disabled by the early return, so
    # every uploaded file is accepted regardless of ALLOWED_EXTENSIONS.
    return True
    return '.' in filename and \
        filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS
class MyOptions():
    """Options object passed to Log2sql."""

    def __init__(self, dbname, logfile, sql=False, verbosity=logging.INFO, outlog=None):
        self.dbname = dbname
        self.logfile = logfile
        self.sql = sql
        self.verbosity = verbosity
        self.interval = 10
        self.outlog = outlog
        self.output = None
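
# Flask routes: index() serves the front page, uploadLog() accepts a log file
# and converts it to a SQLite database via Log2sql, and analyzeLog() runs the
# queries and graphs defined above against that database.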
@app.route('/')
def index():
    """Front page for application"""
    return render_template('index.html')
@app.route('/uploadLog', methods=['GET', 'POST'])
def uploadLog():
    "Upload a log file and convert it to a SQLite database"
    form = UploadForm()
    app.logger.debug("uploadLog: %s" % form.errors)
    app.logger.debug('------ {0}'.format(request.form))
    if form.is_submitted():
        app.logger.debug("submitted, validation errors: %s" % form.errors)
    if form.validate_on_submit():
        db_folder = app.config['UPLOAD_FOLDER']
        filename = secure_filename(form.uploadFile.data.filename)
        file_path = os.path.join(db_folder, filename)
        form.uploadFile.data.save(file_path)
        # Work in the upload folder so the database Log2sql creates ends up
        # where analyzeLog() expects to find it.
        os.chdir(db_folder)
        optionsSQL = MyOptions("db", [file_path], sql=True, outlog='log.out')
        # Remove any database left over from a previous upload.
        dbname = "%s.db" % optionsSQL.dbname
        if os.path.exists(dbname):
            os.remove(dbname)
        log2sql = Log2sql(optionsSQL)
        log2sql.processLog()
        return redirect(url_for('analyzeLog'))
    return render_template('uploadLog.html', title='Upload Log', form=form)
@app.route('/analyzeLog')
def analyzeLog():
    "Run the configured queries and graphs against the previously uploaded log"
    db_folder = app.config['UPLOAD_FOLDER']
    dbname = os.path.join(db_folder, "db.db")
    if not os.path.exists(dbname):
        flash('Database does not exist', 'error')
        return render_template('error.html', title='Database error')
    try:
        conn = sqlite3.connect(dbname)
        tables = []
        for q in queries:
            app.logger.debug("running: %s - %s" % (q['title'], q['sql']))
            if q['sql']:
                start = datetime.now()
                data = pd.read_sql_query(q['sql'], conn)
                end = datetime.now()
                delta = end - start
                tables.append({'data': data.to_html(),
                               'title': q['title'],
                               'explanation': q['explanation'],
                               'sql': q['sql'],
                               'time_taken': str(delta)})
        for q in graphs:
            df = pd.read_sql_query(q['sql'], conn)
            # chart = Chart(data=df, height=HEIGHT, width=WIDTH).mark_line().encode(
            chart = Chart(data=df).mark_line().encode(
                X('Minute', axis=Axis(title='Minute')),
                Y('MaxCommands', axis=Axis(title='Max Commands in that minute'))
            )
            chartJSON = chart.to_json()
        conn.close()
        return render_template('analyzeLog.html', tables=tables, chartJSON=chartJSON, LogFile='t1.log')
    except Exception as e:
        app.logger.error(traceback.format_exc())
        flash('Error: %s' % (str(e)), 'error')
        return render_template('error.html', title='Error in database reporting')
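

# A minimal entry point for local debugging - a sketch, assuming the project
# does not already provide its own run script (drop this if it does);
# `app.run()` is the standard Flask development server.
if __name__ == '__main__':
    app.run(debug=True)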