"""PSLA - Perforce Server Log Analyzer
"""

import os
import logging
import sqlite3
import traceback
from datetime import datetime

import pandas as pd
from flask import request, redirect, url_for, render_template, flash
from flask_wtf.csrf import CSRFProtect
from werkzeug.utils import secure_filename
from altair import Chart, X, Y, Axis

from config import Config
from forms import UploadForm
from app import app
from log2sql import Log2sql

# Chart dimensions
WIDTH = 600
HEIGHT = 300

UPLOAD_FOLDER = '/logs'
ALLOWED_EXTENSIONS = set(['txt'])

app.config.from_object(Config)
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
csrf = CSRFProtect(app)

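# Each entry below drives one Altair chart on the analysis page: the SQL runs
# against the generated SQLite database and the named columns become the x/y axes.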
graphs = [{'title': 'Cmds per minute',
           'graph_type': 'line',
           'explanation': 'Maximum number of concurrent commands each minute',
           'x': {'title': 'x axis',
                 'field': 'Minute'},
           'y': {'title': 'y axis',
                 'field': 'MaxCommands'},
           'sql': """
                SELECT SUBSTR(startTime, 0, 17) as 'Minute', MAX(running) as 'MaxCommands'
                FROM process
                GROUP BY Minute;
            """},
          {'title': 'Cmd Totals',
           'explanation': 'How many commands of each type',
           'x': {'title': 'x axis',
                 'field': 'Cmd'},
           'y': {'title': 'y axis',
                 'field': 'NumCmds'},
           'sql': """
                select cmd as 'Cmd', count(cmd) as 'NumCmds'
                from process
                group by cmd
                order by NumCmds;
            """}]

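# Canned queries rendered as HTML tables on the analysis page, each shown with
# its title, explanation, the SQL used and the time it took to run.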
queries = [{'title': 'Cmd Totals',
            'explanation': 'How many commands of each type',
            'sql': """
                SELECT cmd, COUNT(cmd) FROM process
                GROUP BY cmd;
            """},
           {'title': 'Average wait time',
            'explanation': 'Average wait time',
            'sql': """
                SELECT AVG(totalreadWait+totalwriteWait) as wait
                FROM tableUse;
            """},

           {'title': 'Worst lock offenders',
            'explanation': 'Users whose commands hold locks',
            'sql': """
            SELECT user, SUM(maxreadHeld+maxwriteHeld) as held
              FROM tableUse JOIN process USING (processKey)
              GROUP BY user ORDER BY held DESC LIMIT 25;
            """},

           {'title': 'Blocking Commands',
            'explanation': 'Commands that blocked others',
            'sql': """
                SELECT startTime, endTime, running, cmd, pid,
                    tablename, maxReadHeld,
                    maxWriteHeld, totalReadWait, totalWriteWait
                FROM tableUse JOIN process USING (processKey)
                WHERE processkey = processkey AND (( totalReadHeld > 10000 or
                    totalWriteHeld > 10000 )) ORDER BY startTime, endTime;
            """},
           {'title': 'Blocked commands - victims of the above',
            'explanation': 'Commands that waited on locks held by the above',
            'sql': """
                SELECT startTime, endTime, computedLapse, running, cmd, pid,
                    tablename, maxReadHeld,
                    maxWriteHeld,totalReadWait, totalWriteWait
                FROM tableUse JOIN process USING (processKey)
                WHERE processkey = processkey AND (( totalReadWait > 10000 or
                totalWriteWait > 10000 ))
                ORDER BY startTime, endTime;
            """}
           # {'title': '',
           #  'explanation': '',
           #  'sql': """
           #  """},
           ]


def allowed_file(filename):
    # Extension checking is currently bypassed: every uploaded file is accepted.
    # return '.' in filename and \
    #        filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS
    return True

class MyOptions:
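    """Simple options holder passed to Log2sql when converting a log to SQLite."""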
    def __init__(self, dbname, logfile, sql=False, verbosity=logging.INFO, outlog=None):
        self.dbname = dbname
        self.logfile = logfile
        self.sql = sql
        self.verbosity = verbosity
        self.interval = 10
        self.outlog = outlog
        self.output = None

@app.route('/')
def index():
    """Front page for application"""
    return render_template('index.html')

@app.route('/uploadLog', methods=['GET', 'POST'])
def uploadLog():
    "Upload log file"
    form = UploadForm()
    app.logger.debug("uploadLog: %s" % form.errors)
    app.logger.debug('------ {0}'.format(request.form))
    if form.is_submitted():
        app.logger.debug(form.errors)

    if form.validate():
        app.logger.debug(form.errors)
    app.logger.debug(form.errors)

    if form.validate_on_submit():
        db_folder = app.config['UPLOAD_FOLDER']
        filename = secure_filename(form.uploadFile.data.filename)
        file_path = os.path.join(db_folder, filename)
        form.uploadFile.data.save(file_path)

        os.chdir(db_folder)
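        # Convert the uploaded log into a fresh SQLite database ("db.db") using log2sql.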
        optionsSQL = MyOptions("db", [file_path], sql=True, outlog='log.out')
        dbname = "%s.db" % optionsSQL.dbname
        if os.path.exists(dbname):
            os.remove(dbname)
        log2sql = Log2sql(optionsSQL)
        log2sql.processLog()

        return redirect(url_for('analyzeLog'))
    return render_template('uploadLog.html', title='Upload Log', form=form)

@app.route('/analyzeLog')
def analyzeLog():
    "Access previously uploaded log"
    db_folder = app.config['UPLOAD_FOLDER']
    dbname = os.path.join(db_folder, "db.db")
    if not os.path.exists(dbname):
        flash('Database does not exist', 'error')
        return render_template('error.html', title='Database error')

    try:
        conn = sqlite3.connect(dbname)
        tables = []
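        # Run each canned query, timing it, and render the result as an HTML table.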
        for q in queries:
            app.logger.debug("running: %s - %s" % (q['title'], q['sql']))
            if q['sql']:
                start = datetime.now()
                data = pd.read_sql_query(q['sql'], conn)
                end = datetime.now()
                delta = end - start
                tables.append({'data': data.to_html(),
                               'title': q['title'],
                               'explanation': q['explanation'],
                               'sql': q['sql'],
                               'time_taken': str(delta)})
        charts = []
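        # Build an Altair chart for each graph definition, timing each query.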
        for i, q in enumerate(graphs, start=1):
            start = datetime.now()
            df = pd.read_sql_query(q['sql'], conn)
            end = datetime.now()
            delta = end - start
            chart = Chart(data=df, height=HEIGHT, width=WIDTH).mark_line().encode(
                X(q['x']['field'], axis=Axis(title=q['x']['title'])),
                Y(q['y']['field'], axis=Axis(title=q['y']['title']))
            )
            charts.append({'id': "chart-%d" % i,
                           'data': chart.to_json(),
                           'title': q['title'],
                           'explanation': q['explanation'],
                           'sql': q['sql'],
                           'time_taken': str(delta)})
        return render_template('analyzeLog.html', tables=tables, charts=charts, LogFile='t1.log')
    except Exception as e:
        app.logger.error(traceback.format_exc())
        flash('Error: %s' % (str(e)), 'error')
        return render_template('error.html', title='Error in database reporting')
