compress the reports :D

aronwk-aaron 2023-11-18 05:46:50 -06:00
parent 259efc81fd
commit d005b497e6
3 changed files with 181 additions and 7 deletions
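
The change swaps the JSON report column for a MEDIUMBLOB and stores gzip-compressed, UTF-8 encoded JSON instead. A minimal sketch of the round-trip the commit relies on, using only the standard library (the payload dict here is illustrative, not taken from the code):

import gzip
import json

report_data = {"1234": 56}  # illustrative report payload

# write path: serialize to JSON, encode to UTF-8 bytes, then gzip-compress
blob = gzip.compress(json.dumps(report_data).encode('utf-8'))

# read path: decompress, decode, and parse back into a dict
restored = json.loads(gzip.decompress(blob).decode('utf-8'))
assert restored == report_data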

View File

@@ -7,7 +7,6 @@ import logging
from flask_sqlalchemy.query import Query
from sqlalchemy.dialects import mysql
from sqlalchemy.exc import OperationalError, StatementError
-from sqlalchemy.types import JSON
from time import sleep
import random
import string

@@ -1018,7 +1017,7 @@ class Reports(db.Model):
    __tablename__ = 'reports'
    data = db.Column(
-        JSON(),
+        mysql.MEDIUMBLOB(),
        nullable=False
    )
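
The JSON column becomes a MySQL MEDIUMBLOB (up to 16 MiB) holding the gzip output, presumably because the per-date reports are sizeable but highly repetitive JSON. A rough, self-contained illustration of the kind of saving gzip gives on such a payload (the payload is synthetic and the numbers are indicative only):

import gzip
import json

# synthetic report-like payload: many LOT-id keys with small integer counts
report_data = {str(lot): lot % 64 for lot in range(1000, 6000)}
raw = json.dumps(report_data).encode('utf-8')
blob = gzip.compress(raw)
print(f"raw JSON: {len(raw)} bytes, gzip blob: {len(blob)} bytes")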

View File

@@ -4,8 +4,7 @@ from app.models import CharacterInfo, Account, CharacterXML, Reports
from app.luclient import get_lot_name
from app import gm_level, scheduler
from sqlalchemy.orm import load_only
-import datetime
-import xmltodict
+import xmltodict, gzip, json, datetime

reports_blueprint = Blueprint('reports', __name__)
@@ -44,6 +43,8 @@ def index():
@gm_level(3)
def items_by_date(date):
    data = Reports.query.filter(Reports.date == date).filter(Reports.report_type == "items").first().data
+    data = gzip.decompress(data)
+    data = json.loads(data.decode('utf-8'))
    return render_template('reports/items/by_date.html.j2', data=data, date=date)
@@ -62,6 +63,8 @@ def items_graph(start, end):
    datasets = []
    # get stuff ready
    for entry in entries:
+        entry.data = gzip.decompress(entry.data)
+        entry.data = json.loads(entry.data.decode('utf-8'))
        labels.append(entry.date.strftime("%m/%d/%Y"))
        for key in entry.data:
            items[key] = get_lot_name(key)
@@ -104,6 +107,8 @@ def items_graph(start, end):
@gm_level(3)
def currency_by_date(date):
    data = Reports.query.filter(Reports.date == date).filter(Reports.report_type == "currency").first().data
+    data = gzip.decompress(data)
+    data = json.loads(data.decode('utf-8'))
    return render_template('reports/currency/by_date.html.j2', data=data, date=date)
@@ -121,6 +126,8 @@ def currency_graph(start, end):
    datasets = []
    # get stuff ready
    for entry in entries:
+        entry.data = gzip.decompress(entry.data)
+        entry.data = json.loads(entry.data.decode('utf-8'))
        labels.append(entry.date.strftime("%m/%d/%Y"))
    for character in characters:
        data = []
@@ -155,6 +162,8 @@ def currency_graph(start, end):
@gm_level(3)
def uscore_by_date(date):
    data = Reports.query.filter(Reports.date == date).filter(Reports.report_type == "uscore").first().data
+    data = gzip.decompress(data)
+    data = json.loads(data.decode('utf-8'))
    return render_template('reports/uscore/by_date.html.j2', data=data, date=date)
@@ -172,6 +181,8 @@ def uscore_graph(start, end):
    datasets = []
    # get stuff ready
    for entry in entries:
+        entry.data = gzip.decompress(entry.data)
+        entry.data = json.loads(entry.data.decode('utf-8'))
        labels.append(entry.date.strftime("%m/%d/%Y"))
    for character in characters:
        data = []
@@ -260,7 +271,7 @@ def gen_item_report():
        current_app.logger.error(f"REPORT::ITEMS - {e}")
    new_report = Reports(
-        data=report_data,
+        data=gzip.compress(json.dumps(report_data).encode('utf-8')),
        report_type="items",
        date=date
    )
@@ -308,7 +319,7 @@ def gen_currency_report():
        current_app.logger.error(f"REPORT::CURRENCY - {e}")
    new_report = Reports(
-        data=report_data,
+        data=gzip.compress(json.dumps(report_data).encode('utf-8')),
        report_type="currency",
        date=date
    )
@@ -356,7 +367,7 @@ def gen_uscore_report():
        current_app.logger.error(f"REPORT::U-SCORE - {e}")
    new_report = Reports(
-        data=report_data,
+        data=gzip.compress(json.dumps(report_data).encode('utf-8')),
        report_type="uscore",
        date=date
    )
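
The same decompress-then-json.loads pair now appears in every report view above, and the compress side in each report generator. The commit keeps the calls inline; a pair of module-level helpers would centralize the encoding if it ever changes. A sketch only, not part of the commit; pack_report and unpack_report are hypothetical names:

import gzip
import json

def pack_report(report: dict) -> bytes:
    # serialize a report dict to gzip-compressed, UTF-8 encoded JSON bytes
    return gzip.compress(json.dumps(report).encode('utf-8'))

def unpack_report(blob: bytes) -> dict:
    # reverse of pack_report: decompress, decode, and parse back into a dict
    return json.loads(gzip.decompress(blob).decode('utf-8'))

# round-trip check
print(unpack_report(pack_report({"1234": 56})))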

View File

@@ -0,0 +1,164 @@
"""compressss reports

Revision ID: 1164e037907f
Revises: a6e42ef03da7
Create Date: 2023-11-18 01:38:00.127472

"""
from alembic import op
from sqlalchemy.orm.session import Session
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
from sqlalchemy.types import JSON
from sqlalchemy.ext.declarative import declarative_base
import gzip
import json

Base = declarative_base()

# revision identifiers, used by Alembic.
revision = '1164e037907f'
down_revision = 'a6e42ef03da7'
branch_labels = None
depends_on = None


class ReportsUpgradeNew(Base):
    __tablename__ = 'reports'
    __table_args__ = {'extend_existing': True}
    data = sa.Column(
        mysql.MEDIUMBLOB(),
        nullable=False
    )
    report_type = sa.Column(
        sa.VARCHAR(35),
        nullable=False,
        primary_key=True,
        autoincrement=False
    )
    date = sa.Column(
        sa.Date(),
        primary_key=True,
        autoincrement=False
    )

    def save(self, session):
        # persist this row via an explicit session (sqlalchemy has no module-level
        # `session`); unused by upgrade() below, which bulk-inserts instead
        session.add(self)
        session.commit()
        session.refresh(self)


class ReportsUpgradeOld(Base):
    __tablename__ = 'reports_old'
    __table_args__ = {'extend_existing': True}
    data = sa.Column(
        JSON(),
        nullable=False
    )
    report_type = sa.Column(
        sa.VARCHAR(35),
        nullable=False,
        primary_key=True,
        autoincrement=False
    )
    date = sa.Column(
        sa.Date(),
        primary_key=True,
        autoincrement=False
    )


class ReportsDowngradeOld(Base):
    __tablename__ = 'reports'
    __table_args__ = {'extend_existing': True}
    data = sa.Column(
        mysql.MEDIUMBLOB(),
        nullable=False
    )
    report_type = sa.Column(
        sa.VARCHAR(35),
        nullable=False,
        primary_key=True,
        autoincrement=False
    )
    date = sa.Column(
        sa.Date(),
        primary_key=True,
        autoincrement=False
    )

    def save(self, session):
        # same fix as above: take the session explicitly instead of the
        # nonexistent sa.session; downgrade() below does not call this either
        session.add(self)
        session.commit()
        session.refresh(self)


class ReportsDowngradeNew(Base):
    __tablename__ = 'reports_old'
    __table_args__ = {'extend_existing': True}
    data = sa.Column(
        JSON(),
        nullable=False
    )
    report_type = sa.Column(
        sa.VARCHAR(35),
        nullable=False,
        primary_key=True,
        autoincrement=False
    )
    date = sa.Column(
        sa.Date(),
        primary_key=True,
        autoincrement=False
    )


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.rename_table('reports', 'reports_old')
    bind = op.get_bind()
    session = Session(bind=bind)
    reports = session.query(ReportsUpgradeOld)
    op.create_table('reports',
        sa.Column('data', mysql.MEDIUMBLOB(), nullable=False),
        sa.Column('report_type', sa.VARCHAR(length=35), autoincrement=False, nullable=False),
        sa.Column('date', sa.Date(), autoincrement=False, nullable=False),
        sa.PrimaryKeyConstraint('report_type', 'date')
    )
    # insert records
    new_reports = []
    for report in reports:
        new_reports.append({
            "data": gzip.compress(json.dumps(report.data).encode('utf-8')),
            "report_type": report.report_type,
            "date": report.date
        })
    op.bulk_insert(ReportsUpgradeNew.__table__, new_reports)
    op.drop_table('reports_old')
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('reports')
    op.create_table('reports',
        sa.Column('data', JSON(), nullable=False),
        sa.Column('report_type', sa.VARCHAR(length=35), autoincrement=False, nullable=False),
        sa.Column('date', sa.Date(), autoincrement=False, nullable=False),
        sa.PrimaryKeyConstraint('report_type', 'date')
    )
    # ### end Alembic commands ###
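
After the migration is applied (with Flask-Migrate that is typically flask db upgrade, or alembic upgrade head when calling Alembic directly; which wrapper this project uses is an assumption here), a quick sanity check is to pull one migrated row and confirm it decompresses back into a dict. A sketch that assumes the Flask-SQLAlchemy handle is exposed as app.db and that it runs inside an application context:

import gzip
import json

from app import db          # assumption: the app package exposes the Flask-SQLAlchemy handle
from app.models import Reports

# must run inside the Flask application context for the query to work
row = db.session.query(Reports).first()
if row is not None:
    # a correctly migrated row decompresses back into the original JSON structure
    print(row.report_type, row.date,
          json.loads(gzip.decompress(row.data).decode('utf-8')))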