Compare commits

...

2 Commits

Author           SHA1        Message  Date
Dennis Kerschus  5b0b3badbb  m        2023-09-05 09:48:55 +02:00
Dennis Kerschus  3ad6805621  backup   2023-06-28 14:23:38 +02:00
13 changed files with 282 additions and 61 deletions


@@ -5,6 +5,10 @@ import FlowTest
 from sysjobs import *
 import sysjob2html
 import pandas as pd
+import logging
+from dateutil import tz
+from pytz import timezone
+import koerselsOverblikUtils
 
 def create_app(test_config=None):
     # create and configure the app
@@ -25,7 +29,9 @@ def create_app(test_config=None):
         os.makedirs(FlowAnalyserMain.instance_path)
     except OSError:
         pass
+    @FlowAnalyserMain.context_processor
+    def inject_debug():
+        return dict(debug=FlowAnalyserMain.debug)
     # a simple page that says hello
     @FlowAnalyserMain.route('/hello')
     def hello():
@@ -33,16 +39,37 @@ def create_app(test_config=None):
     @FlowAnalyserMain.route('/test')
     def test():
         listSysjobs=[]
-        jobs=['BI - Flow - Batch Start Daglig kl. 20.00','BI - Admin - Log Index Stats', 'BI - Flow - MDS Backup','BI - Admin - shrink staging filgruppe samt log filer'
-        ,'BI - Flow - Batch Slut Daglig kl. 20.00']
-        for job in jobs:
-            listSysjobs.append(list(Sysjobs.getSysjob(session,job))[0])
-        return render_template('index.html', test=listSysjobs)
+        listStartSlutjobs=[Sysjobs.getSysjob(session,'BI - Flow - Batch Start Daglig kl. 20.00',True)[0][0]]
+        listStartSlutjobs.append(Sysjobs.getSysjob(session,'BI - Flow - Batch Slut Daglig kl. 20.00',True)[0][0])
+        listStartSlutjobs=koerselsOverblikUtils.convertToAlike(listStartSlutjobs,0,30,0)
+        sysjobsAlike=koerselsOverblikUtils.timeRangeMerge(listStartSlutjobs, "Batch køretid",0,30,2)
+        listSysjobs.append(sysjobsAlike)
+        listSysjobs.extend(listStartSlutjobs)
+        sysjobs=Sysjobs.getSysjob(session,'% - Admin - %',False)
+        sysjobs=[a for a in sysjobs if a[0].name not in ["BI - Admin - Kill Blocking Queries","BI - Admin - Flow Job Restarter"]]
+        if(sysjobs!=None and len(sysjobs)>0):
+            sysjobs=koerselsOverblikUtils.convertToAlike(sysjobs,0,30,2)
+            listSysjobs.extend(sysjobs)
+        listSysjobs = [x for x in listSysjobs if len(x.getMedianDag(0,30,2))>0]
+        listSysjobs = sorted(listSysjobs, key=lambda x: x.getMedianDag(0,30,2)[0])
+        return render_template('index.html', test=listSysjobs)#,startSlut=listStartSlutjobs)
     @FlowAnalyserMain.route('/test3')
     def test3():
         sysjobs=(Sysjobs.getNattensKoersel(session))
         return render_template('index3.html', test3=sysjobs)
+    @FlowAnalyserMain.route('/test4')
+    def test4():
+        sessions=(biadmin_log_ActiveSessionsByInspari.getSessionTimeSpan(session,
+            datetime.fromisoformat('2023-06-01 23:14:16.817'),
+            datetime.fromisoformat('2023-06-02 03:14:18.817')))
+        return render_template('index4.html', test4=sessions)
+    @FlowAnalyserMain.route('/test4/<sessionID>/<logdate>')
+    def test4_getSession(sessionID,logdate):
+        sessions=(biadmin_log_ActiveSessionsByInspari.getSessionByID(session,int(sessionID),
+            datetime.fromisoformat(logdate).astimezone(tz.gettz('Europe/Copenhagen'))))
+        return render_template('index4.html', test4=sessions)
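
A note on the timezone handling in test4_getSession above: datetime.fromisoformat returns a naive datetime, and calling .astimezone on a naive value interprets it as the machine's local time, not as Copenhagen time. A minimal sketch of the difference, assuming the stored LogDate values are Copenhagen wall-clock times:

    from datetime import datetime
    from dateutil import tz

    cph = tz.gettz('Europe/Copenhagen')
    naive = datetime.fromisoformat('2023-06-02 03:14:18.817')
    as_local = naive.astimezone(cph)     # treats naive as the server's local time, then converts
    as_cph = naive.replace(tzinfo=cph)   # labels the value as Copenhagen wall-clock time

The two results only coincide when the server itself runs in Europe/Copenhagen.
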
     @FlowAnalyserMain.route('/test2')
     def test2():
         with Session(engine) as session:
@@ -55,5 +82,8 @@ def create_app(test_config=None):
     return FlowAnalyserMain
 
 engine=inn.getEngine("msdb")
+logging.basicConfig()
+logging.getLogger("sqlalchemy.engine").setLevel(logging.DEBUG)
+logging.getLogger("sqlalchemy.pool").setLevel(logging.DEBUG)
 with Session(engine) as session:
     FlowAnalyserMain=create_app()

Binary file not shown.

Binary file not shown.

Binary file not shown.

inn.py

@@ -7,7 +7,7 @@ import urllib
 def getEngine(database):
     server = 'bi-dsa-test\dsa' # to specify an alternate port
     username = 'admindenker'
-    password = 'biadmin#kode4rm2'
+    password = 'Min!sterTj€n€r€nhv3r$ta7s11g3'
     connection_string = "DRIVER={SQL Server};Database="+database+";SERVER="+server

koerselsOverblikUtils.py Normal file

@@ -0,0 +1,55 @@
+from datetime import datetime
+from typing import List
+from sysjobs import Sysjobs
+
+class SysjobsAlike:
+    name=None
+    startTime=None
+    endTime=None
+    stepId=None
+    antalDage=None
+    ugeDage=None
+    def __init__(self,name):
+        self.name=name
+    def getName(self):
+        return self.name
+    def getMedianDag(self,stepId,antalDage,ugeDage):
+        medianDag=[self.startTime,self.endTime]
+        return medianDag
+    def mergeOneMore(self,sysjobs:Sysjobs):
+        medianDag=sysjobs.getMedianDag(self.stepId,self.antalDage,self.ugeDage)
+        if self.startTime>medianDag[0]:
+            self.startTime=medianDag[0]
+        if self.endTime<medianDag[1]:
+            self.endTime=medianDag[1]
+
+def timeRangeMerge(mainList : List['Sysjobs'], name,stepId,antalDage,ugeDage):
+    sysjobalike = SysjobsAlike(name)
+    sysjobalike.stepId=stepId
+    sysjobalike.antalDage=antalDage
+    sysjobalike.ugeDage=ugeDage
+    for job in mainList:
+        medianDag=job.getMedianDag(stepId,antalDage,ugeDage)
+        if sysjobalike.startTime is None or sysjobalike.startTime>medianDag[0]:
+            sysjobalike.startTime=medianDag[0]
+        if sysjobalike.endTime is None or sysjobalike.endTime<medianDag[1]:
+            sysjobalike.endTime=medianDag[1]
+    return sysjobalike
+
+def convertToAlike(mainList : List,stepId,antalDage,ugeDage):
+    returnList=[]
+    for job in mainList:
+        if not isinstance(job,Sysjobs) and isinstance(job[0],Sysjobs):
+            job=job[0]
+        jobAlike=SysjobsAlike(job.name)
+        jobAlike.stepId=stepId
+        jobAlike.antalDage=antalDage
+        jobAlike.ugeDage=ugeDage
+        medianDag=job.getMedianDag(stepId,antalDage,ugeDage)
+        if len(medianDag)==0:
+            continue
+        jobAlike.startTime=medianDag[0]
+        jobAlike.endTime=medianDag[1]
+        returnList.append(jobAlike)
+    return returnList
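
Taken together, the two module functions above flatten ORM result rows into plain time spans and then collapse a list of spans into one envelope span. A hypothetical usage sketch (session is an open SQLAlchemy Session; the job pattern and the 0/30/2 step/day/weekday arguments mirror the /test route):

    import koerselsOverblikUtils
    from sysjobs import Sysjobs

    rows = Sysjobs.getSysjob(session, '% - Admin - %', False)           # LIKE lookup
    spans = koerselsOverblikUtils.convertToAlike(rows, 0, 30, 2)        # one SysjobsAlike per job
    total = koerselsOverblikUtils.timeRangeMerge(spans, 'Admin i alt', 0, 30, 2)
    print(total.getName(), total.startTime, total.endTime)              # earliest start, latest end
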

Binary file not shown.

static/ssisdb_erd23.pdf Normal file

Binary file not shown.


@@ -1,9 +1,8 @@
 #from __future__ import annotations
 from typing import List
-from sqlalchemy import BOOLEAN, Column,INTEGER,NVARCHAR, ForeignKey,Select, and_, or_,DateTime,text
+from sqlalchemy import BOOLEAN, Column,INTEGER,NVARCHAR, ForeignKey,Select, and_, or_,DateTime,text,VARCHAR,String,SMALLINT,DECIMAL,DATETIME,BIGINT
 from sqlalchemy.dialects.mssql import UNIQUEIDENTIFIER, TINYINT
-from sqlalchemy.orm import relationship,Session,Mapped,mapped_column
+from sqlalchemy.orm import relationship,Session,Mapped,mapped_column,contains_eager
 from sqlalchemy.ext.declarative import declarative_base
 from datetime import datetime,timedelta,time
 import json
@@ -11,6 +10,7 @@ from json import JSONEncoder
 #import sysjobhistory
 import inn
+
 class MyEncoder(JSONEncoder):
     def default(self, obj):
         if(isinstance(obj,Sysjobs) or isinstance(obj,Sysjobhistory)):
@@ -20,6 +20,16 @@ class MyEncoder(JSONEncoder):
 Base = declarative_base()
 
+class DataflowManagement_JobAfhaengighed(Base):
+    __tablename__ = "JobAfhaengighed"
+    __table_args__ = { "schema": "dataflowmanagement.flw" }
+    ParentJobID: Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"), primary_key=True)
+    ChildJobID: Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"), primary_key=True)
+    OprettetDato: Mapped[datetime] = Column(DateTime)
+    AendretDato: Mapped[datetime] = Column(DateTime)
+    # many-to-one ends of the dependency edge; each back_populates must name
+    # the mutual attribute on Sysjobs (Sysjobs.parents <-> child, Sysjobs.children <-> parent)
+    parent: Mapped["Sysjobs"] = relationship(back_populates="children",foreign_keys=[ParentJobID])
+    child: Mapped["Sysjobs"] = relationship(back_populates="parents",foreign_keys=[ChildJobID])
 class Sysjobs(Base):
     __tablename__ = "sysjobs"
     job_id: Mapped[str] = mapped_column(UNIQUEIDENTIFIER,primary_key=True)
@@ -27,6 +37,24 @@ class Sysjobs(Base):
     enabled=Column(TINYINT)
     sysjobhistories: Mapped[List["Sysjobhistory"]] = relationship(back_populates="sysjob")
     dataflow_jobs: Mapped[List["DataflowManagement_JobListe"]] = relationship(back_populates="sysjob")
+    sysjobsteps: Mapped[List["msdb_sysjobsteps"]] = relationship(back_populates="sysjob")
+    # children: Mapped[List["Sysjobs"]] = relationship(secondary="JobAfhaengighed",back_populates="parents",
+    #                primaryjoin="Sysjobs.job_id==DataflowManagement_JobAfhaengighed.ParentJobID",
+    #                secondaryjoin="Sysjobs.job_id==DataflowManagement_JobAfhaengighed.ChildJobID")
+    # parents: Mapped[List["Sysjobs"]] =relationship(back_populates="children",
+    #                secondary="JobAfhaengighed",
+    #                primaryjoin="Sysjobs.job_id==DataflowManagement_JobAfhaengighed.ChildJobID",
+    #                secondaryjoin="Sysjobs.job_id==DataflowManagement_JobAfhaengighed.ParentJobID"
+    #                )
+    # children: Mapped[List["Sysjobs"]] = relationship(secondary='JobAfhaengighed',back_populates="parents",
+    #                primaryjoin=job_id==DataflowManagement_JobAfhaengighed.ParentJobID,
+    #                secondaryjoin=job_id==DataflowManagement_JobAfhaengighed.ChildJobID)
+    # parents: Mapped[List["Sysjobs"]] =relationship(back_populates="children",
+    #                secondary='JobAfhaengighed',
+    #                primaryjoin=job_id==DataflowManagement_JobAfhaengighed.ChildJobID,
+    #                secondaryjoin=job_id==DataflowManagement_JobAfhaengighed.ParentJobID
+    #                )
     parents: Mapped[List["DataflowManagement_JobAfhaengighed"]] =relationship(back_populates="child", foreign_keys="DataflowManagement_JobAfhaengighed.ChildJobID")
     children: Mapped[List["DataflowManagement_JobAfhaengighed"]] = relationship(back_populates="parent", foreign_keys="DataflowManagement_JobAfhaengighed.ParentJobID")
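
With the commented-out secondary= attempts above abandoned, the dependency edge is mapped as an explicit association object, so the graph is walked through DataflowManagement_JobAfhaengighed rows rather than directly from job to job. A small traversal sketch, assuming job is a Sysjobs instance attached to an open Session:

    for link in job.children:                  # rows where job is the parent
        print(job.name, '->', link.child.name)
    for link in job.parents:                   # rows where job is the child
        print(link.parent.name, '->', job.name)
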
@@ -53,13 +81,13 @@ class Sysjobs(Base):
     def getTest(self,session: Session):
         stmt = Select(Sysjobs).join(Sysjobhistory).where(Sysjobhistory.run_date>20230601).distinct()
         print(stmt)
-        with Session(engine) as session:
+        with Session(session) as session:
             row : Sysjobs
             res = session.execute(stmt).all()
             for row in res:
                 print(row.Sysjobs.name + ' ' + str(row.Sysjobhistory.run_date) + ' ' + str(row.Sysjobhistory.run_time))
-    def getMedianDag(self,ugeDag: int,stepID: int,antalDage: int):
+    def getMedianDag(self,stepID: int,antalDage: int,ugeDag: int=0):
         session=Session.object_session(self)
         sqlStr='''DECLARE @dage int = :antalDage
 DECLARE @job VARCHAR(200) = :selfName
@@ -78,7 +106,7 @@ FROM
         j.name as job_name,
         run_datetime = CONVERT(DATETIME, '1970-01-01') +
             (run_time * 9 + run_time % 10000 * 6 + run_time % 100 * 10) / 216e4,
-        run_duration = (run_duration/10000*3600 + (run_duration/100)%100*60 + run_duration%100)*100
+        run_duration = (run_duration/10000*3600 + (run_duration/100)%100*60 + run_duration%100)
     from msdb..sysjobhistory h
     inner join msdb..sysjobs j
         ON h.job_id = j.job_id
@@ -94,14 +122,21 @@ FROM
     ) t
 )
-SELECT [ctedateconversion].[MedianRun_datetime] AS MedianRun_datetime_SpecificDate,[ctedateconversion].[run_duration]
+SELECT [ctedateconversion].[MedianRun_datetime] AS MedianRun_datetime_SpecificDate,DATEADD(SECOND,[ctedateconversion].[run_duration],[ctedateconversion].[MedianRun_datetime])
 FROM [ctedateconversion]
 WHERE [ctedateconversion].[MedianRun_datetime] = [ctedateconversion].[run_datetime]
 GROUP BY [ctedateconversion].[MedianRun_datetime],
     [ctedateconversion].[run_duration]'''
         stmt=text(sqlStr).params(antalDage=antalDage,selfName=self.name,ugeDag=ugeDag,stepID=stepID)
         res=session.execute(stmt).all()
-        return res
+        resResult=[]
+        if(len(res)>0):
+            resResult=list(res[0])
+            if(resResult[0]<datetime(1970, 1, 1, 20, 0,0) and self.name!='BI - Flow - Batch Start Daglig kl. 20.00'):
+                resResult[0]+= timedelta(days=1)
+            if(resResult[1]<datetime(1970, 1, 1, 20, 0,0) and self.name!='BI - Flow - Batch Start Daglig kl. 20.00'):
+                resResult[1]+= timedelta(days=1)
+        return resResult
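
The SQL above leans on msdb's packed integer encodings: run_time and run_duration are HHMMSS stuffed into an int (93055 means 09:30:55), and the commit also drops a stray *100 on run_duration so the DATEADD in the SELECT adds genuine seconds. A quick sanity check of the two conversions, mirroring the SQL arithmetic in Python:

    def hhmmss_to_seconds(v: int) -> int:
        # run_duration conversion: packed HHMMSS -> seconds
        return (v // 10000) * 3600 + (v // 100) % 100 * 60 + v % 100

    def hhmmss_to_day_fraction(t: int) -> float:
        # run_time conversion: packed HHMMSS -> fraction of a day, as in the CTE
        return (t * 9 + t % 10000 * 6 + t % 100 * 10) / 216e4

    assert hhmmss_to_seconds(93055) == 9*3600 + 30*60 + 55
    assert hhmmss_to_day_fraction(93055) == hhmmss_to_seconds(93055) / 86400
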
     def printParent(self, sysjobs:List['Sysjobs']):
@@ -118,22 +153,36 @@ GROUP BY [ctedateconversion].[MedianRun_datetime],
     def getNattensKoersel(session) -> List['Sysjobs']:
         natStat=(datetime.today()-timedelta(days=1)).replace(hour=20,minute=0,second=0,microsecond=0)
         resReturn: List['Sysjobs'] = list()
-        stmt = Select(Sysjobs,Sysjobhistory).join(DataflowManagement_JobListe).join(Sysjobhistory).join(DataflowManagement_JobsForExecution).where(Sysjobhistory.step_id==0).where(DataflowManagement_JobListe.Aktiv==1).where(or_(and_(Sysjobhistory.run_date>=int((natStat.strftime('%Y%m%d'))),(Sysjobhistory.run_time>=int((natStat.strftime('%H%M%S'))))),Sysjobhistory.run_date>=int((datetime.today().strftime('%Y%m%d'))))).distinct()
+        stmt = Select(Sysjobs).join(DataflowManagement_JobListe).join(Sysjobhistory).join(DataflowManagement_JobsForExecution).where(Sysjobhistory.step_id==0).where(DataflowManagement_JobListe.Aktiv==1).where(or_(and_(Sysjobhistory.run_date>=int((natStat.strftime('%Y%m%d'))),(Sysjobhistory.run_time>=int((natStat.strftime('%H%M%S'))))),Sysjobhistory.run_date>=int((datetime.today().strftime('%Y%m%d'))))).distinct()
         row : Sysjobs
         res = session.execute(stmt).all()
         return res
-    def getSysjob(session, sysjobName: str):
+    def getSysjob(session, sysjobName: str, fullName:bool=True,historikDage: int=0):
         resReturn=None
+        natStat=(datetime.today()-timedelta(days=historikDage)).replace(hour=20,minute=0,second=0,microsecond=0)
         if(sysjobName!=None):
-            stmt = Select(Sysjobs).where(Sysjobs.name==sysjobName)
+            stmt=Select(Sysjobs)
+            if(historikDage>0):
+                stmt=stmt.join(Sysjobhistory).where(or_(and_(Sysjobhistory.run_date>=int((natStat.strftime('%Y%m%d'))),(Sysjobhistory.run_time>=int((natStat.strftime('%H%M%S'))))),Sysjobhistory.run_date>=int((datetime.today().strftime('%Y%m%d')))))
+            if(fullName==False):
+                stmt = stmt.where(Sysjobs.name.like(sysjobName))
+            else:
+                stmt = stmt.where(Sysjobs.name==sysjobName)
+            if(historikDage>0):
+                stmt=stmt.options(contains_eager(Sysjobs.sysjobhistories))
             try:
-                resReturn=session.execute(stmt)
+                resReturn=session.execute(stmt).unique().all()
             except:
                 session.rollback()
         return resReturn
+    def getSmallestWaitOfParent(self):
+        return
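
The reworked getSysjob above now covers exact and LIKE lookups and can eager-load recent history in the same round trip. Hypothetical calls (session is an open Session; the names and the 30-day window are illustrative):

    exact = Sysjobs.getSysjob(session, 'BI - Flow - Batch Start Daglig kl. 20.00')
    pattern = Sysjobs.getSysjob(session, '% - Admin - %', False)          # SQL LIKE match
    with_hist = Sysjobs.getSysjob(session, '% - Admin - %', False, 30)    # joins sysjobhistory, contains_eager
    job = exact[0][0]                                                     # each result row wraps the entity
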
 class Sysjobhistory(Base):
     __tablename__ = "sysjobhistory"
     instance_id=Column(INTEGER,primary_key=True)
@@ -206,21 +255,14 @@ class DataflowManagement_JobListe(Base):
     Aktiv=Column(BOOLEAN)
     sysjob: Mapped["Sysjobs"] = relationship(back_populates="dataflow_jobs")
 
-class DataflowManagement_JobAfhaengighed(Base):
-    __tablename__ = "JobAfhaengighed"
-    __table_args__ = { "schema": "dataflowmanagement.flw" }
-    ParentJobID: Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"), primary_key=True)
-    ChildJobID: Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"), primary_key=True)
-    OprettetDato: Mapped[datetime] = Column(DateTime)
-    AendretDato: Mapped[datetime] = Column(DateTime)
-    parent: Mapped[List["Sysjobs"]] = relationship(back_populates="parents",foreign_keys=[ParentJobID])
-    child: Mapped[List["Sysjobs"]] = relationship(back_populates="children",foreign_keys=[ChildJobID])
-
 class DataflowManagement_JobMasterSetup(Base):
     __tablename__ = "JobMasterSetup"
     __table_args__ = { "schema": "dataflowmanagement.flw" }
     JobID: Mapped[str] = mapped_column(UNIQUEIDENTIFIER,primary_key=True)
     CurrentBatchID: Mapped[int] = mapped_column(INTEGER)
+    MinMellemAfvikling: Mapped[int] = mapped_column(INTEGER)
 
     def getCurrentBatchId(session):
         stmt = Select(DataflowManagement_JobMasterSetup)
@@ -231,5 +273,49 @@ class DataflowManagement_JobsForExecution(Base):
     __tablename__ = "JobsForExecution"
     __table_args__ = { "schema": "dataflowmanagement.flw" }
     JobID: Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"),primary_key=True)
-    BatchID: Mapped[int] = mapped_column("JobMasterSetup.CurrentBatchID")
+    BatchID: Mapped[int] = mapped_column(INTEGER)
+    ExecutionID: Mapped[int] = mapped_column(primary_key=True)
+
+class msdb_sysjobsteps(Base):
+    __tablename__ = "sysjobsteps"
+    __table_args__ = { "schema": "msdb.dbo" }
+    job_id:Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"),primary_key=True)
+    step_id: Mapped[int] = mapped_column(INTEGER,primary_key=True)
+    step_uid: Mapped[str] = mapped_column(UNIQUEIDENTIFIER)
+    command:Mapped[str] = mapped_column(NVARCHAR)
+    sysjob: Mapped["Sysjobs"] = relationship(back_populates="sysjobsteps")
+
+class biadmin_log_ActiveSessionsByInspari(Base):
+    __tablename__="ActiveSessionsByInspari"
+    __table_args__={"schema":"biadmin.log"}
+    session_id:Mapped[int]=mapped_column(SMALLINT,primary_key=True)
+    blocking_session_id:Mapped[int]=mapped_column(SMALLINT)
+    ElapsedTimeSec:Mapped[float]=mapped_column(DECIMAL)
+    ElapsedTimeMinutes:Mapped[float]=mapped_column(DECIMAL)
+    request_start_time:Mapped[datetime]=mapped_column(DateTime)
+    DatabaseName:Mapped[str]=mapped_column(NVARCHAR)
+    WaitTimeMs:Mapped[int]=mapped_column(INTEGER)
+    wait_type:Mapped[str]=mapped_column(NVARCHAR)
+    host_process_id:Mapped[int]=mapped_column(INTEGER)
+    MemoryGrantRequestTime:Mapped[datetime]=mapped_column(DateTime)
+    MemoryGrantGrantedTime:Mapped[datetime]=mapped_column(DateTime)
+    LogDate:Mapped[datetime]=mapped_column(DATETIME,primary_key=True)
+
+    def getSessionTimeSpan(session:Session,beginTime:datetime,Endtime:datetime):
+        stmt=Select(biadmin_log_ActiveSessionsByInspari).where(and_(biadmin_log_ActiveSessionsByInspari.request_start_time>=beginTime,biadmin_log_ActiveSessionsByInspari.request_start_time<=Endtime))
+        res=session.execute(stmt).all()
+        return res
+
+    def getSessionByID(session:Session,sessionID:int,logDate:datetime=None):
+        stmt=Select(biadmin_log_ActiveSessionsByInspari).where(and_(biadmin_log_ActiveSessionsByInspari.session_id==sessionID,biadmin_log_ActiveSessionsByInspari.LogDate==logDate))
+        res=session.execute(stmt).all()
+        return res
+
+#class AllExecutionMessages(Base):
+
+class BiAdmin_log_WaitingTasks(Base):
+    __tablename__="WaitingTasks"
+    __table_args__={"schema":"biadmin.log"}
+    session_id:Mapped[int]=mapped_column(SMALLINT,primary_key=True)
+    wait_duration_ms:Mapped[int]=mapped_column(BIGINT)
+    wait_type:Mapped[str]=mapped_column(NVARCHAR)
+    blocking_session_id:Mapped[int]=mapped_column(SMALLINT)
+    LogDate:Mapped[datetime]=mapped_column(DATETIME,primary_key=True)


@@ -4,43 +4,30 @@
     <meta charset="UTF-8">
     <title>FlaskBlog</title>
     <script type="text/javascript" src="https://www.gstatic.com/charts/loader.js"></script>
     <script type="text/javascript">
-      google.charts.load('current', {'packages':['gantt']});
+      google.charts.load('current', {'packages':['timeline']});
       google.charts.setOnLoadCallback(drawChart);
 
       function drawChart() {
+        var container = document.getElementById('chart_div');
+        var chart = new google.visualization.Timeline(container);
         var data = new google.visualization.DataTable();
-        data.addColumn('string', 'Task ID');
-        data.addColumn('string', 'Task Name');
-        data.addColumn('string', 'Resource');
-        data.addColumn('date', 'Start Date');
-        data.addColumn('date', 'End Date');
-        data.addColumn('number', 'Duration');
-        data.addColumn('number', 'Percent Complete');
-        data.addColumn('string', 'Dependencies');
+        data.addColumn({ type: 'string', id: 'President' });
+        data.addColumn({ type: 'string', id: 'President2' });
+        data.addColumn({ type: 'date', id: 'Start' });
+        data.addColumn({ type: 'date', id: 'End' });
         data.addRows([
           {% for job in test %}
-          ['{{ job[0].name }}','{{ job[0].name }}','{{ job[0].name.split(' ')[4][:3] }}', new Date("{{ job[0].getMedianDag(2,0,30)[0][0] }}"),null,{{ job[0].getMedianDag(2,0,30)[0][1] }},100,''],
+          ['{{ job.name }}','{{ job.name }}',new Date("{{ job.getMedianDag(0,30,2)[0] }}"),new Date("{{ job.getMedianDag(0,30,2)[1] }}")],
           {% endfor %}
-          ['BI - Admin - Log Index Stats2','BI - Admin - Log Index Stats','Log', [new Date("1970-01-01 17:00:00"),new Date("1970-01-01 18:00:00")],null,259000,100,''],
         ]);
-        var paddingHeight = 50;
-        var rowHeight = data.getNumberOfRows() * 25;
-        var chartHeight = rowHeight + paddingHeight;
+        var calHeight = data.getNumberOfRows() * 45
         var options = {
-          height: chartHeight,
-          gantt: {
-            sortTasks: true,
-            trackHeight: 30
-          }
-        };
-        var chart = new google.visualization.Gantt(document.getElementById('chart_div'));
+          height: calHeight,
+          timeline: { groupByRowLabel: true }
+        };
         chart.draw(data, options);
       }

templates/index4.html Normal file

@@ -0,0 +1,62 @@
+<html>
+<head>
+    <script type="text/javascript" src="https://www.gstatic.com/charts/loader.js"></script>
+    <script type="text/javascript">
+        google.charts.load('current', {'packages':['gantt']});
+        google.charts.setOnLoadCallback(drawChart);
+        var refData = new Array()
+        {% for job in test4 %}
+        refData.push([{{ job[0].session_id }},new Date("{{ job[0].LogDate }}")])
+        {% endfor %}
+        function drawChart() {
+            var data = new google.visualization.DataTable();
+            data.addColumn('string', 'Task ID');
+            data.addColumn('string', 'Task Name');
+            data.addColumn('string', 'Resource');
+            data.addColumn('date', 'Start Date');
+            data.addColumn('date', 'End Date');
+            data.addColumn('number', 'Duration');
+            data.addColumn('number', 'Percent Complete');
+            data.addColumn('string', 'Dependencies');
+            data.addRows([
+                {% for job in test4 %}
+                ['{{ job[0].session_id }}','{{ job[0].DatabaseName }}','{{ job[0].DatabaseName }}', new Date("{{ job[0].request_start_time }}"),new Date(new Date("{{ (job[0].request_start_time) }}").setSeconds(new Date("{{ job[0].request_start_time }}").getSeconds()+{{ job[0].ElapsedTimeSec }})),null,null,null],
+                {% endfor %}
+            ]);
+            var paddingHeight = 50;
+            var rowHeight = data.getNumberOfRows() * 25;
+            var chartHeight = rowHeight + paddingHeight;
+            var options = {
+                height: chartHeight,
+                gantt: {
+                    sortTasks: true,
+                    trackHeight: 30
+                }
+            };
+            var chart = new google.visualization.Gantt(document.getElementById('chart_div'));
+            google.visualization.events.addListener(chart, 'select', myClickHandler);
+            function myClickHandler(){
+                var selection = chart.getSelection();
+                var ses=refData[selection[0].row][0]
+                var ldat=refData[selection[0].row][1].toISOString()
+                var turl='{{ url_for('test4_getSession' ,sessionID='ses',logdate='ldat') }}'
+                turl = turl.replace('ses', ses);
+                turl = turl.replace('ldat', ldat);
+                window.location.assign(turl)
+            }
+            chart.draw(data, options);
+        }
+    </script>
+</head>
+<body>
+    <div id="chart_div"></div>
+</body>
+</html>

test.js Normal file

@@ -0,0 +1 @@
+new Date()