Dennis Kerschus 2023-06-28 14:23:38 +02:00
parent 127243803e
commit 3ad6805621
5 changed files with 81 additions and 48 deletions

View File

@ -33,11 +33,18 @@ def create_app(test_config=None):
@FlowAnalyserMain.route('/test')
def test():
listSysjobs=[]
jobs=['BI - Flow - Batch Start Daglig kl. 20.00','BI - Admin - Log Index Stats', 'BI - Flow - MDS Backup','BI - Admin - shrink staging filgruppe samt log filer'
,'BI - Flow - Batch Slut Daglig kl. 20.00']
for job in jobs:
listSysjobs.append(list(Sysjobs.getSysjob(session,job))[0])
return render_template('index.html', test=listSysjobs)
listStartSlutjobs=[]
listStartSlutjobs.append(Sysjobs.getSysjob(session,'BI - Flow - Batch Start Daglig kl. 20.00',True)[0])
listStartSlutjobs.append(Sysjobs.getSysjob(session,'BI - Flow - Batch Slut Daglig kl. 20.00',True)[0])
listSysjobs.append(listStartSlutjobs[0][0])
sysjobs=Sysjobs.getSysjob(session,'% - Admin - %',False)
if(sysjobs!=None and len(sysjobs)>0):
for i in sysjobs:
listSysjobs.append(i[0])
listSysjobs.append(listStartSlutjobs[1][0])
listSysjobs = [x for x in listSysjobs if len(x.getMedianDag(0,30,2))>0]
listSysjobs = sorted(listSysjobs, key=lambda x: x.getMedianDag(0,30,2)[0])
return render_template('index.html', test=listSysjobs,startSlut=listStartSlutjobs)
@FlowAnalyserMain.route('/test3')
def test3():
sysjobs=(Sysjobs.getNattensKoersel(session))
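For context on the reworked /test route above: the getMedianDag parameters were reordered to (stepID, antalDage, ugeDag), so getMedianDag(0, 30, 2) reads as step 0, 30 days of history, weekday 2 (ugeDag is Danish for "weekday"; its exact encoding is not visible in this hunk). A minimal hedged sketch of the filter-and-sort step, assuming jobs is a list of Sysjobs objects:

# hedged sketch; getMedianDag returns [] when a job has no qualifying history,
# otherwise [median_start, median_end] as datetimes
jobs = [j for j in jobs if len(j.getMedianDag(0, 30, 2)) > 0]
jobs = sorted(jobs, key=lambda j: j.getMedianDag(0, 30, 2)[0])  # order by median start time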

Binary file not shown.

View File

@ -1,9 +1,9 @@
#from __future__ import annotations
from typing import List
from sqlalchemy import BOOLEAN, Column,INTEGER,NVARCHAR, ForeignKey,Select, and_, or_,DateTime,text
from sqlalchemy import BOOLEAN, Column,INTEGER,NVARCHAR, ForeignKey,Select, and_, or_,DateTime,text,VARCHAR
from sqlalchemy.dialects.mssql import UNIQUEIDENTIFIER, TINYINT
from sqlalchemy.orm import relationship,Session,Mapped,mapped_column
from sqlalchemy.orm import relationship,Session,Mapped,mapped_column,contains_eager
from sqlalchemy.ext.declarative import declarative_base
from datetime import datetime,timedelta,time
import json
@ -27,6 +27,8 @@ class Sysjobs(Base):
enabled=Column(TINYINT)
sysjobhistories: Mapped[List["Sysjobhistory"]] = relationship(back_populates="sysjob")
dataflow_jobs: Mapped[List["DataflowManagement_JobListe"]] = relationship(back_populates="sysjob")
sysjobsteps: Mapped[List["msdb_sysjobsteps"]] = relationship(back_populates="sysjob")
parents: Mapped[List["DataflowManagement_JobAfhaengighed"]] =relationship(back_populates="child", foreign_keys="DataflowManagement_JobAfhaengighed.ChildJobID")
children: Mapped[List["DataflowManagement_JobAfhaengighed"]] = relationship(back_populates="parent", foreign_keys="DataflowManagement_JobAfhaengighed.ParentJobID")
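The self-referential dependency mapping is easy to read backwards: parents collects the JobAfhaengighed edges in which this job appears as ChildJobID, children the edges in which it appears as ParentJobID. A hedged traversal sketch, assuming job is a Sysjobs instance and that the edge class exposes parent/child attributes as the back_populates names suggest (they are not shown in this hunk):

# edges where this job is the child -> the jobs it waits on
for edge in job.parents:
    print(edge.parent.name)   # the other end of the edge (its parent job)
# edges where this job is the parent -> the jobs that wait on it
for edge in job.children:
    print(edge.child.name)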
@ -59,7 +61,7 @@ class Sysjobs(Base):
for row in res:
print(row.Sysjobs.name + ' ' + str(row.Sysjobhistory.run_date) + ' ' + str(row.Sysjobhistory.run_time))
def getMedianDag(self,ugeDag: int,stepID: int,antalDage: int):
def getMedianDag(self,stepID: int,antalDage: int,ugeDag: int=0):
session=Session.object_session(self)
sqlStr='''DECLARE @dage int = :antalDage
DECLARE @job VARCHAR(200) = :selfName
@ -78,7 +80,7 @@ FROM
j.name as job_name,
run_datetime = CONVERT(DATETIME, '1970-01-01') +
(run_time * 9 + run_time % 10000 * 6 + run_time % 100 * 10) / 216e4,
run_duration = (run_duration/10000*3600 + (run_duration/100)%100*60 + run_duration%100)*100
run_duration = (run_duration/10000*3600 + (run_duration/100)%100*60 + run_duration%100)
from msdb..sysjobhistory h
inner join msdb..sysjobs j
ON h.job_id = j.job_id
@ -94,14 +96,21 @@ FROM
) t
)
SELECT [ctedateconversion].[MedianRun_datetime] AS MedianRun_datetime_SpecificDate,[ctedateconversion].[run_duration]
SELECT [ctedateconversion].[MedianRun_datetime] AS MedianRun_datetime_SpecificDate,DATEADD(SECOND,[ctedateconversion].[run_duration],[ctedateconversion].[MedianRun_datetime])
FROM [ctedateconversion]
WHERE [ctedateconversion].[MedianRun_datetime] = [ctedateconversion].[run_datetime]
GROUP BY [ctedateconversion].[MedianRun_datetime],
[ctedateconversion].[run_duration]'''
stmt=text(sqlStr).params(antalDage=antalDage,selfName=self.name,ugeDag=ugeDag,stepID=stepID)
res=session.execute(stmt).all()
return res
resResult=[]
if(len(res)>0):
resResult=list(res[0])
if(resResult[0]<datetime(1970, 1, 1, 20, 0,0) and self.name!='BI - Flow - Batch Start Daglig kl. 20.00'):
resResult[0]+= timedelta(days=1)
if(resResult[1]<datetime(1970, 1, 1, 20, 0,0) and self.name!='BI - Flow - Batch Start Daglig kl. 20.00'):
resResult[1]+= timedelta(days=1)
return resResult
def printParent(self, sysjobs:List['Sysjobs']):
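The raw SQL in getMedianDag leans on msdb's packed integer formats: run_time and run_duration are both stored as HHMMSS integers, and (run_time*9 + run_time%10000*6 + run_time%100*10)/216e4 expands to H/24 + M/1440 + S/86400, i.e. the run time as a fraction of a day past midnight. A hedged pure-Python equivalent of those conversions and of the 20:00 wrap applied to the result (illustrative only, with invented sample values; the query does the real work):

from datetime import datetime, timedelta

def hhmmss_to_seconds(value: int) -> int:
    # msdb packs run_time/run_duration as HHMMSS integers
    return value // 10000 * 3600 + value // 100 % 100 * 60 + value % 100

def run_datetime(run_time: int) -> datetime:
    # same result as CONVERT(DATETIME,'1970-01-01') + the fraction-of-day expression
    return datetime(1970, 1, 1) + timedelta(seconds=hhmmss_to_seconds(run_time))

start = run_datetime(231530)                               # 1970-01-01 23:15:30
end = start + timedelta(seconds=hhmmss_to_seconds(1205))   # mirrors DATEADD(SECOND, run_duration, ...)

# post-processing: the nightly batch starts at 20:00, so median timestamps
# before 20:00 are pushed to the next day (the batch-start job is exempt)
batch_start = datetime(1970, 1, 1, 20, 0, 0)
if start < batch_start:
    start += timedelta(days=1)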
@ -118,21 +127,34 @@ GROUP BY [ctedateconversion].[MedianRun_datetime],
def getNattensKoersel(session) -> List['Sysjobs']:
natStat=(datetime.today()-timedelta(days=1)).replace(hour=20,minute=0,second=0,microsecond=0)
resReturn: List['Sysjobs'] = list()
stmt = Select(Sysjobs,Sysjobhistory).join(DataflowManagement_JobListe).join(Sysjobhistory).join(DataflowManagement_JobsForExecution).where(Sysjobhistory.step_id==0).where(DataflowManagement_JobListe.Aktiv==1).where(or_(and_(Sysjobhistory.run_date>=int((natStat.strftime('%Y%m%d'))),(Sysjobhistory.run_time>=int((natStat.strftime('%H%M%S'))))),Sysjobhistory.run_date>=int((datetime.today().strftime('%Y%m%d'))))).distinct()
stmt = Select(Sysjobs).join(DataflowManagement_JobListe).join(Sysjobhistory).join(DataflowManagement_JobsForExecution).where(Sysjobhistory.step_id==0).where(DataflowManagement_JobListe.Aktiv==1).where(or_(and_(Sysjobhistory.run_date>=int((natStat.strftime('%Y%m%d'))),(Sysjobhistory.run_time>=int((natStat.strftime('%H%M%S'))))),Sysjobhistory.run_date>=int((datetime.today().strftime('%Y%m%d'))))).distinct()
row : Sysjobs
stmt.options(contains_eager(Sysjobs.sysjobhistories), contains_eager(Sysjobs.parents), contains_eager(DataflowManagement_JobAfhaengighed.parent))
res = session.execute(stmt).all()
return res
def getSysjob(session, sysjobName: str):
def getSysjob(session, sysjobName: str, fullName:bool=True,historikDage: int=0):
resReturn=None
natStat=(datetime.today()-timedelta(days=historikDage)).replace(hour=20,minute=0,second=0,microsecond=0)
if(sysjobName!=None):
stmt = Select(Sysjobs).where(Sysjobs.name==sysjobName)
stmt=Select(Sysjobs)
if(historikDage>0):
stmt=stmt.join(Sysjobhistory).where(or_(and_(Sysjobhistory.run_date>=int((natStat.strftime('%Y%m%d'))),(Sysjobhistory.run_time>=int((natStat.strftime('%H%M%S'))))),Sysjobhistory.run_date>=int((datetime.today().strftime('%Y%m%d')))))
if(fullName==False):
stmt = stmt.where(Sysjobs.name.like(sysjobName))
else:
stmt = stmt.where(Sysjobs.name==sysjobName)
if(historikDage>0):
stmt=stmt.options(contains_eager(Sysjobs.sysjobhistories))
try:
resReturn=session.execute(stmt)
resReturn=session.execute(stmt).unique().all()
except:
session.rollback()
return resReturn
def getSmallestWaitOfParent(self):
return
class Sysjobhistory(Base):
__tablename__ = "sysjobhistory"
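A hedged usage sketch for the extended getSysjob signature (the job name and wildcard come from the route above; historikDage=2 is an invented value):

# exact-name lookup, no history filter; results are (Sysjobs,) row tuples, or None on error
rows = Sysjobs.getSysjob(session, 'BI - Flow - Batch Start Daglig kl. 20.00', True)
start_job = rows[0][0] if rows else None

# LIKE lookup plus history eagerly loaded from 20:00 two days back
rows = Sysjobs.getSysjob(session, '% - Admin - %', fullName=False, historikDage=2)
for (job,) in rows or []:
    print(job.name, len(job.sysjobhistories))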
@ -206,6 +228,7 @@ class DataflowManagement_JobListe(Base):
Aktiv=Column(BOOLEAN)
sysjob: Mapped["Sysjobs"] = relationship(back_populates="dataflow_jobs")
class DataflowManagement_JobAfhaengighed(Base):
__tablename__ = "JobAfhaengighed"
__table_args__ = { "schema": "dataflowmanagement.flw" }
@ -221,6 +244,7 @@ class DataflowManagement_JobMasterSetup(Base):
__table_args__ = { "schema": "dataflowmanagement.flw" }
JobID: Mapped[str] = mapped_column(UNIQUEIDENTIFIER,primary_key=True)
CurrentBatchID: Mapped[int] = mapped_column(INTEGER)
MinMellemAfvikling: Mapped[int] = mapped_column(INTEGER)
def getCurrentBatchId(session):
stmt = Select(DataflowManagement_JobMasterSetup)
@ -231,5 +255,20 @@ class DataflowManagement_JobsForExecution(Base):
__tablename__ = "JobsForExecution"
__table_args__ = { "schema": "dataflowmanagement.flw" }
JobID: Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"),primary_key=True)
BatchID: Mapped[int] = mapped_column("JobMasterSetup.CurrentBatchID")
BatchID: Mapped[int] = mapped_column(INTEGER)
ExecutionID: Mapped[int] = mapped_column(primary_key=True)
class msdb_sysjobsteps(Base):
__tablename__ = "sysjobsteps"
__table_args__ = { "schema": "msdb.dbo" }
job_id:Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"),primary_key=True)
step_id: Mapped[int] = mapped_column(INTEGER,primary_key=True)
step_uid: Mapped[str] = mapped_column(UNIQUEIDENTIFIER)
command:Mapped[str] = mapped_column(NVARCHAR)
sysjob: Mapped["Sysjobs"] = relationship(back_populates="sysjobsteps")
#class AllExecutionMessages(Base):
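For reference, the run-window filter shared by getNattensKoersel and the historikDage branch of getSysjob works on the same packed integers; a hedged sketch of the cut-off values it builds:

from datetime import datetime, timedelta

# 20:00 yesterday (or historikDage days back) split into date/time integers,
# matching msdb's run_date (YYYYMMDD) and run_time (HHMMSS) columns
nat_start = (datetime.today() - timedelta(days=1)).replace(hour=20, minute=0, second=0, microsecond=0)
date_floor = int(nat_start.strftime('%Y%m%d'))   # e.g. 20230627
time_floor = int(nat_start.strftime('%H%M%S'))   # 200000
# a history row counts as part of the night run when
#   (run_date >= date_floor and run_time >= time_floor) or run_date >= today's YYYYMMDD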

View File

@ -5,43 +5,30 @@
<title>FlaskBlog</title>
<script type="text/javascript" src="https://www.gstatic.com/charts/loader.js"></script>
<script type="text/javascript">
google.charts.load('current', {'packages':['gantt']});
google.charts.load('current', {'packages':['timeline']});
google.charts.setOnLoadCallback(drawChart);
function drawChart() {
var container = document.getElementById('chart_div');
var chart = new google.visualization.Timeline(container);
var data = new google.visualization.DataTable();
data.addColumn('string', 'Task ID');
data.addColumn('string', 'Task Name');
data.addColumn('string', 'Resource');
data.addColumn('date', 'Start Date');
data.addColumn('date', 'End Date');
data.addColumn('number', 'Duration');
data.addColumn('number', 'Percent Complete');
data.addColumn('string', 'Dependencies');
data.addColumn({ type: 'string', id: 'President' });
data.addColumn({ type: 'string', id: 'President2' });
data.addColumn({ type: 'date', id: 'Start' });
data.addColumn({ type: 'date', id: 'End' });
data.addRows([
['Normal flow','Normal flow',new Date("{{ startSlut[0][0].getMedianDag(0,30,2)[0] }}"),new Date("{{ startSlut[1][0].getMedianDag(0,30,2)[0] }}")],
{% for job in test %}
['{{ job[0].name }}','{{ job[0].name }}','{{ job[0].name.split(' ')[4][:3] }}', new Date("{{ job[0].getMedianDag(2,0,30)[0][0] }}"),null,{{ job[0].getMedianDag(2,0,30)[0][1] }},100,''],
['{{ job.name }}','{{ job.name }}',new Date("{{ job.getMedianDag(0,30,2)[0] }}"),new Date("{{ job.getMedianDag(0,30,2)[1] }}")],
{% endfor %}
['BI - Admin - Log Index Stats2','BI - Admin - Log Index Stats','Log', [new Date("1970-01-01 17:00:00"),new Date("1970-01-01 18:00:00")],null,259000,100,''],
]);
var paddingHeight = 50;
var rowHeight = data.getNumberOfRows() * 25;
var chartHeight = rowHeight + paddingHeight;
var calHeight = data.getNumberOfRows() * 45
var options = {
height: chartHeight,
gantt: {
sortTasks: true,
trackHeight: 30
}
height: calHeight,
timeline: { groupByRowLabel: true }
};
var chart = new google.visualization.Gantt(document.getElementById('chart_div'));
chart.draw(data, options);
}
</script>
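The template change above swaps the Gantt chart for a Timeline, whose rows carry only a row label, a bar label, a start date, and an end date. A hedged sketch of the shape of one rendered row (the datetimes below are invented):

from datetime import datetime

name = 'BI - Admin - Log Index Stats'
median_start = datetime(1970, 1, 1, 21, 30)   # job.getMedianDag(0, 30, 2)[0]
median_end = datetime(1970, 1, 1, 21, 42)     # job.getMedianDag(0, 30, 2)[1]
row = [name, name, median_start, median_end]  # rendered into addRows as new Date(...) pairs
# the leading 'Normal flow' row spans from the batch-start job's median start
# (startSlut[0]) to the batch-end job's median start (startSlut[1])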