test of overview

Dennis Kerschus 2023-06-20 15:24:48 +02:00
parent 834c6de183
commit 127243803e
9 changed files with 246 additions and 13 deletions


@@ -32,8 +32,16 @@ def create_app(test_config=None):
        return FlowTest.test()
    @FlowAnalyserMain.route('/test')
    def test():
        html=''
        return render_template('index.html', test=html)
        listSysjobs=[]
        jobs=['BI - Flow - Batch Start Daglig kl. 20.00','BI - Admin - Log Index Stats',
              'BI - Flow - MDS Backup','BI - Admin - shrink staging filgruppe samt log filer',
              'BI - Flow - Batch Slut Daglig kl. 20.00']
        for job in jobs:
            listSysjobs.append(list(Sysjobs.getSysjob(session,job))[0])
        return render_template('index.html', test=listSysjobs)
    @FlowAnalyserMain.route('/test3')
    def test3():
        sysjobs=Sysjobs.getNattensKoersel(session)
        return render_template('index3.html', test3=sysjobs)
    @FlowAnalyserMain.route('/test2')
    def test2():
@@ -47,5 +55,5 @@ def create_app(test_config=None):
    return FlowAnalyserMain
engine=inn.getEngine("msdb")
FlowAnalyserMain=create_app()
with Session(engine) as session:
    FlowAnalyserMain=create_app()

Binary file not shown.

Binary file not shown.

inn.py

@@ -5,7 +5,7 @@ from sqlalchemy import sql,Table,select,MetaData
import urllib
def getEngine(database):
    server = 'bi-dsa-udv\dsa' # named instance
    server = 'bi-dsa-test\dsa' # named instance
    username = 'admindenker'
    password = 'biadmin#kode4rm2'
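The rest of getEngine is outside this hunk. For orientation, a minimal sketch of how such an engine is often built from these variables with pyodbc (an assumption about the shape, not this repo's confirmed implementation; make_engine is a hypothetical name):

import urllib.parse
from sqlalchemy import create_engine

def make_engine(server: str, database: str, username: str, password: str):
    # Hypothetical sketch: build an ODBC connection string and URL-encode it
    # for SQLAlchemy's mssql+pyodbc dialect.
    params = urllib.parse.quote_plus(
        'DRIVER={ODBC Driver 17 for SQL Server};'
        f'SERVER={server};DATABASE={database};UID={username};PWD={password}'
    )
    return create_engine(f'mssql+pyodbc:///?odbc_connect={params}')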


@@ -1,7 +1,7 @@
#from __future__ import annotations
from typing import List
from sqlalchemy import BOOLEAN, Column,INTEGER,NVARCHAR, ForeignKey,Select, and_, or_
from sqlalchemy import BOOLEAN, Column,INTEGER,NVARCHAR, ForeignKey,Select, and_, or_,DateTime,text
from sqlalchemy.dialects.mssql import UNIQUEIDENTIFIER, TINYINT
from sqlalchemy.orm import relationship,Session,Mapped,mapped_column
from sqlalchemy.ext.declarative import declarative_base
@@ -27,6 +27,8 @@ class Sysjobs(Base):
    enabled=Column(TINYINT)
    sysjobhistories: Mapped[List["Sysjobhistory"]] = relationship(back_populates="sysjob")
    dataflow_jobs: Mapped[List["DataflowManagement_JobListe"]] = relationship(back_populates="sysjob")
    parents: Mapped[List["DataflowManagement_JobAfhaengighed"]] = relationship(back_populates="child", foreign_keys="DataflowManagement_JobAfhaengighed.ChildJobID")
    children: Mapped[List["DataflowManagement_JobAfhaengighed"]] = relationship(back_populates="parent", foreign_keys="DataflowManagement_JobAfhaengighed.ParentJobID")
    def __iter__(self):
        yield from {
@@ -57,15 +59,79 @@ class Sysjobs(Base):
        for row in res:
            print(row.Sysjobs.name + ' ' + str(row.Sysjobhistory.run_date) + ' ' + str(row.Sysjobhistory.run_time))
    def getMedianDag(self,ugeDag: int,stepID: int,antalDage: int):
        session=Session.object_session(self)
        sqlStr='''DECLARE @dage int = :antalDage
        DECLARE @job VARCHAR(200) = :selfName
        DECLARE @ugeDag INT = :ugeDag
        DECLARE @stepID INT = :stepID
        ;WITH ctedateconversion AS(
            SELECT PERCENTILE_DISC(0.5) WITHIN GROUP (ORDER BY run_datetime) OVER() AS MedianRun_datetime
                 , [t].[run_datetime]
                 , PERCENTILE_DISC(0.5) WITHIN GROUP (ORDER BY [t].[run_duration]) OVER() AS run_duration
            FROM
            (
                SELECT job_name, run_datetime, run_duration
                FROM
                (
                    SELECT TOP (@dage)
                        j.name AS job_name,
                        -- run_time is an HHMMSS integer; the weighted sum converts it to a
                        -- fraction of a day (H/24 + M/1440 + S/86400) added to the epoch date
                        run_datetime = CONVERT(DATETIME, '1970-01-01') +
                            (run_time * 9 + run_time % 10000 * 6 + run_time % 100 * 10) / 216e4,
                        -- run_duration is likewise HHMMSS; converted to seconds, then scaled by 100
                        run_duration = (run_duration/10000*3600 + (run_duration/100)%100*60 + run_duration%100)*100
                    FROM msdb..sysjobhistory h
                    INNER JOIN msdb..sysjobs j
                        ON h.job_id = j.job_id
                    WHERE
                        [j].[name] LIKE @job AND [h].[step_id]=@stepID
        '''
        if(ugeDag!=0):
            sqlStr+=''' AND DATEPART(WEEKDAY,CONVERT(DATETIME, RTRIM(run_date))
                + (run_time * 9 + run_time % 10000 * 6 + run_time % 100 * 10) / 216e4)=@ugeDag -- 1 = Monday
        '''
        sqlStr+=''' ORDER BY [h].[run_date] DESC
                ) t
            ) t
        )
        SELECT [ctedateconversion].[MedianRun_datetime] AS MedianRun_datetime_SpecificDate,[ctedateconversion].[run_duration]
        FROM [ctedateconversion]
        WHERE [ctedateconversion].[MedianRun_datetime] = [ctedateconversion].[run_datetime]
        GROUP BY [ctedateconversion].[MedianRun_datetime],
                 [ctedateconversion].[run_duration]'''
        stmt=text(sqlStr).params(antalDage=antalDage,selfName=self.name,ugeDag=ugeDag,stepID=stepID)
        res=session.execute(stmt).all()
        return res
    def printParent(self, sysjobs: List['Sysjobs']):
        resReturn=''
        if(self.name=='BI - Flow - Batch Start Daglig kl. 20.00'): #or self.name=='BI - Flow - Batch Slut Daglig kl. 20.00'):
            return ''
        for parent in self.parents:
            if(any(parent.parent in sysjob for sysjob in sysjobs)):
                if(len(resReturn)>0):
                    resReturn+=','
                resReturn+=parent.parent.name
        return resReturn
    def getNattensKoersel(session) -> List['Sysjobs']:
        natStat=(datetime.today()-timedelta(days=1)).replace(hour=20,minute=0,second=0,microsecond=0)
        resReturn: List['Sysjobs'] = list()
        stmt = Select(Sysjobs,Sysjobhistory).join(DataflowManagement_JobListe).join(Sysjobhistory).where(Sysjobhistory.step_id==0).where(DataflowManagement_JobListe.Aktiv==1).where(or_(and_(Sysjobhistory.run_date>=int((natStat.strftime('%Y%m%d'))),(Sysjobhistory.run_time>=int((natStat.strftime('%H%M%S'))))),Sysjobhistory.run_date>=int((datetime.today().strftime('%Y%m%d'))))).distinct()
        stmt = Select(Sysjobs,Sysjobhistory).join(DataflowManagement_JobListe).join(Sysjobhistory).join(DataflowManagement_JobsForExecution).where(Sysjobhistory.step_id==0).where(DataflowManagement_JobListe.Aktiv==1).where(or_(and_(Sysjobhistory.run_date>=int((natStat.strftime('%Y%m%d'))),(Sysjobhistory.run_time>=int((natStat.strftime('%H%M%S'))))),Sysjobhistory.run_date>=int((datetime.today().strftime('%Y%m%d'))))).distinct()
        row : Sysjobs
        res = session.execute(stmt).all()
        # resReturn=[x[0] for x in res]
        return res
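Each row returned by getNattensKoersel pairs the two selected entities, which is why the templates below index into job[0] and job[1]. A minimal consumption sketch, assuming an open session:

# Each result row is a (Sysjobs, Sysjobhistory) tuple.
for job, history in Sysjobs.getNattensKoersel(session):
    print(job.name, history.run_date, history.run_time)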
    def getSysjob(session, sysjobName: str):
        resReturn=None
        if(sysjobName!=None):
            stmt = Select(Sysjobs).where(Sysjobs.name==sysjobName)
            try:
                resReturn=session.execute(stmt)
            except:
                session.rollback()
        return resReturn
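A usage sketch of the two helpers together, assuming an open Session named session and that the job name exists (the parameter values are illustrative):

# Median start time and duration of step 0 over the last 30 runs,
# restricted to weekday 2; ugeDag=0 would skip the weekday filter.
row = list(Sysjobs.getSysjob(session, 'BI - Flow - MDS Backup'))[0]
median_start, median_duration = row[0].getMedianDag(ugeDag=2, stepID=0, antalDage=30)[0]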
class Sysjobhistory(Base):
@@ -138,4 +204,32 @@ class DataflowManagement_JobListe(Base):
    __table_args__ = { "schema": "dataflowmanagement.flw" }
    JobID: Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"),primary_key=True)
    Aktiv=Column(BOOLEAN)
    sysjob: Mapped["Sysjobs"] = relationship(back_populates="dataflow_jobs")
class DataflowManagement_JobAfhaengighed(Base):
    __tablename__ = "JobAfhaengighed"
    __table_args__ = { "schema": "dataflowmanagement.flw" }
    ParentJobID: Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"), primary_key=True)
    ChildJobID: Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"), primary_key=True)
    OprettetDato: Mapped[datetime] = mapped_column(DateTime)
    AendretDato: Mapped[datetime] = mapped_column(DateTime)
    # Each dependency row links one parent job and one child job, so the Sysjobs
    # side is scalar; parent pairs with Sysjobs.children, child with Sysjobs.parents.
    parent: Mapped["Sysjobs"] = relationship(back_populates="children", foreign_keys=[ParentJobID])
    child: Mapped["Sysjobs"] = relationship(back_populates="parents", foreign_keys=[ChildJobID])
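A short sketch of walking the new dependency mapping, assuming the relationships configure as declared (direct_parents is an illustrative helper, not part of the commit):

def direct_parents(job: Sysjobs) -> List[str]:
    # job.parents holds the JobAfhaengighed rows where this job is the child;
    # each row's .parent attribute is the upstream Sysjobs.
    return [dep.parent.name for dep in job.parents]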
class DataflowManagement_JobMasterSetup(Base):
    __tablename__ = "JobMasterSetup"
    __table_args__ = { "schema": "dataflowmanagement.flw" }
    JobID: Mapped[str] = mapped_column(UNIQUEIDENTIFIER,primary_key=True)
    CurrentBatchID: Mapped[int] = mapped_column(INTEGER)
    def getCurrentBatchId(session):
        stmt = Select(DataflowManagement_JobMasterSetup)
        res = session.execute(stmt).all()
        return res
class DataflowManagement_JobsForExecution(Base):
    __tablename__ = "JobsForExecution"
    __table_args__ = { "schema": "dataflowmanagement.flw" }
    JobID: Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"),primary_key=True)
    BatchID: Mapped[int] = mapped_column("JobMasterSetup.CurrentBatchID")


@@ -3,7 +3,50 @@
<head>
    <meta charset="UTF-8">
    <title>FlaskBlog</title>
</head>
<body>
</body>
</html>
    <script type="text/javascript" src="https://www.gstatic.com/charts/loader.js"></script>
    <script type="text/javascript">
        google.charts.load('current', {'packages':['gantt']});
        google.charts.setOnLoadCallback(drawChart);
        function drawChart() {
            var data = new google.visualization.DataTable();
            data.addColumn('string', 'Task ID');
            data.addColumn('string', 'Task Name');
            data.addColumn('string', 'Resource');
            data.addColumn('date', 'Start Date');
            data.addColumn('date', 'End Date');
            data.addColumn('number', 'Duration');
            data.addColumn('number', 'Percent Complete');
            data.addColumn('string', 'Dependencies');
            data.addRows([
                {% for job in test %}
                ['{{ job[0].name }}','{{ job[0].name }}','{{ job[0].name.split(' ')[4][:3] }}', new Date("{{ job[0].getMedianDag(2,0,30)[0][0] }}"),null,{{ job[0].getMedianDag(2,0,30)[0][1] }},100,''],
                {% endfor %}
                ['BI - Admin - Log Index Stats2','BI - Admin - Log Index Stats','Log', [new Date("1970-01-01 17:00:00"),new Date("1970-01-01 18:00:00")],null,259000,100,''],
            ]);
            var paddingHeight = 50;
            var rowHeight = data.getNumberOfRows() * 25;
            var chartHeight = rowHeight + paddingHeight;
            var options = {
                height: chartHeight,
                gantt: {
                    sortTasks: true,
                    trackHeight: 30
                }
            };
            var chart = new google.visualization.Gantt(document.getElementById('chart_div'));
            chart.draw(data, options);
        }
    </script>
</head>
<body>
    <div id="chart_div"></div>
</body>
</html>

templates/index3.html (new file)

@@ -0,0 +1,48 @@
<html>
<head>
    <script type="text/javascript" src="https://www.gstatic.com/charts/loader.js"></script>
    <script type="text/javascript">
        google.charts.load('current', {'packages':['gantt']});
        google.charts.setOnLoadCallback(drawChart);
        function drawChart() {
            var data = new google.visualization.DataTable();
            data.addColumn('string', 'Task ID');
            data.addColumn('string', 'Task Name');
            data.addColumn('string', 'Resource');
            data.addColumn('date', 'Start Date');
            data.addColumn('date', 'End Date');
            data.addColumn('number', 'Duration');
            data.addColumn('number', 'Percent Complete');
            data.addColumn('string', 'Dependencies');
            data.addRows([
                {% for job in test3 %}
                ['{{ job[0].name }}','{{ job[0].name }}','{{ job[0].name.split(' ')[4][:3] }}', new Date("{{ job[1].getStartTime() }}"),new Date("{{ job[1].getEndTime() }}"),null,100,'{{ job[0].printParent(test3) }}'],
                {% endfor %}
            ]);
            var paddingHeight = 50;
            var rowHeight = data.getNumberOfRows() * 25;
            var chartHeight = rowHeight + paddingHeight;
            var options = {
                height: chartHeight,
                gantt: {
                    sortTasks: true,
                    trackHeight: 30
                }
            };
            var chart = new google.visualization.Gantt(document.getElementById('chart_div'));
            chart.draw(data, options);
        }
    </script>
</head>
<body>
    <div id="chart_div"></div>
</body>
</html>

test.md

@@ -55,3 +55,43 @@ BI - Flow - DSI_LUNAADM : 2023-06-11 20:01:13,0:03:03
};
mermaid.initialize({startOnLoad:true});
</script>
DECLARE @ugeDag INT = :dag -- 1 = Sunday, 7 = Saturday
DECLARE @stepID INT = :step -- default 0, since that gives the overall flow time
;WITH ctedateconversion AS(
    SELECT PERCENTILE_DISC(0.5) WITHIN GROUP (ORDER BY run_datetime) OVER() AS MedianRun_datetime
         , [t].[run_datetime]
         , PERCENTILE_DISC(0.5) WITHIN GROUP (ORDER BY [t].[run_duration]) OVER() AS run_duration
    FROM
    (
        SELECT job_name, run_datetime,
               SUBSTRING(run_duration, 1, 2) + ':' + SUBSTRING(run_duration, 3, 2) + ':' +
               SUBSTRING(run_duration, 5, 2) AS run_duration
        FROM
        (
            SELECT TOP (@dage)
                j.name AS job_name,
                run_datetime = CONVERT(DATETIME, '1970-01-01') +
                    (run_time * 9 + run_time % 10000 * 6 + run_time % 100 * 10) / 216e4,
                run_duration = RIGHT('000000' + CONVERT(varchar(6), run_duration), 6)
            FROM msdb..sysjobhistory h
            INNER JOIN msdb..sysjobs j
                ON h.job_id = j.job_id
            WHERE
                [j].[name] LIKE @job AND [h].[step_id]=@stepID
                AND DATEPART(WEEKDAY,CONVERT(DATETIME, RTRIM(run_date))
                    + (run_time * 9 + run_time % 10000 * 6 + run_time % 100 * 10) / 216e4)=@ugeDag -- 1 = Monday
            ORDER BY [h].[run_date] DESC
        ) t
    ) t
)
SELECT [ctedateconversion].[MedianRun_datetime] AS MedianRun_datetime_SpecificDate, [ctedateconversion].[run_duration]
FROM [ctedateconversion]
WHERE [ctedateconversion].[MedianRun_datetime] = [ctedateconversion].[run_datetime]
GROUP BY [ctedateconversion].[MedianRun_datetime],
         [ctedateconversion].[run_duration]
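As a sanity check on the run_time arithmetic: the weights 9/6/10 over 216e4 expand to (90000*H + 1500*M + 25*S) / 2160000 = H/24 + M/1440 + S/86400, i.e. the time of day as a fraction of a day. A small Python equivalent:

def run_time_to_day_fraction(run_time: int) -> float:
    # msdb stores run_time as an HHMMSS integer.
    return (run_time * 9 + run_time % 10000 * 6 + run_time % 100 * 10) / 216e4

# 20:13:14 -> 20/24 + 13/1440 + 14/86400 of a day
assert abs(run_time_to_day_fraction(201314) - (20/24 + 13/1440 + 14/86400)) < 1e-12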