commit 5b0b3badbb (parent 3ad6805621)
Commit message: m
@@ -5,6 +5,10 @@ import FlowTest
from sysjobs import *
import sysjob2html
import pandas as pd
import logging
from dateutil import tz
from pytz import timezone
import koerselsOverblikUtils

def create_app(test_config=None):
    # create and configure the app
@@ -25,7 +29,9 @@ def create_app(test_config=None):
        os.makedirs(FlowAnalyserMain.instance_path)
    except OSError:
        pass

    @FlowAnalyserMain.context_processor
    def inject_debug():
        return dict(debug=FlowAnalyserMain.debug)

    # a simple page that says hello
    @FlowAnalyserMain.route('/hello')
    def hello():
@@ -33,23 +39,37 @@ def create_app(test_config=None):
    @FlowAnalyserMain.route('/test')
    def test():
        listSysjobs = []
        listStartSlutjobs = []
        listStartSlutjobs.append(Sysjobs.getSysjob(session, 'BI - Flow - Batch Start – Daglig kl. 20.00', True)[0])
        listStartSlutjobs.append(Sysjobs.getSysjob(session, 'BI - Flow - Batch Slut – Daglig kl. 20.00', True)[0])
        listSysjobs.append(listStartSlutjobs[0][0])
        listStartSlutjobs = [Sysjobs.getSysjob(session, 'BI - Flow - Batch Start – Daglig kl. 20.00', True)[0][0]]
        listStartSlutjobs.append(Sysjobs.getSysjob(session, 'BI - Flow - Batch Slut – Daglig kl. 20.00', True)[0][0])
        listStartSlutjobs = koerselsOverblikUtils.convertToAlike(listStartSlutjobs, 0, 30, 0)
        sysjobsAlike = koerselsOverblikUtils.timeRangeMerge(listStartSlutjobs, "Batch køretid", 0, 30, 2)
        listSysjobs.append(sysjobsAlike)
        listSysjobs.extend(listStartSlutjobs)
        sysjobs = Sysjobs.getSysjob(session, '% - Admin - %', False)
        sysjobs = [a for a in sysjobs if a[0].name not in ["BI - Admin - Kill Blocking Queries", "BI - Admin - Flow Job Restarter"]]
        if sysjobs is not None and len(sysjobs) > 0:
            for i in sysjobs:
                listSysjobs.append(i[0])
        listSysjobs.append(listStartSlutjobs[1][0])
        sysjobs = koerselsOverblikUtils.convertToAlike(sysjobs, 0, 30, 2)
        listSysjobs.extend(sysjobs)
        listSysjobs = [x for x in listSysjobs if len(x.getMedianDag(0, 30, 2)) > 0]
        listSysjobs = sorted(listSysjobs, key=lambda x: x.getMedianDag(0, 30, 2)[0])
        return render_template('index.html', test=listSysjobs, startSlut=listStartSlutjobs)
        return render_template('index.html', test=listSysjobs)  # ,startSlut=listStartSlutjobs
    @FlowAnalyserMain.route('/test3')
    def test3():
        sysjobs = Sysjobs.getNattensKoersel(session)
        return render_template('index3.html', test3=sysjobs)

    @FlowAnalyserMain.route('/test4')
    def test4():
        sessions = biadmin_log_ActiveSessionsByInspari.getSessionTimeSpan(
            session,
            datetime.fromisoformat('2023-06-01 23:14:16.817'),
            datetime.fromisoformat('2023-06-02 03:14:18.817'))
        return render_template('index4.html', test4=sessions)

    @FlowAnalyserMain.route('/test4/<sessionID>/<logdate>')
    def test4_getSession(sessionID, logdate):
        sessions = biadmin_log_ActiveSessionsByInspari.getSessionByID(
            session, int(sessionID),
            datetime.fromisoformat(logdate).astimezone(tz.gettz('Europe/Copenhagen')))
        return render_template('index4.html', test4=sessions)
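A note on the logdate parameter that test4_getSession parses: the template sends JavaScript's Date.toISOString() output, which carries a trailing 'Z' (e.g. '2023-06-02T01:14:18.817Z'). datetime.fromisoformat() only accepts that suffix from Python 3.11 on; the committed cpython-311 .pyc files suggest that holds here. A minimal check, with the timestamp invented for illustration:

from datetime import datetime
from dateutil import tz

# Python 3.11+ parses the trailing 'Z' that JS Date.toISOString() emits;
# on 3.10 and earlier this line raises ValueError.
ts = datetime.fromisoformat('2023-06-02T01:14:18.817Z')
print(ts.astimezone(tz.gettz('Europe/Copenhagen')).isoformat())
# 2023-06-02T03:14:18.817000+02:00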
    @FlowAnalyserMain.route('/test2')
    def test2():
        with Session(engine) as session:
@@ -62,5 +82,8 @@ def create_app(test_config=None):
    return FlowAnalyserMain

engine = inn.getEngine("msdb")
logging.basicConfig()
logging.getLogger("sqlalchemy.engine").setLevel(logging.DEBUG)
logging.getLogger("sqlalchemy.pool").setLevel(logging.DEBUG)
with Session(engine) as session:
    FlowAnalyserMain = create_app()
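Note the lifetime here: the engine and a single Session are created at import time, and create_app() runs inside that with block, so every request handler shares one long-lived session. A sketch of the usual per-request alternative, reusing this module's engine and helpers (the route name is hypothetical, not part of the commit):

from sqlalchemy.orm import Session

@FlowAnalyserMain.route('/test_scoped')  # hypothetical route, for illustration
def test_scoped():
    # Open a session per request instead of sharing the module-level one;
    # render while the session is still open so lazy loads can complete.
    with Session(engine) as request_session:
        sysjobs = Sysjobs.getNattensKoersel(request_session)
        return render_template('index3.html', test3=sysjobs)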
BIN  __pycache__/koerselsOverblikUtils.cpython-311.pyc (new file; binary not shown)

inn.py (2 changed lines)
@@ -7,7 +7,7 @@ import urllib
def getEngine(database):
    server = 'bi-dsa-test\\dsa'
    username = 'admindenker'
    password = 'biadmin#kode4rm2'
    password = 'Min!sterTj€n€r€nhv3r$ta7s11g3'


    connection_string = "DRIVER={SQL Server};Database=" + database + ";SERVER=" + server
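getEngine assembles a raw ODBC connection string, and the module already imports urllib; the standard way to hand such a string to SQLAlchemy is to URL-encode it into an mssql+pyodbc URL. A sketch with placeholder credentials (not the committed code, which may differ):

import urllib.parse
from sqlalchemy import create_engine

# Placeholder values for illustration; the real credentials live in inn.py.
conn_str = "DRIVER={SQL Server};Database=msdb;SERVER=bi-dsa-test\\dsa;UID=user;PWD=secret"
engine = create_engine("mssql+pyodbc:///?odbc_connect=" + urllib.parse.quote_plus(conn_str))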
koerselsOverblikUtils.py (new file, 55 lines)
@@ -0,0 +1,55 @@
from datetime import datetime
from typing import List
from sysjobs import Sysjobs


class SysjobsAlike:
    name = None
    startTime = None
    endTime = None
    stepId = None
    antalDage = None
    ugeDage = None

    def __init__(self, name):
        self.name = name

    def getName(self):
        return self.name

    def getMedianDag(self, stepId, antalDage, ugeDage):
        medianDag = [self.startTime, self.endTime]
        return medianDag

    def mergeOneMore(self, sysjobs: Sysjobs):
        medianDag = sysjobs.getMedianDag(self.stepId, self.antalDage, self.ugeDage)
        if self.startTime > medianDag[0]:
            self.startTime = medianDag[0]
        if self.endTime < medianDag[1]:
            self.endTime = medianDag[1]


def timeRangeMerge(mainList: List['Sysjobs'], name, stepId, antalDage, ugeDage):
    sysjobalike = SysjobsAlike(name)
    sysjobalike.stepId = stepId
    sysjobalike.antalDage = antalDage
    sysjobalike.ugeDage = ugeDage
    for job in mainList:
        medianDag = job.getMedianDag(stepId, antalDage, ugeDage)
        if sysjobalike.startTime is None or sysjobalike.startTime > medianDag[0]:
            sysjobalike.startTime = medianDag[0]
        if sysjobalike.endTime is None or sysjobalike.endTime < medianDag[1]:
            sysjobalike.endTime = medianDag[1]
    return sysjobalike


def convertToAlike(mainList: List, stepId, antalDage, ugeDage):
    returnList = []
    for job in mainList:
        if not isinstance(job, Sysjobs) and isinstance(job[0], Sysjobs):
            job = job[0]
        jobAlike = SysjobsAlike(job.name)
        jobAlike.stepId = stepId
        jobAlike.antalDage = antalDage
        jobAlike.ugeDage = ugeDage
        medianDag = job.getMedianDag(stepId, antalDage, ugeDage)
        if len(medianDag) == 0:
            continue
        jobAlike.startTime = medianDag[0]
        jobAlike.endTime = medianDag[1]
        returnList.append(jobAlike)
    return returnList
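What the new helpers compute: convertToAlike snapshots each job's median [start, end] window into a SysjobsAlike, and timeRangeMerge folds a whole list into one envelope spanning the earliest start and the latest end. A self-contained sketch of timeRangeMerge (StubJob is an invented stand-in exposing the one method the helper calls; real callers pass Sysjobs rows):

from datetime import datetime
from koerselsOverblikUtils import timeRangeMerge

class StubJob:
    # Invented stand-in: the helper only calls getMedianDag(...).
    def __init__(self, start, end):
        self._span = [start, end]
    def getMedianDag(self, stepId, antalDage, ugeDage):
        return self._span

jobs = [
    StubJob(datetime(2023, 6, 1, 20, 0), datetime(2023, 6, 1, 20, 5)),
    StubJob(datetime(2023, 6, 2, 1, 0), datetime(2023, 6, 2, 1, 2)),
]
envelope = timeRangeMerge(jobs, "Batch køretid", 0, 30, 2)
print(envelope.startTime, envelope.endTime)
# 2023-06-01 20:00:00  2023-06-02 01:02:00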
BIN  static/X2GoClient_latest_mswin32-setup.exe (new file; binary not shown)
BIN  static/ssisdb_erd23.pdf (new file; binary not shown)
sysjobs.py (77 changed lines)
@@ -1,7 +1,6 @@
#from __future__ import annotations
from typing import List

from sqlalchemy import BOOLEAN, Column, INTEGER, NVARCHAR, ForeignKey, Select, and_, or_, DateTime, text, VARCHAR
from sqlalchemy import BOOLEAN, Column, INTEGER, NVARCHAR, ForeignKey, Select, and_, or_, DateTime, text, VARCHAR, String, SMALLINT, DECIMAL, DATETIME, BIGINT
from sqlalchemy.dialects.mssql import UNIQUEIDENTIFIER, TINYINT
from sqlalchemy.orm import relationship, Session, Mapped, mapped_column, contains_eager
from sqlalchemy.ext.declarative import declarative_base
@@ -11,6 +10,7 @@ from json import JSONEncoder
#import sysjobhistory
import inn

class MyEncoder(JSONEncoder):
    def default(self, obj):
        if isinstance(obj, (Sysjobs, Sysjobhistory)):
@@ -20,6 +20,16 @@ class MyEncoder(JSONEncoder):

Base = declarative_base()

class DataflowManagement_JobAfhaengighed(Base):
    __tablename__ = "JobAfhaengighed"
    __table_args__ = {"schema": "dataflowmanagement.flw"}
    ParentJobID: Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"), primary_key=True)
    ChildJobID: Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"), primary_key=True)
    OprettetDato: Mapped[datetime] = Column(DateTime)
    AendretDato: Mapped[datetime] = Column(DateTime)
    parent: Mapped[List["Sysjobs"]] = relationship(back_populates="parents", foreign_keys=[ParentJobID])
    child: Mapped[List["Sysjobs"]] = relationship(back_populates="children", foreign_keys=[ChildJobID])

class Sysjobs(Base):
    __tablename__ = "sysjobs"
    job_id: Mapped[str] = mapped_column(UNIQUEIDENTIFIER, primary_key=True)
@@ -29,6 +39,22 @@ class Sysjobs(Base):
    dataflow_jobs: Mapped[List["DataflowManagement_JobListe"]] = relationship(back_populates="sysjob")
    sysjobsteps: Mapped[List["msdb_sysjobsteps"]] = relationship(back_populates="sysjob")

    # children: Mapped[List["Sysjobs"]] = relationship(secondary="JobAfhaengighed", back_populates="parents",
    #     primaryjoin="Sysjobs.job_id==DataflowManagement_JobAfhaengighed.ParentJobID",
    #     secondaryjoin="Sysjobs.job_id==DataflowManagement_JobAfhaengighed.ChildJobID")
    # parents: Mapped[List["Sysjobs"]] = relationship(back_populates="children",
    #     secondary="JobAfhaengighed",
    #     primaryjoin="Sysjobs.job_id==DataflowManagement_JobAfhaengighed.ChildJobID",
    #     secondaryjoin="Sysjobs.job_id==DataflowManagement_JobAfhaengighed.ParentJobID"
    #     )
    # children: Mapped[List["Sysjobs"]] = relationship(secondary='JobAfhaengighed', back_populates="parents",
    #     primaryjoin=job_id==DataflowManagement_JobAfhaengighed.ParentJobID,
    #     secondaryjoin=job_id==DataflowManagement_JobAfhaengighed.ChildJobID)
    # parents: Mapped[List["Sysjobs"]] = relationship(back_populates="children",
    #     secondary='JobAfhaengighed',
    #     primaryjoin=job_id==DataflowManagement_JobAfhaengighed.ChildJobID,
    #     secondaryjoin=job_id==DataflowManagement_JobAfhaengighed.ParentJobID
    #     )
    parents: Mapped[List["DataflowManagement_JobAfhaengighed"]] = relationship(back_populates="child", foreign_keys="DataflowManagement_JobAfhaengighed.ChildJobID")
    children: Mapped[List["DataflowManagement_JobAfhaengighed"]] = relationship(back_populates="parent", foreign_keys="DataflowManagement_JobAfhaengighed.ParentJobID")
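The commented-out attempts above tried to map the dependency as a secondary-table self-join; the commit settles on SQLAlchemy's association-object pattern instead, where each JobAfhaengighed row is its own mapped class and parents/children point at those rows. (One nit: the association's parent/child attributes are annotated Mapped[List["Sysjobs"]], though each row has exactly one parent and one child, so a scalar Mapped["Sysjobs"] is the conventional annotation.) A minimal self-contained version of the same pattern, with toy names and SQLite standing in for SQL Server:

from typing import List
from sqlalchemy import ForeignKey, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship, Session

class Base(DeclarativeBase):
    pass

class Dependency(Base):  # plays the JobAfhaengighed role
    __tablename__ = "dependency"
    parent_id: Mapped[int] = mapped_column(ForeignKey("job.id"), primary_key=True)
    child_id: Mapped[int] = mapped_column(ForeignKey("job.id"), primary_key=True)
    # Two FKs to the same table, so each relationship names its FK explicitly.
    parent: Mapped["Job"] = relationship(back_populates="children", foreign_keys=[parent_id])
    child: Mapped["Job"] = relationship(back_populates="parents", foreign_keys=[child_id])

class Job(Base):  # plays the Sysjobs role
    __tablename__ = "job"
    id: Mapped[int] = mapped_column(primary_key=True)
    name: Mapped[str]
    parents: Mapped[List[Dependency]] = relationship(back_populates="child", foreign_keys=[Dependency.child_id])
    children: Mapped[List[Dependency]] = relationship(back_populates="parent", foreign_keys=[Dependency.parent_id])

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as s:
    a, b = Job(name="a"), Job(name="b")
    s.add(Dependency(parent=a, child=b))  # "a" must finish before "b"
    s.commit()
    print([dep.parent.name for dep in b.parents])  # ['a']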
@@ -55,7 +81,7 @@ class Sysjobs(Base):
    def getTest(self, session: Session):
        stmt = Select(Sysjobs).join(Sysjobhistory).where(Sysjobhistory.run_date > 20230601).distinct()
        print(stmt)
        with Session(engine) as session:
        with Session(session) as session:
            row: Sysjobs
            res = session.execute(stmt).all()
            for row in res:
@@ -129,7 +155,7 @@ GROUP BY [ctedateconversion].[MedianRun_datetime],
        resReturn: List['Sysjobs'] = list()
        stmt = (Select(Sysjobs).join(DataflowManagement_JobListe).join(Sysjobhistory)
                .join(DataflowManagement_JobsForExecution)
                .where(Sysjobhistory.step_id == 0)
                .where(DataflowManagement_JobListe.Aktiv == 1)
                .where(or_(and_(Sysjobhistory.run_date >= int(natStat.strftime('%Y%m%d')),
                                Sysjobhistory.run_time >= int(natStat.strftime('%H%M%S'))),
                           Sysjobhistory.run_date >= int(datetime.today().strftime('%Y%m%d'))))
                .distinct())
        row: Sysjobs
        stmt = stmt.options(contains_eager(Sysjobs.sysjobhistories), contains_eager(Sysjobs.parents), contains_eager(DataflowManagement_JobAfhaengighed.parent))  # options() is generative; keep the result

        res = session.execute(stmt).all()
        return res
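A detail worth knowing in this hunk: Select statements are immutable and generative, so .where(), .join(), and .options() each return a new statement rather than mutating the old one; the result of .options(...) must be kept or the contains_eager options are silently dropped. In brief, using this module's mapped classes:

# Each builder call returns a NEW Select; reassign (or chain) to keep it.
stmt = Select(Sysjobs).join(Sysjobhistory).where(Sysjobs.name.like('BI - %'))
stmt = stmt.options(contains_eager(Sysjobs.sysjobhistories))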
@@ -156,6 +182,7 @@ GROUP BY [ctedateconversion].[MedianRun_datetime],
    def getSmallestWaitOfParent(self):
        return


class Sysjobhistory(Base):
    __tablename__ = "sysjobhistory"
    instance_id = Column(INTEGER, primary_key=True)
@@ -229,15 +256,6 @@ class DataflowManagement_JobListe(Base):
    sysjob: Mapped["Sysjobs"] = relationship(back_populates="dataflow_jobs")


class DataflowManagement_JobAfhaengighed(Base):
    __tablename__ = "JobAfhaengighed"
    __table_args__ = {"schema": "dataflowmanagement.flw"}
    ParentJobID: Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"), primary_key=True)
    ChildJobID: Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"), primary_key=True)
    OprettetDato: Mapped[datetime] = Column(DateTime)
    AendretDato: Mapped[datetime] = Column(DateTime)
    parent: Mapped[List["Sysjobs"]] = relationship(back_populates="parents", foreign_keys=[ParentJobID])
    child: Mapped[List["Sysjobs"]] = relationship(back_populates="children", foreign_keys=[ChildJobID])

class DataflowManagement_JobMasterSetup(Base):
    __tablename__ = "JobMasterSetup"
@@ -267,8 +285,37 @@ class msdb_sysjobsteps(Base):
    command: Mapped[str] = mapped_column(NVARCHAR)
    sysjob: Mapped["Sysjobs"] = relationship(back_populates="sysjobsteps")

class biadmin_log_ActiveSessionsByInspari(Base):
    __tablename__ = "ActiveSessionsByInspari"
    __table_args__ = {"schema": "biadmin.log"}
    session_id: Mapped[int] = mapped_column(SMALLINT, primary_key=True)
    blocking_session_id: Mapped[int] = mapped_column(SMALLINT)
    ElapsedTimeSec: Mapped[float] = mapped_column(DECIMAL)
    ElapsedTimeMinutes: Mapped[float] = mapped_column(DECIMAL)
    request_start_time: Mapped[datetime] = mapped_column(DateTime)
    DatabaseName: Mapped[str] = mapped_column(NVARCHAR)
    WaitTimeMs: Mapped[int] = mapped_column(INTEGER)
    wait_type: Mapped[str] = mapped_column(NVARCHAR)
    host_process_id: Mapped[int] = mapped_column(INTEGER)
    MemoryGrantRequestTime: Mapped[datetime] = mapped_column(DateTime)
    MemoryGrantGrantedTime: Mapped[datetime] = mapped_column(DateTime)
    LogDate: Mapped[datetime] = mapped_column(DATETIME, primary_key=True)

    def getSessionTimeSpan(session: Session, beginTime: datetime, Endtime: datetime):
        stmt = Select(biadmin_log_ActiveSessionsByInspari).where(and_(biadmin_log_ActiveSessionsByInspari.request_start_time >= beginTime, biadmin_log_ActiveSessionsByInspari.request_start_time <= Endtime))
        res = session.execute(stmt).all()
        return res

    def getSessionByID(session: Session, sessionID: int, logDate: datetime = None):
        stmt = Select(biadmin_log_ActiveSessionsByInspari).where(and_(biadmin_log_ActiveSessionsByInspari.session_id == sessionID, biadmin_log_ActiveSessionsByInspari.LogDate == logDate))
        res = session.execute(stmt).all()
        return res
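Both helpers take the Session as their first parameter and are invoked through the class (biadmin_log_ActiveSessionsByInspari.getSessionTimeSpan(...)), matching how the Sysjobs helpers are called from the Flask routes. A usage sketch with an invented time window, assuming this module's inn.getEngine:

from datetime import datetime
from sqlalchemy.orm import Session

with Session(inn.getEngine("msdb")) as session:
    rows = biadmin_log_ActiveSessionsByInspari.getSessionTimeSpan(
        session,
        datetime(2023, 6, 1, 23, 0),  # invented window boundaries
        datetime(2023, 6, 2, 3, 0))
    for (active,) in rows:  # session.execute(...).all() yields Row tuples
        print(active.session_id, active.DatabaseName, active.ElapsedTimeSec)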
#class AllExecutionMessages(Base):
class BiAdmin_log_WaitingTasks(Base):
    __tablename__ = "WaitingTasks"
    __table_args__ = {"schema": "biadmin.log"}
    session_id: Mapped[int] = mapped_column(SMALLINT, primary_key=True)
    wait_duration_ms: Mapped[int] = mapped_column(BIGINT)
    wait_type: Mapped[str] = mapped_column(NVARCHAR)
    blocking_session_id: Mapped[int] = mapped_column(SMALLINT)
    LogDate: Mapped[datetime] = mapped_column(DATETIME, primary_key=True)
templates/index.html
@@ -18,7 +18,7 @@

data.addRows([
  ['Normal flow','Normal flow',new Date("{{ startSlut[0][0].getMedianDag(0,30,2)[0] }}"),new Date("{{ startSlut[1][0].getMedianDag(0,30,2)[0] }}")],
  {% for job in test %}
  ['{{ job.name }}','{{ job.name }}',new Date("{{ job.getMedianDag(0,30,2)[0] }}"),new Date("{{ job.getMedianDag(0,30,2)[1] }}")],
  {% endfor %}
templates/index4.html (new file, 62 lines)
@@ -0,0 +1,62 @@
<html>
<head>
<script type="text/javascript" src="https://www.gstatic.com/charts/loader.js"></script>
<script type="text/javascript">
  google.charts.load('current', {'packages':['gantt']});
  google.charts.setOnLoadCallback(drawChart);

  var refData = new Array()
  {% for job in test4 %}
  refData.push([{{ job[0].session_id }}, new Date("{{ job[0].LogDate }}")])
  {% endfor %}

  function drawChart() {
    var data = new google.visualization.DataTable();
    data.addColumn('string', 'Task ID');
    data.addColumn('string', 'Task Name');
    data.addColumn('string', 'Resource');
    data.addColumn('date', 'Start Date');
    data.addColumn('date', 'End Date');
    data.addColumn('number', 'Duration');
    data.addColumn('number', 'Percent Complete');
    data.addColumn('string', 'Dependencies');

    data.addRows([
      {% for job in test4 %}
      ['{{ job[0].session_id }}','{{ job[0].DatabaseName }}','{{ job[0].DatabaseName }}', new Date("{{ job[0].request_start_time }}"), new Date(new Date("{{ job[0].request_start_time }}").setSeconds(new Date("{{ job[0].request_start_time }}").getSeconds() + {{ job[0].ElapsedTimeSec }})), null, null, null],
      {% endfor %}
    ]);

    var paddingHeight = 50;
    var rowHeight = data.getNumberOfRows() * 25;
    var chartHeight = rowHeight + paddingHeight;
    var options = {
      height: chartHeight,
      gantt: {
        sortTasks: true,
        trackHeight: 30
      }
    };

    var chart = new google.visualization.Gantt(document.getElementById('chart_div'));
    google.visualization.events.addListener(chart, 'select', myClickHandler);

    function myClickHandler() {
      var selection = chart.getSelection();
      var ses = refData[selection[0].row][0]
      var ldat = refData[selection[0].row][1].toISOString()
      var turl = '{{ url_for('test4_getSession', sessionID='ses', logdate='ldat') }}'
      turl = turl.replace('ses', ses);
      turl = turl.replace('ldat', ldat);
      window.location.assign(turl)
    }

    chart.draw(data, options);
  }
</script>
</head>
<body>
  <div id="chart_div"></div>
</body>
</html>
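The click handler's redirect renders url_for with the literal strings 'ses' and 'ldat' and patches them client-side; JavaScript's String.replace swaps only the first occurrence, which is what makes the trick hold. What Flask renders for that expression, shown with a minimal app (sketch, mirroring the committed route):

from flask import Flask, url_for

app = Flask(__name__)

@app.route('/test4/<sessionID>/<logdate>')
def test4_getSession(sessionID, logdate):
    ...

with app.test_request_context():
    # The template embeds this literal URL; the page's JS then
    # replaces 'ses' and 'ldat' with the clicked row's values.
    print(url_for('test4_getSession', sessionID='ses', logdate='ldat'))
    # -> /test4/ses/ldat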