Compare commits


4 Commits

Author           SHA1         Message         Date
dennis           b67e408525   Removed cache   2023-09-11 11:09:53 +02:00
dennis           8316e6d0e2   lkj             2023-09-11 10:53:50 +02:00
Dennis Kerschus  5b0b3badbb   m               2023-09-05 09:48:55 +02:00
Dennis Kerschus  3ad6805621   backup          2023-06-28 14:23:38 +02:00
27 changed files with 539 additions and 67 deletions

.gitignore

@@ -1,3 +1,11 @@
 /.venv/Lib
 /.venv/Scripts
 /.venv/share
+/__pycache__
+__pycache__/env.cpython-311.pyc
+__pycache__/FlowAnalyserMain.cpython-311.pyc
+__pycache__/FlowTest.cpython-311.pyc
+__pycache__/inn.cpython-311.pyc
+__pycache__/koerselsOverblikUtils.cpython-311.pyc
+__pycache__/sysjob2html.cpython-311.pyc
+__pycache__/sysjobs.cpython-311.pyc

greenlet.h (new file)

@@ -0,0 +1,164 @@
/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
/* Greenlet object interface */
#ifndef Py_GREENLETOBJECT_H
#define Py_GREENLETOBJECT_H
#include <Python.h>
#ifdef __cplusplus
extern "C" {
#endif
/* This is deprecated and undocumented. It does not change. */
#define GREENLET_VERSION "1.0.0"
#ifndef GREENLET_MODULE
#define implementation_ptr_t void*
#endif
typedef struct _greenlet {
PyObject_HEAD
PyObject* weakreflist;
PyObject* dict;
implementation_ptr_t pimpl;
} PyGreenlet;
#define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type))
/* C API functions */
/* Total number of symbols that are exported */
#define PyGreenlet_API_pointers 12
#define PyGreenlet_Type_NUM 0
#define PyExc_GreenletError_NUM 1
#define PyExc_GreenletExit_NUM 2
#define PyGreenlet_New_NUM 3
#define PyGreenlet_GetCurrent_NUM 4
#define PyGreenlet_Throw_NUM 5
#define PyGreenlet_Switch_NUM 6
#define PyGreenlet_SetParent_NUM 7
#define PyGreenlet_MAIN_NUM 8
#define PyGreenlet_STARTED_NUM 9
#define PyGreenlet_ACTIVE_NUM 10
#define PyGreenlet_GET_PARENT_NUM 11
#ifndef GREENLET_MODULE
/* This section is used by modules that use the greenlet C API */
static void** _PyGreenlet_API = NULL;
# define PyGreenlet_Type \
(*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM])
# define PyExc_GreenletError \
((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM])
# define PyExc_GreenletExit \
((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM])
/*
* PyGreenlet_New(PyObject *args)
*
* greenlet.greenlet(run, parent=None)
*/
# define PyGreenlet_New \
(*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \
_PyGreenlet_API[PyGreenlet_New_NUM])
/*
* PyGreenlet_GetCurrent(void)
*
* greenlet.getcurrent()
*/
# define PyGreenlet_GetCurrent \
(*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM])
/*
* PyGreenlet_Throw(
* PyGreenlet *greenlet,
* PyObject *typ,
* PyObject *val,
* PyObject *tb)
*
* g.throw(...)
*/
# define PyGreenlet_Throw \
(*(PyObject * (*)(PyGreenlet * self, \
PyObject * typ, \
PyObject * val, \
PyObject * tb)) \
_PyGreenlet_API[PyGreenlet_Throw_NUM])
/*
* PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args)
*
* g.switch(*args, **kwargs)
*/
# define PyGreenlet_Switch \
(*(PyObject * \
(*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \
_PyGreenlet_API[PyGreenlet_Switch_NUM])
/*
* PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent)
*
* g.parent = new_parent
*/
# define PyGreenlet_SetParent \
(*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \
_PyGreenlet_API[PyGreenlet_SetParent_NUM])
/*
* PyGreenlet_GetParent(PyObject* greenlet)
*
* return greenlet.parent;
*
* This could return NULL even if there is no exception active.
* If it does not return NULL, you are responsible for decrementing the
* reference count.
*/
# define PyGreenlet_GetParent \
(*(PyGreenlet* (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_GET_PARENT_NUM])
/*
* deprecated, undocumented alias.
*/
# define PyGreenlet_GET_PARENT PyGreenlet_GetParent
# define PyGreenlet_MAIN \
(*(int (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_MAIN_NUM])
# define PyGreenlet_STARTED \
(*(int (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_STARTED_NUM])
# define PyGreenlet_ACTIVE \
(*(int (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_ACTIVE_NUM])
/* Macro that imports greenlet and initializes C API */
/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we
keep the older definition to be sure older code that might have a copy of
the header still works. */
# define PyGreenlet_Import() \
{ \
_PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \
}
#endif /* GREENLET_MODULE */
#ifdef __cplusplus
}
#endif
#endif /* !Py_GREENLETOBJECT_H */
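
The comments in the header above map each exported C entry point to its Python-level equivalent, so a short Python sketch shows the same API surface (a minimal illustration, not part of this commit):

import greenlet  # the package whose C API the header above exposes

def run():
    # PyGreenlet_GetCurrent() <-> greenlet.getcurrent()
    print("running in", greenlet.getcurrent())
    return "done"

g = greenlet.greenlet(run)   # PyGreenlet_New <-> greenlet.greenlet(run, parent=None)
print(g.switch())            # PyGreenlet_Switch <-> g.switch(*args, **kwargs); prints "done"
print(g.dead)                # True once run() has returned (cf. PyGreenlet_STARTED/ACTIVE)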

pyvenv.cfg

@@ -1,5 +1,5 @@
-home = C:\Python311
+home = C:\Users\denker\AppData\Local\Programs\Python\Python311
 include-system-site-packages = false
 version = 3.11.4
-executable = C:\Python311\python.exe
+executable = C:\Users\denker\AppData\Local\Programs\Python\Python311\python.exe
-command = C:\Python311\python.exe -m venv --upgrade C:\users\denker\Documents\udvikling\Python\FlowAnalyser\.venv
+command = C:\Users\denker\AppData\Local\Programs\Python\Python311\python.exe -m venv C:\Users\denker\Udvikling\FlowAnalyser\.venv

FlowAnalyserMain.py

@@ -5,6 +5,11 @@ import FlowTest
 from sysjobs import *
 import sysjob2html
 import pandas as pd
+import logging
+from dateutil import tz
+from pytz import timezone
+import koerselsOverblikUtils
+from koerselsOverblikUtils import OverblikDag
 
 def create_app(test_config=None):
     # create and configure the app
@@ -25,24 +30,34 @@ def create_app(test_config=None):
         os.makedirs(FlowAnalyserMain.instance_path)
     except OSError:
         pass
 
+    @FlowAnalyserMain.context_processor
+    def inject_debug():
+        return dict(debug=FlowAnalyserMain.debug)
+
     # a simple page that says hello
     @FlowAnalyserMain.route('/hello')
     def hello():
         return FlowTest.test()
 
     @FlowAnalyserMain.route('/test')
     def test():
-        listSysjobs=[]
-        jobs=['BI - Flow - Batch Start Daglig kl. 20.00','BI - Admin - Log Index Stats', 'BI - Flow - MDS Backup','BI - Admin - shrink staging filgruppe samt log filer'
-            ,'BI - Flow - Batch Slut Daglig kl. 20.00']
-        for job in jobs:
-            listSysjobs.append(list(Sysjobs.getSysjob(session,job))[0])
-        return render_template('index.html', test=listSysjobs)
+        return render_template('index.html', overblikDag=OverblikDag(session))
+        #return render_template('index.html', test=listSysjobs6)#,startSlut=listStartSlutjobs)
 
     @FlowAnalyserMain.route('/test3')
     def test3():
         sysjobs=(Sysjobs.getNattensKoersel(session))
         return render_template('index3.html', test3=sysjobs)
 
+    @FlowAnalyserMain.route('/test4')
+    def test4():
+        sessions=(biadmin_log_ActiveSessionsByInspari.getSessionTimeSpan(session,
+            datetime.fromisoformat('2023-06-01 23:14:16.817'),
+            datetime.fromisoformat('2023-06-02 03:14:18.817')))
+        return render_template('index4.html', test4=sessions)
+
+    @FlowAnalyserMain.route('/test4/<sessionID>/<logdate>')
+    def test4_getSession(sessionID,logdate):
+        sessions=(biadmin_log_ActiveSessionsByInspari.getSessionByID(session,int(sessionID),
+            datetime.fromisoformat(logdate).astimezone(tz.gettz('Europe/Copenhagen'))))
+        return render_template('index4.html', test4=sessions)
+
     @FlowAnalyserMain.route('/test2')
     def test2():
         with Session(engine) as session:
@@ -55,5 +70,8 @@ def create_app(test_config=None):
     return FlowAnalyserMain
 
 engine=inn.getEngine("msdb")
+logging.basicConfig()
+logging.getLogger("sqlalchemy.engine").setLevel(logging.DEBUG)
+logging.getLogger("sqlalchemy.pool").setLevel(logging.DEBUG)
 with Session(engine) as session:
     FlowAnalyserMain=create_app()
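
One subtlety in the new test4_getSession route: datetime.fromisoformat() returns a naive datetime, and calling .astimezone() on a naive value first assumes the system's local zone before converting to Europe/Copenhagen. A minimal sketch of the difference (illustrative only, not part of the commit):

from datetime import datetime
from dateutil import tz

cph = tz.gettz('Europe/Copenhagen')
naive = datetime.fromisoformat('2023-06-01 23:14:16.817')  # no tzinfo in the string

# Interprets the naive value as system-local time, then converts to Copenhagen:
converted = naive.astimezone(cph)
# Declares the value itself to be Copenhagen wall-clock time, no conversion:
pinned = naive.replace(tzinfo=cph)

The two only differ when the server is not running in Europe/Copenhagen, which is presumably why the route's simpler astimezone() call works here.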

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

env.py

@@ -1,4 +1,4 @@
 server = 'bi-dsa-udv\dsa' # to
 database = 'udv_denker'
 username = 'admindenker'
-password = 'biadmin#kode4rmO1'
+password = 'Min!sterTj€n€r€nhv3r$ta7s11g3'

inn.py

@@ -7,7 +7,7 @@ import urllib
 def getEngine(database):
     server = 'bi-dsa-test\dsa' # to specify an alternate port
     username = 'admindenker'
-    password = 'biadmin#kode4rm2'
+    password = 'Min!sterTj€n€r€nhv3r$ta7s11g3'
     connection_string = "DRIVER={SQL Server};Database="+database+";SERVER="+server
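
The hunk only shows the first half of getEngine; a typical way to finish this DRIVER={SQL Server} pattern is to URL-encode the ODBC string behind SQLAlchemy's odbc_connect parameter. A sketch with an illustrative make_engine name (the repo's actual function body is not shown here):

import urllib.parse
from sqlalchemy import create_engine

def make_engine(database, server, username, password):
    cs = ("DRIVER={SQL Server};SERVER=" + server + ";DATABASE=" + database +
          ";UID=" + username + ";PWD=" + password)
    # pyodbc receives the raw ODBC string URL-encoded behind odbc_connect=
    return create_engine("mssql+pyodbc:///?odbc_connect=" + urllib.parse.quote_plus(cs))

engine = make_engine("msdb", r"bi-dsa-test\dsa", "admindenker", "<password>")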

koerselsOverblikUtils.py (new file)

@@ -0,0 +1,117 @@
from datetime import datetime
from typing import Dict, List
from sysjobs import Sysjobs

class SysjobsGroup:
    name = None
    listOfSysjobsInGroup:List['Sysjobs'] = []

    def __init__(self,name,sysjobs:List['Sysjobs']):
        self.name=name
        if(sysjobs!=None and len(sysjobs)>0):
            self.listOfSysjobsInGroup=sysjobs

    def addToGroup(self,sysjob:Sysjobs):
        self.listOfSysjobsInGroup.append(sysjob)

    def getSysjobsAlike(self,stepId,antalDage,ugeDage):
        sysjobsAlike=SysjobsAlike(self.name)
        sysjobsAlike.stepId=stepId
        sysjobsAlike.antalDage=antalDage
        sysjobsAlike.ugeDage=ugeDage
        for job in self.listOfSysjobsInGroup:
            if(not isinstance(job,Sysjobs)):
                job=job[0]
            medianDag=job.getMedianDag(stepId,antalDage,ugeDage)
            if sysjobsAlike.startTime is None or sysjobsAlike.startTime>medianDag[0]:
                sysjobsAlike.startTime=medianDag[0]
            if sysjobsAlike.endTime is None or sysjobsAlike.endTime<medianDag[1]:
                sysjobsAlike.endTime=medianDag[1]
        return sysjobsAlike

class SysjobsAlike:
    name=None
    startTime=None
    endTime=None
    stepId=None
    antalDage=None
    ugeDage=None

    def __init__(self,name):
        self.name=name

    def getName(self):
        return self.name

    def getMedianDag(self,stepId,antalDage,ugeDage):
        medianDag=[self.startTime,self.endTime]
        return medianDag

    def mergeOneMore(self,sysjobs:Sysjobs):
        medianDag=sysjobs.getMedianDag(self.stepId,self.antalDage,self.ugeDage)
        if self.startTime>medianDag[0]:
            self.startTime=medianDag[0]
        if self.endTime<medianDag[1]:
            self.endTime=medianDag[1]

def timeRangeMerge(mainList : List['Sysjobs'], name,stepId,antalDage,ugeDage):
    sysjobalike = SysjobsAlike(name)
    sysjobalike.stepId=stepId
    sysjobalike.antalDage=antalDage
    sysjobalike.ugeDage=ugeDage
    for job in mainList:
        medianDag=job.getMedianDag(stepId,antalDage,ugeDage)
        if sysjobalike.startTime is None or sysjobalike.startTime>medianDag[0]:
            sysjobalike.startTime=medianDag[0]
        if sysjobalike.endTime is None or sysjobalike.endTime<medianDag[1]:
            sysjobalike.endTime=medianDag[1]
    return sysjobalike

def convertToAlike(mainList : List,stepId,antalDage,ugeDage):
    returnList=[]
    for job in mainList:
        if not isinstance(job,Sysjobs) and isinstance(job[0],Sysjobs):
            job=job[0]
        jobAlike=SysjobsAlike(job.name)
        jobAlike.stepId=stepId
        jobAlike.antalDage=antalDage
        jobAlike.ugeDage=ugeDage
        medianDag=job.getMedianDag(stepId,antalDage,ugeDage)
        if len(medianDag)==0:
            continue
        jobAlike.startTime=medianDag[0]
        jobAlike.endTime=medianDag[1]
        returnList.append(jobAlike)
    return returnList

def getOverblikDag(session,dag,resList,):
    return

class OverblikDag:
    session=None
    dagsList:Dict={}

    def __init__(self,session):
        self.session=session

    def getKoerselsOverblikForDag(self,dag):
        if(self.dagsList.get(dag)==None):
            listSysjobs=[]
            startStobjobs:Sysjobs = []
            startStobjobs.append(Sysjobs.getSysjob(self.session,'BI - Flow - Batch Start Daglig kl. 20.00',True)[0][0])
            startStobjobs.append(Sysjobs.getSysjob(self.session,'BI - Flow - Batch Slut Daglig kl. 20.00',True)[0][0])
            startStopGroup=SysjobsGroup("Batch Kørsel",startStobjobs)
            sysjobs=Sysjobs.getSysjob(self.session,'% - Admin - %',False)
            sysjobs=[a for a in sysjobs if a[0].name not in ["BI - Admin - Kill Blocking Queries","BI - Admin - Flow Job Restarter"]]
            adminPostJobs=SysjobsGroup("Batch kørsel post jobs - Admin",[a for a in sysjobs if a[0].name in ["BI - Admin - Log TableRowCounts","BI - Admin - BiBackup","BI - Admin - shrink staging filgruppe samt log filer","BI - Admin - Dimension and Map Table checker","BI - Admin - Log Post Batch Flow",
                "BI - Admin - Sikkerhedskontrol","BI - Admin - Check Indexes"]])
            sysjobs=[a for a in sysjobs if a[0].name not in [a[0].name for a in adminPostJobs.listOfSysjobsInGroup]]
            sysjobs.extend(startStobjobs)
            sysjobs=convertToAlike(sysjobs,0,30,dag)
            sysjobs.append(startStopGroup.getSysjobsAlike(0,30,dag))
            sysjobs.append(adminPostJobs.getSysjobsAlike(0,30,dag))
            self.dagsList[dag]=sorted(sysjobs, key=lambda x: x.getMedianDag(0,30,dag)[0])
        return self.dagsList.get(dag)
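
Usage, as wired up by the new /test route and index.html: OverblikDag caches one sorted list of SysjobsAlike entries per weekday, so repeated template calls hit the dagsList dict instead of re-querying. A sketch under the same assumptions as the code above:

from sqlalchemy.orm import Session
import inn
from koerselsOverblikUtils import OverblikDag

with Session(inn.getEngine("msdb")) as session:
    overblik = OverblikDag(session)
    for job in overblik.getKoerselsOverblikForDag(2):    # weekday 2, 30-day medians
        start, end = job.getMedianDag(0, 30, 2)          # SysjobsAlike just returns its cached pair
        print(job.getName(), start, end)

Note that dagsList is a class attribute, so the cache is shared by every OverblikDag instance in the process.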

Binary file not shown.

Binary file not shown.

static/ssisdb_erd23.pdf (new binary file)

Binary file not shown.

sysjobs.py

@@ -1,9 +1,8 @@
 #from __future__ import annotations
 from typing import List
-from sqlalchemy import BOOLEAN, Column,INTEGER,NVARCHAR, ForeignKey,Select, and_, or_,DateTime,text
+from sqlalchemy import BOOLEAN, Column,INTEGER,NVARCHAR, ForeignKey,Select, and_, or_,DateTime,text,VARCHAR,String,SMALLINT,DECIMAL,DATETIME,BIGINT
 from sqlalchemy.dialects.mssql import UNIQUEIDENTIFIER, TINYINT
-from sqlalchemy.orm import relationship,Session,Mapped,mapped_column
+from sqlalchemy.orm import relationship,Session,Mapped,mapped_column,contains_eager
 from sqlalchemy.ext.declarative import declarative_base
 from datetime import datetime,timedelta,time
 import json
@@ -11,6 +10,7 @@ from json import JSONEncoder
 #import sysjobhistory
 import inn
 
 class MyEncoder(JSONEncoder):
     def default(self, obj):
         if(isinstance(obj,Sysjobs) or isinstance(obj,Sysjobhistory)):
@@ -20,6 +20,16 @@ class MyEncoder(JSONEncoder):
 Base = declarative_base()
 
+class DataflowManagement_JobAfhaengighed(Base):
+    __tablename__ = "JobAfhaengighed"
+    __table_args__ = { "schema": "dataflowmanagement.flw" }
+    ParentJobID: Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"), primary_key=True)
+    ChildJobID: Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"), primary_key=True)
+    OprettetDato: Mapped[datetime] = Column(DateTime)
+    AendretDato: Mapped[datetime] = Column(DateTime)
+    parent: Mapped[List["Sysjobs"]] = relationship(back_populates="parents",foreign_keys=[ParentJobID])
+    child: Mapped[List["Sysjobs"]] = relationship(back_populates="children",foreign_keys=[ChildJobID])
+
 class Sysjobs(Base):
     __tablename__ = "sysjobs"
     job_id: Mapped[str] = mapped_column(UNIQUEIDENTIFIER,primary_key=True)
@@ -27,6 +37,24 @@ class Sysjobs(Base):
     enabled=Column(TINYINT)
     sysjobhistories: Mapped[List["Sysjobhistory"]] = relationship(back_populates="sysjob")
     dataflow_jobs: Mapped[List["DataflowManagement_JobListe"]] = relationship(back_populates="sysjob")
+    sysjobsteps: Mapped[List["msdb_sysjobsteps"]] = relationship(back_populates="sysjob")
+    # children: Mapped[List["Sysjobs"]] = relationship(secondary="JobAfhaengighed",back_populates="parents",
+    #             primaryjoin="Sysjobs.job_id==DataflowManagement_JobAfhaengighed.ParentJobID",
+    #             secondaryjoin="Sysjobs.job_id==DataflowManagement_JobAfhaengighed.ChildJobID")
+    # parents: Mapped[List["Sysjobs"]] =relationship(back_populates="children",
+    #             secondary="JobAfhaengighed",
+    #             primaryjoin="Sysjobs.job_id==DataflowManagement_JobAfhaengighed.ChildJobID",
+    #             secondaryjoin="Sysjobs.job_id==DataflowManagement_JobAfhaengighed.ParentJobID"
+    #             )
+    # children: Mapped[List["Sysjobs"]] = relationship(secondary='JobAfhaengighed',back_populates="parents",
+    #             primaryjoin=job_id==DataflowManagement_JobAfhaengighed.ParentJobID,
+    #             secondaryjoin=job_id==DataflowManagement_JobAfhaengighed.ChildJobID)
+    # parents: Mapped[List["Sysjobs"]] =relationship(back_populates="children",
+    #             secondary='JobAfhaengighed',
+    #             primaryjoin=job_id==DataflowManagement_JobAfhaengighed.ChildJobID,
+    #             secondaryjoin=job_id==DataflowManagement_JobAfhaengighed.ParentJobID
+    #             )
     parents: Mapped[List["DataflowManagement_JobAfhaengighed"]] =relationship(back_populates="child", foreign_keys="DataflowManagement_JobAfhaengighed.ChildJobID")
     children: Mapped[List["DataflowManagement_JobAfhaengighed"]] = relationship(back_populates="parent", foreign_keys="DataflowManagement_JobAfhaengighed.ParentJobID")
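
The commented-out blocks above are attempts at a self-referential many-to-many via secondary=; the mapping that actually ships exposes the JobAfhaengighed association rows directly, so neighbouring jobs are reached through the edge objects. A traversal sketch under the mapping shown here (helper names are illustrative, not the repo's):

def parent_jobs(job):
    # job.parents holds JobAfhaengighed rows whose ChildJobID == job.job_id;
    # each row's .parent relationship resolves the ParentJobID side (declared
    # as a list in this model, though one row has exactly one parent).
    return [edge.parent for edge in job.parents]

def child_jobs(job):
    # symmetric: rows where this job is the parent, resolved via .child
    return [edge.child for edge in job.children]

The association-object form trades direct Sysjobs-to-Sysjobs collections for access to the edge's own columns (OprettetDato, AendretDato).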
@@ -53,13 +81,13 @@
     def getTest(self,session: Session):
         stmt = Select(Sysjobs).join(Sysjobhistory).where(Sysjobhistory.run_date>20230601).distinct()
         print(stmt)
-        with Session(engine) as session:
+        with Session(session) as session:
             row : Sysjobs
             res = session.execute(stmt).all()
             for row in res:
                 print(row.Sysjobs.name + ' ' + str(row.Sysjobhistory.run_date) + ' ' + str(row.Sysjobhistory.run_time))
 
-    def getMedianDag(self,ugeDag: int,stepID: int,antalDage: int):
+    def getMedianDag(self,stepID: int,antalDage: int,ugeDag: int=0):
         session=Session.object_session(self)
         sqlStr='''DECLARE @dage int = :antalDage
 DECLARE @job VARCHAR(200) = :selfName
@@ -78,7 +106,7 @@ FROM
         j.name as job_name,
         run_datetime = CONVERT(DATETIME, '1970-01-01') +
             (run_time * 9 + run_time % 10000 * 6 + run_time % 100 * 10) / 216e4,
-        run_duration = (run_duration/10000*3600 + (run_duration/100)%100*60 + run_duration%100)*100
+        run_duration = (run_duration/10000*3600 + (run_duration/100)%100*60 + run_duration%100)
     from msdb..sysjobhistory h
     inner join msdb..sysjobs j
         ON h.job_id = j.job_id
@@ -94,14 +122,21 @@ FROM
     ) t
 )
-SELECT [ctedateconversion].[MedianRun_datetime] AS MedianRun_datetime_SpecificDate,[ctedateconversion].[run_duration]
+SELECT [ctedateconversion].[MedianRun_datetime] AS MedianRun_datetime_SpecificDate,DATEADD(SECOND,[ctedateconversion].[run_duration],[ctedateconversion].[MedianRun_datetime])
 FROM [ctedateconversion]
 WHERE [ctedateconversion].[MedianRun_datetime] = [ctedateconversion].[run_datetime]
 GROUP BY [ctedateconversion].[MedianRun_datetime],
     [ctedateconversion].[run_duration]'''
         stmt=text(sqlStr).params(antalDage=antalDage,selfName=self.name,ugeDag=ugeDag,stepID=stepID)
         res=session.execute(stmt).all()
-        return res
+        resResult=[]
+        if(len(res)>0):
+            resResult=list(res[0])
+            if(resResult[0]<datetime(1970, 1, 1, 20, 0,0) and self.name!='BI - Flow - Batch Start Daglig kl. 20.00'):
+                resResult[0]+= timedelta(days=1)
+            if(resResult[1]<datetime(1970, 1, 1, 20, 0,0) and self.name!='BI - Flow - Batch Start Daglig kl. 20.00'):
+                resResult[1]+= timedelta(days=1)
+        return resResult
 
     def printParent(self, sysjobs:List['Sysjobs']):
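
The run_duration change above drops a stray *100 factor: msdb packs both run_time and run_duration as HHMMSS integers, so the parenthesised expression already yields seconds, which the new SELECT feeds straight into DATEADD(SECOND, ...). The same conversion in Python, for reference (illustrative, not part of the commit):

def hhmmss_to_seconds(packed: int) -> int:
    # msdb stores e.g. 1h 35m 42s as the integer 13542
    hours, rest = divmod(packed, 10000)
    minutes, seconds = divmod(rest, 100)
    return hours * 3600 + minutes * 60 + seconds

assert hhmmss_to_seconds(13542) == 1 * 3600 + 35 * 60 + 42

The trailing Python block likewise normalises results onto a synthetic 1970-01-01 day: any time before 20:00 is pushed to the next day, so post-midnight jobs sort after the 20:00 batch start.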
@@ -118,21 +153,35 @@ GROUP BY [ctedateconversion].[MedianRun_datetime],
     def getNattensKoersel(session) -> List['Sysjobs']:
         natStat=(datetime.today()-timedelta(days=1)).replace(hour=20,minute=0,second=0,microsecond=0)
         resReturn: List['Sysjobs'] = list()
-        stmt = Select(Sysjobs,Sysjobhistory).join(DataflowManagement_JobListe).join(Sysjobhistory).join(DataflowManagement_JobsForExecution).where(Sysjobhistory.step_id==0).where(DataflowManagement_JobListe.Aktiv==1).where(or_(and_(Sysjobhistory.run_date>=int((natStat.strftime('%Y%m%d'))),(Sysjobhistory.run_time>=int((natStat.strftime('%H%M%S'))))),Sysjobhistory.run_date>=int((datetime.today().strftime('%Y%m%d'))))).distinct()
+        stmt = Select(Sysjobs).join(DataflowManagement_JobListe).join(Sysjobhistory).join(DataflowManagement_JobsForExecution).where(Sysjobhistory.step_id==0).where(DataflowManagement_JobListe.Aktiv==1).where(or_(and_(Sysjobhistory.run_date>=int((natStat.strftime('%Y%m%d'))),(Sysjobhistory.run_time>=int((natStat.strftime('%H%M%S'))))),Sysjobhistory.run_date>=int((datetime.today().strftime('%Y%m%d'))))).distinct()
         row : Sysjobs
         res = session.execute(stmt).all()
         return res
 
-    def getSysjob(session, sysjobName: str):
+    def getSysjob(session, sysjobName: str, fullName:bool=True,historikDage: int=0):
         resReturn=None
+        natStat=(datetime.today()-timedelta(days=historikDage)).replace(hour=20,minute=0,second=0,microsecond=0)
         if(sysjobName!=None):
-            stmt = Select(Sysjobs).where(Sysjobs.name==sysjobName)
+            stmt=Select(Sysjobs)
+            if(historikDage>0):
+                stmt=stmt.join(Sysjobhistory).where(or_(and_(Sysjobhistory.run_date>=int((natStat.strftime('%Y%m%d'))),(Sysjobhistory.run_time>=int((natStat.strftime('%H%M%S'))))),Sysjobhistory.run_date>=int((datetime.today().strftime('%Y%m%d')))))
+            if(fullName==False):
+                stmt = stmt.where(Sysjobs.name.like(sysjobName))
+            else:
+                stmt = stmt.where(Sysjobs.name==sysjobName)
+            if(historikDage>0):
+                stmt=stmt.options(contains_eager(Sysjobs.sysjobhistories))
             try:
-                resReturn=session.execute(stmt)
+                resReturn=session.execute(stmt).unique().all()
             except:
                 session.rollback()
             return resReturn
+
+    def getSmallestWaitOfParent(self):
+        return
 
 class Sysjobhistory(Base):
     __tablename__ = "sysjobhistory"
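
The widened getSysjob signature covers the call shapes the new code needs; the first two examples below match calls made in koerselsOverblikUtils.py, the third is an illustrative combination:

# exact-name lookup, as before
start_job = Sysjobs.getSysjob(session, 'BI - Flow - Batch Start Daglig kl. 20.00', True)

# fullName=False switches == to LIKE, so SQL wildcards work
admin_jobs = Sysjobs.getSysjob(session, '% - Admin - %', False)

# historikDage > 0 joins sysjobhistory since that cutoff and eager-loads it
# onto each row via contains_eager, deduplicated by .unique().all()
recent = Sysjobs.getSysjob(session, '% - Flow - %', False, historikDage=30)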
@@ -206,21 +255,14 @@ class DataflowManagement_JobListe(Base):
     Aktiv=Column(BOOLEAN)
     sysjob: Mapped["Sysjobs"] = relationship(back_populates="dataflow_jobs")
 
-class DataflowManagement_JobAfhaengighed(Base):
-    __tablename__ = "JobAfhaengighed"
-    __table_args__ = { "schema": "dataflowmanagement.flw" }
-    ParentJobID: Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"), primary_key=True)
-    ChildJobID: Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"), primary_key=True)
-    OprettetDato: Mapped[datetime] = Column(DateTime)
-    AendretDato: Mapped[datetime] = Column(DateTime)
-    parent: Mapped[List["Sysjobs"]] = relationship(back_populates="parents",foreign_keys=[ParentJobID])
-    child: Mapped[List["Sysjobs"]] = relationship(back_populates="children",foreign_keys=[ChildJobID])
-
 class DataflowManagement_JobMasterSetup(Base):
     __tablename__ = "JobMasterSetup"
     __table_args__ = { "schema": "dataflowmanagement.flw" }
     JobID: Mapped[str] = mapped_column(UNIQUEIDENTIFIER,primary_key=True)
     CurrentBatchID: Mapped[int] = mapped_column(INTEGER)
+    MinMellemAfvikling: Mapped[int] = mapped_column(INTEGER)
 
     def getCurrentBatchId(session):
         stmt = Select(DataflowManagement_JobMasterSetup)
@@ -231,5 +273,49 @@ class DataflowManagement_JobsForExecution(Base):
     __tablename__ = "JobsForExecution"
     __table_args__ = { "schema": "dataflowmanagement.flw" }
     JobID: Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"),primary_key=True)
-    BatchID: Mapped[int] = mapped_column("JobMasterSetup.CurrentBatchID")
+    BatchID: Mapped[int] = mapped_column(INTEGER)
+    ExecutionID: Mapped[int] = mapped_column(primary_key=True)
+
+class msdb_sysjobsteps(Base):
+    __tablename__ = "sysjobsteps"
+    __table_args__ = { "schema": "msdb.dbo" }
+    job_id:Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"),primary_key=True)
+    step_id: Mapped[int] = mapped_column(INTEGER,primary_key=True)
+    step_uid: Mapped[str] = mapped_column(UNIQUEIDENTIFIER)
+    command:Mapped[str] = mapped_column(NVARCHAR)
+    sysjob: Mapped["Sysjobs"] = relationship(back_populates="sysjobsteps")
+
+class biadmin_log_ActiveSessionsByInspari(Base):
+    __tablename__="ActiveSessionsByInspari"
+    __table_args__={"schema":"biadmin.log"}
+    session_id:Mapped[int]=mapped_column(SMALLINT,primary_key=True)
+    blocking_session_id:Mapped[int]=mapped_column(SMALLINT)
+    ElapsedTimeSec:Mapped[float]=mapped_column(DECIMAL)
+    ElapsedTimeMinutes:Mapped[float]=mapped_column(DECIMAL)
+    request_start_time:Mapped[datetime]=mapped_column(DateTime)
+    DatabaseName:Mapped[str]=mapped_column(NVARCHAR)
+    WaitTimeMs:Mapped[int]=mapped_column(INTEGER)
+    wait_type:Mapped[str]=mapped_column(NVARCHAR)
+    host_process_id:Mapped[int]=mapped_column(INTEGER)
+    MemoryGrantRequestTime:Mapped[datetime]=mapped_column(DateTime)
+    MemoryGrantGrantedTime:Mapped[datetime]=mapped_column(DateTime)
+    LogDate:Mapped[datetime]=mapped_column(DATETIME,primary_key=True)
+
+    def getSessionTimeSpan(session:Session,beginTime:datetime,Endtime:datetime):
+        stmt=Select(biadmin_log_ActiveSessionsByInspari).where(and_(biadmin_log_ActiveSessionsByInspari.request_start_time>=beginTime,biadmin_log_ActiveSessionsByInspari.request_start_time<=Endtime))
+        res=session.execute(stmt).all()
+        return res
+
+    def getSessionByID(session:Session,sessionID:int,logDate:datetime=None):
+        stmt=Select(biadmin_log_ActiveSessionsByInspari).where(and_(biadmin_log_ActiveSessionsByInspari.session_id==sessionID,biadmin_log_ActiveSessionsByInspari.LogDate==logDate))
+        res=session.execute(stmt).all()
+        return res
+
+#class AllExecutionMessages(Base):
+
+class BiAdmin_log_WaitingTasks(Base):
+    __tablename__="WaitingTasks"
+    __table_args__={"schema":"biadmin.log"}
+    session_id:Mapped[int]=mapped_column(SMALLINT,primary_key=True)
+    wait_duration_ms:Mapped[int]=mapped_column(BIGINT)
+    wait_type:Mapped[str]=mapped_column(NVARCHAR)
+    blocking_session_id:Mapped[int]=mapped_column(SMALLINT)
+    LogDate:Mapped[datetime]=mapped_column(DATETIME,primary_key=True)

templates/index.html

@@ -5,42 +5,57 @@
     <title>FlaskBlog</title>
     <script type="text/javascript" src="https://www.gstatic.com/charts/loader.js"></script>
     <script type="text/javascript">
-        google.charts.load('current', {'packages':['gantt']});
+        google.charts.load('current', {'packages':['timeline']});
         google.charts.setOnLoadCallback(drawChart);
         function drawChart() {
+            var container = document.getElementById('chart_div');
+            var chart = new google.visualization.Timeline(container);
             var data = new google.visualization.DataTable();
-            data.addColumn('string', 'Task ID');
-            data.addColumn('string', 'Task Name');
-            data.addColumn('string', 'Resource');
-            data.addColumn('date', 'Start Date');
-            data.addColumn('date', 'End Date');
-            data.addColumn('number', 'Duration');
-            data.addColumn('number', 'Percent Complete');
-            data.addColumn('string', 'Dependencies');
+            data.addColumn({ type: 'string', id: 'President' });
+            data.addColumn({ type: 'string', id: 'President2' });
+            data.addColumn({ type: 'date', id: 'Start' });
+            data.addColumn({ type: 'date', id: 'End' });
             data.addRows([
+                {% set test = overblikDag.getKoerselsOverblikForDag(1) %}
                 {% for job in test %}
-                ['{{ job[0].name }}','{{ job[0].name }}','{{ job[0].name.split(' ')[4][:3] }}', new Date("{{ job[0].getMedianDag(2,0,30)[0][0] }}"),null,{{ job[0].getMedianDag(2,0,30)[0][1] }},100,''],
+                ['{{ job.name }}','{{ job.name }}',new Date("{{ job.getMedianDag(0,30,2)[0] }}"),new Date("{{ job.getMedianDag(0,30,2)[1] }}")],
                 {% endfor %}
-                ['BI - Admin - Log Index Stats2','BI - Admin - Log Index Stats','Log', [new Date("1970-01-01 17:00:00"),new Date("1970-01-01 18:00:00")],null,259000,100,''],
             ]);
-            var paddingHeight = 50;
-            var rowHeight = data.getNumberOfRows() * 25;
-            var chartHeight = rowHeight + paddingHeight;
+            var calHeight = data.getNumberOfRows() * 45
            var options = {
-                height: chartHeight,
-                gantt: {
-                    sortTasks: true,
-                    trackHeight: 30
-                }
+                height: calHeight,
+                timeline: { groupByRowLabel: true }
             };
-            var chart = new google.visualization.Gantt(document.getElementById('chart_div'));
+            chart.draw(data, options);
+        }
+    </script>
+    <script type="text/javascript">
+        google.charts.load('current', {'packages':['timeline']});
+        google.charts.setOnLoadCallback(drawChart);
+        function drawChart() {
+            var container = document.getElementById('chart2_div');
+            var chart = new google.visualization.Timeline(container);
+            var data = new google.visualization.DataTable();
+            data.addColumn({ type: 'string', id: 'President' });
+            data.addColumn({ type: 'string', id: 'President2' });
+            data.addColumn({ type: 'date', id: 'Start' });
+            data.addColumn({ type: 'date', id: 'End' });
+            data.addRows([
+                {% set test = overblikDag.getKoerselsOverblikForDag(2) %}
+                {% for job in test %}
+                ['{{ job.name }}','{{ job.name }}',new Date("{{ job.getMedianDag(0,30,2)[0] }}"),new Date("{{ job.getMedianDag(0,30,2)[1] }}")],
+                {% endfor %}
+            ]);
+            var calHeight = data.getNumberOfRows() * 45
+            var options = {
+                height: calHeight,
+                timeline: { groupByRowLabel: true }
+            };
             chart.draw(data, options);
         }

@@ -48,5 +63,6 @@
 </head>
 <body>
     <div id="chart_div"></div>
+    <div id="chart2_div"></div>
 </body>
 </html>

templates/index4.html (new file)

@@ -0,0 +1,62 @@
<html>
<head>
    <script type="text/javascript" src="https://www.gstatic.com/charts/loader.js"></script>
    <script type="text/javascript">
        google.charts.load('current', {'packages':['gantt']});
        google.charts.setOnLoadCallback(drawChart);
        var refData = new Array()
        {% for job in test4 %}
        refData.push([{{ job[0].session_id }},new Date("{{ job[0].LogDate }}")])
        {% endfor %}
        function drawChart() {
            var data = new google.visualization.DataTable();
            data.addColumn('string', 'Task ID');
            data.addColumn('string', 'Task Name');
            data.addColumn('string', 'Resource');
            data.addColumn('date', 'Start Date');
            data.addColumn('date', 'End Date');
            data.addColumn('number', 'Duration');
            data.addColumn('number', 'Percent Complete');
            data.addColumn('string', 'Dependencies');
            data.addRows([
                {% for job in test4 %}
                ['{{ job[0].session_id }}','{{ job[0].DatabaseName }}','{{ job[0].DatabaseName }}', new Date("{{ job[0].request_start_time }}"),new Date(new Date("{{ (job[0].request_start_time) }}").setSeconds(new Date("{{ job[0].request_start_time }}").getSeconds()+{{ job[0].ElapsedTimeSec }})),null,null,null],
                {% endfor %}
            ]);
            var paddingHeight = 50;
            var rowHeight = data.getNumberOfRows() * 25;
            var chartHeight = rowHeight + paddingHeight;
            var options = {
                height: chartHeight,
                gantt: {
                    sortTasks: true,
                    trackHeight: 30
                }
            };
            var chart = new google.visualization.Gantt(document.getElementById('chart_div'));
            google.visualization.events.addListener(chart, 'select', myClickHandler);
            function myClickHandler(){
                var selection = chart.getSelection();
                var ses=refData[selection[0].row][0]
                var ldat=refData[selection[0].row][1].toISOString()
                var turl='{{ url_for('test4_getSession' ,sessionID='ses',logdate='ldat') }}'
                turl = turl.replace('ses', ses);
                turl = turl.replace('ldat', ldat);
                window.location.assign(turl)
            }
            chart.draw(data, options);
        }
    </script>
</head>
<body>
    <div id="chart_div"></div>
</body>
</html>

test.js (new file)

@@ -0,0 +1 @@
new Date.today()