diff --git a/FlowAnalyserMain.py b/FlowAnalyserMain.py
index d71d8cc..8541c4d 100644
--- a/FlowAnalyserMain.py
+++ b/FlowAnalyserMain.py
@@ -33,11 +33,18 @@ def create_app(test_config=None):
     @FlowAnalyserMain.route('/test')
     def test():
         listSysjobs=[]
-        jobs=['BI - Flow - Batch Start – Daglig kl. 20.00','BI - Admin - Log Index Stats', 'BI - Flow - MDS Backup','BI - Admin - shrink staging filgruppe samt log filer'
-            ,'BI - Flow - Batch Slut – Daglig kl. 20.00']
-        for job in jobs:
-            listSysjobs.append(list(Sysjobs.getSysjob(session,job))[0])
-        return render_template('index.html', test=listSysjobs)
+        listStartSlutjobs=[]
+        listStartSlutjobs.append(Sysjobs.getSysjob(session,'BI - Flow - Batch Start – Daglig kl. 20.00',True)[0])
+        listStartSlutjobs.append(Sysjobs.getSysjob(session,'BI - Flow - Batch Slut – Daglig kl. 20.00',True)[0])
+        listSysjobs.append(listStartSlutjobs[0][0])
+        sysjobs=Sysjobs.getSysjob(session,'% - Admin - %',False)
+        if(sysjobs!=None and len(sysjobs)>0):
+            for i in sysjobs:
+                listSysjobs.append(i[0])
+        listSysjobs.append(listStartSlutjobs[1][0])
+        listSysjobs = [x for x in listSysjobs if len(x.getMedianDag(0,30,2))>0]
+        listSysjobs = sorted(listSysjobs, key=lambda x: x.getMedianDag(0,30,2)[0])
+        return render_template('index.html', test=listSysjobs,startSlut=listStartSlutjobs)
     @FlowAnalyserMain.route('/test3')
     def test3():
         sysjobs=(Sysjobs.getNattensKoersel(session))
diff --git a/__pycache__/FlowAnalyserMain.cpython-311.pyc b/__pycache__/FlowAnalyserMain.cpython-311.pyc
index 0ea8abc..376385b 100644
Binary files a/__pycache__/FlowAnalyserMain.cpython-311.pyc and b/__pycache__/FlowAnalyserMain.cpython-311.pyc differ
diff --git a/__pycache__/sysjobs.cpython-311.pyc b/__pycache__/sysjobs.cpython-311.pyc
index ace75c6..5c5260d 100644
Binary files a/__pycache__/sysjobs.cpython-311.pyc and b/__pycache__/sysjobs.cpython-311.pyc differ
diff --git a/sysjobs.py b/sysjobs.py
index ee1e2cf..a888f46 100644
--- a/sysjobs.py
+++ b/sysjobs.py
@@ -1,9 +1,9 @@
 #from __future__ import annotations
 from typing import List
-from sqlalchemy import BOOLEAN, Column,INTEGER,NVARCHAR, ForeignKey,Select, and_, or_,DateTime,text
+from sqlalchemy import BOOLEAN, Column,INTEGER,NVARCHAR, ForeignKey,Select, and_, or_,DateTime,text,VARCHAR
 from sqlalchemy.dialects.mssql import UNIQUEIDENTIFIER, TINYINT
-from sqlalchemy.orm import relationship,Session,Mapped,mapped_column
+from sqlalchemy.orm import relationship,Session,Mapped,mapped_column,contains_eager
 from sqlalchemy.ext.declarative import declarative_base
 from datetime import datetime,timedelta,time
 import json

@@ -27,6 +27,8 @@ class Sysjobs(Base):
     enabled=Column(TINYINT)
     sysjobhistories: Mapped[List["Sysjobhistory"]] = relationship(back_populates="sysjob")
     dataflow_jobs: Mapped[List["DataflowManagement_JobListe"]] = relationship(back_populates="sysjob")
+    sysjobsteps: Mapped[List["msdb_sysjobsteps"]] = relationship(back_populates="sysjob")
+    parents: Mapped[List["DataflowManagement_JobAfhaengighed"]] =relationship(back_populates="child", foreign_keys="DataflowManagement_JobAfhaengighed.ChildJobID")
     children: Mapped[List["DataflowManagement_JobAfhaengighed"]] = relationship(back_populates="parent", foreign_keys="DataflowManagement_JobAfhaengighed.ParentJobID")

@@ -59,7 +61,7 @@ class Sysjobs(Base):
         for row in res:
             print(row.Sysjobs.name + ' ' + str(row.Sysjobhistory.run_date) + ' ' + str(row.Sysjobhistory.run_time))

-    def getMedianDag(self,ugeDag: int,stepID: int,antalDage: int):
+    def getMedianDag(self,stepID: int,antalDage: int,ugeDag: int=0):
         session=Session.object_session(self)
         sqlStr='''DECLARE @dage int = :antalDage
DECLARE @job VARCHAR(200) = :selfName
@@ -78,7 +80,7 @@ FROM
         j.name as job_name,
         run_datetime = CONVERT(DATETIME, '1970-01-01') + (run_time * 9 + run_time % 10000 * 6 + run_time % 100 * 10) / 216e4,
-        run_duration = (run_duration/10000*3600 + (run_duration/100)%100*60 + run_duration%100)*100
+        run_duration = (run_duration/10000*3600 + (run_duration/100)%100*60 + run_duration%100)
     from msdb..sysjobhistory h
     inner join msdb..sysjobs j
     ON h.job_id = j.job_id
@@ -94,14 +96,21 @@
 ) t
 )

-SELECT [ctedateconversion].[MedianRun_datetime] AS MedianRun_datetime_SpecificDate,[ctedateconversion].[run_duration]
+SELECT [ctedateconversion].[MedianRun_datetime] AS MedianRun_datetime_SpecificDate,DATEADD(SECOND,[ctedateconversion].[run_duration],[ctedateconversion].[MedianRun_datetime])
 FROM [ctedateconversion]
 WHERE [ctedateconversion].[MedianRun_datetime] = [ctedateconversion].[run_datetime]
 GROUP BY [ctedateconversion].[MedianRun_datetime], [ctedateconversion].[run_duration]'''
         stmt=text(sqlStr).params(antalDage=antalDage,selfName=self.name,ugeDag=ugeDag,stepID=stepID)
         res=session.execute(stmt).all()
-        return res
+        resResult=[]
+        if(len(res)>0):
+            resResult=list(res[0])
+            if(resResult[0]
+        return resResult

     def getNattensKoersel(session) -> List['Sysjobs']:
         natStat=(datetime.today()-timedelta(days=1)).replace(hour=20,minute=0,second=0,microsecond=0)
         resReturn: List['Sysjobs'] = list()
-        stmt = Select(Sysjobs,Sysjobhistory).join(DataflowManagement_JobListe).join(Sysjobhistory).join(DataflowManagement_JobsForExecution).where(Sysjobhistory.step_id==0).where(DataflowManagement_JobListe.Aktiv==1).where(or_(and_(Sysjobhistory.run_date>=int((natStat.strftime('%Y%m%d'))),(Sysjobhistory.run_time>=int((natStat.strftime('%H%M%S'))))),Sysjobhistory.run_date>=int((datetime.today().strftime('%Y%m%d'))))).distinct()
+        stmt = Select(Sysjobs).join(DataflowManagement_JobListe).join(Sysjobhistory).join(DataflowManagement_JobsForExecution).where(Sysjobhistory.step_id==0).where(DataflowManagement_JobListe.Aktiv==1).where(or_(and_(Sysjobhistory.run_date>=int((natStat.strftime('%Y%m%d'))),(Sysjobhistory.run_time>=int((natStat.strftime('%H%M%S'))))),Sysjobhistory.run_date>=int((datetime.today().strftime('%Y%m%d'))))).distinct()
         row : Sysjobs
+        stmt.options(contains_eager(Sysjobs.sysjobhistories), contains_eager(Sysjobs.parents), contains_eager(DataflowManagement_JobAfhaengighed.parent))
         res = session.execute(stmt).all()
         return res

-    def getSysjob(session, sysjobName: str):
+    def getSysjob(session, sysjobName: str, fullName:bool=True,historikDage: int=0):
         resReturn=None
+        natStat=(datetime.today()-timedelta(days=historikDage)).replace(hour=20,minute=0,second=0,microsecond=0)
         if(sysjobName!=None):
-            stmt = Select(Sysjobs).where(Sysjobs.name==sysjobName)
+            stmt=Select(Sysjobs)
+            if(historikDage>0):
+                stmt=stmt.join(Sysjobhistory).where(or_(and_(Sysjobhistory.run_date>=int((natStat.strftime('%Y%m%d'))),(Sysjobhistory.run_time>=int((natStat.strftime('%H%M%S'))))),Sysjobhistory.run_date>=int((datetime.today().strftime('%Y%m%d')))))
+            if(fullName==False):
+                stmt = stmt.where(Sysjobs.name.like(sysjobName))
+            else:
+                stmt = stmt.where(Sysjobs.name==sysjobName)
+            if(historikDage>0):
+                stmt=stmt.options(contains_eager(Sysjobs.sysjobhistories))
+
             try:
-                resReturn=session.execute(stmt)
+                resReturn=session.execute(stmt).unique().all()
             except:
                 session.rollback()
         return resReturn
-
+    def getSmallestWaitOfParent(self):
+        return
+
 class Sysjobhistory(Base):
     __tablename__ = "sysjobhistory"
     instance_id=Column(INTEGER,primary_key=True)
@@ -206,6 +228,7 @@ class DataflowManagement_JobListe(Base):
     Aktiv=Column(BOOLEAN)
     sysjob: Mapped["Sysjobs"] = relationship(back_populates="dataflow_jobs")

+
 class DataflowManagement_JobAfhaengighed(Base):
     __tablename__ = "JobAfhaengighed"
     __table_args__ = { "schema": "dataflowmanagement.flw" }
@@ -221,6 +244,7 @@ class DataflowManagement_JobMasterSetup(Base):
     __table_args__ = { "schema": "dataflowmanagement.flw" }
     JobID: Mapped[str] = mapped_column(UNIQUEIDENTIFIER,primary_key=True)
     CurrentBatchID: Mapped[int] = mapped_column(INTEGER)
+    MinMellemAfvikling: Mapped[int] = mapped_column(INTEGER)

     def getCurrentBatchId(session):
         stmt = Select(DataflowManagement_JobMasterSetup)
@@ -231,5 +255,20 @@
 class DataflowManagement_JobsForExecution(Base):
     __tablename__ = "JobsForExecution"
     __table_args__ = { "schema": "dataflowmanagement.flw" }
     JobID: Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"),primary_key=True)
-    BatchID: Mapped[int] = mapped_column("JobMasterSetup.CurrentBatchID")
+    BatchID: Mapped[int] = mapped_column(INTEGER)
+    ExecutionID: Mapped[int] = mapped_column(primary_key=True)
+
+class msdb_sysjobsteps(Base):
+    __tablename__ = "sysjobsteps"
+    __table_args__ = { "schema": "msdb.dbo" }
+    job_id:Mapped[str] = mapped_column(ForeignKey("sysjobs.job_id"),primary_key=True)
+    step_id: Mapped[int] = mapped_column(INTEGER,primary_key=True)
+    step_uid: Mapped[str] = mapped_column(UNIQUEIDENTIFIER)
+    command:Mapped[str] = mapped_column(NVARCHAR)
+    sysjob: Mapped["Sysjobs"] = relationship(back_populates="sysjobsteps")
+
+
+
+
+#class AllExecutionMessages(Base):
\ No newline at end of file
diff --git a/templates/index.html b/templates/index.html
index 6f01ed5..d7947ea 100644
--- a/templates/index.html
+++ b/templates/index.html
@@ -4,43 +4,30 @@
     FlaskBlog
-