diff --git a/core/cachecontroller/mainmenurls.txt b/core/cachecontroller/mainmenurls.txt
index 0af6a4b5..ab8ae24d 100644
--- a/core/cachecontroller/mainmenurls.txt
+++ b/core/cachecontroller/mainmenurls.txt
@@ -40,3 +40,4 @@
 /dash/world/
 /dc/sendstalledreport/
 /slowtasks/
+/harvester/workers/
diff --git a/core/cachecontroller/schedinstances/Harvester.py b/core/cachecontroller/schedinstances/Harvester.py
index 96530d4f..7caf6746 100644
--- a/core/cachecontroller/schedinstances/Harvester.py
+++ b/core/cachecontroller/schedinstances/Harvester.py
@@ -16,9 +16,9 @@
     def getpayload(self):
         urlsQueue = queue.PriorityQueue(-1)
 
         harvList = self.downloadPayloadJSON(HARVESTER_LIST_URL)
-        if harvList is not None:
-            for hin in harvList:
-                urlsQueue.put((self.BASIC_PRIORITY, '/harvester/workers/?instance='+str(hin['instance'])))
+        if harvList is not None and 'instances' in harvList:
+            for hin in harvList['instances']:
+                urlsQueue.put((self.BASIC_PRIORITY, '/harvester/workers/?instance='+str(hin['harvesterid'])))
 
         return urlsQueue
diff --git a/core/cachecontroller/settingscron.py b/core/cachecontroller/settingscron.py
index 1df13112..56c5f2b7 100644
--- a/core/cachecontroller/settingscron.py
+++ b/core/cachecontroller/settingscron.py
@@ -7,7 +7,7 @@
 TIMEOUT_WHEN_DB_LOADED = 5
 URL_WITH_BIG_TASKS = '/tasks/?site=ORNL_Titan_MCORE&status=running&json'
 URL_WITH_ES_TASKS = '/tasks/?eventservice=eventservice&json'
-HARVESTER_LIST_URL = '/harvester/instances/?json'
+HARVESTER_LIST_URL = '/harvester/instances/?days=7&json'
 LOG_PATH = "/tmp/cachecontroller.log"
 PANDA_LOGGER_PATH = "/cephfs/atlpan/filebrowser"
 MAX_LOG_AGE_DAYS = 3
diff --git a/core/constants.py b/core/constants.py
index f48c7529..49197b73 100644
--- a/core/constants.py
+++ b/core/constants.py
@@ -145,7 +145,7 @@
     {'name': 'transformation', 'error': 'transexitcode', 'diag': None, 'title': 'Trf exit code'},
 )
 
-JOB_FIELDS_STANDARD = (
+JOB_FIELDS_ATTR_SUMMARY = (
     'processingtype',
     'computingsite',
     'jobstatus',
@@ -211,3 +211,75 @@
     'attemptnr',
     'site'
 )
+
+JOB_FIELDS = (
+    'corecount',
+    'jobsubstatus',
+    'produsername',
+    'cloud',
+    'computingsite',
+    'cpuconsumptiontime',
+    'jobstatus',
+    'transformation',
+    'prodsourcelabel',
+    'specialhandling',
+    'vo',
+    'modificationtime',
+    'pandaid',
+    'atlasrelease',
+    'jobsetid',
+    'processingtype',
+    'workinggroup',
+    'jeditaskid',
+    'taskid',
+    'currentpriority',
+    'creationtime',
+    'starttime',
+    'endtime',
+    'brokerageerrorcode',
+    'brokerageerrordiag',
+    'ddmerrorcode',
+    'ddmerrordiag',
+    'exeerrorcode',
+    'exeerrordiag',
+    'jobdispatchererrorcode',
+    'jobdispatchererrordiag',
+    'piloterrorcode',
+    'piloterrordiag',
+    'superrorcode',
+    'superrordiag',
+    'taskbuffererrorcode',
+    'taskbuffererrordiag',
+    'transexitcode',
+    'destinationse',
+    'homepackage',
+    'inputfileproject',
+    'inputfiletype',
+    'attemptnr',
+    'jobname',
+    'computingelement',
+    'proddblock',
+    'destinationdblock',
+    'reqid',
+    'minramcount',
+    'statechangetime',
+    'avgvmem',
+    'maxvmem',
+    'maxpss',
+    'maxrss',
+    'nucleus',
+    'eventservice',
+    'nevents',
+    'gshare',
+    'noutputdatafiles',
+    'parentid',
+    'actualcorecount',
+    'schedulerid',
+    'container_name',
+    'maxattempt',
+    'pilotid',
+    'jobmetrics',
+    'resourcetype',
+    'commandtopilot',
+    'cmtconfig'
+)
diff --git a/core/datacarousel/templates/DataTapeCarouselle.html b/core/datacarousel/templates/DataTapeCarouselle.html
index 1c47a055..83815e4d 100644
--- a/core/datacarousel/templates/DataTapeCarouselle.html
+++ b/core/datacarousel/templates/DataTapeCarouselle.html
@@ -500,8 +500,8 @@
             {
                 title: 'Started at', data: 'start_time' },
             { title: 'Rucio rule', data: 'rse',
                 "render": function(data, type, full, meta) {
-                    if (data) {
-                        return '' + full['rse'].slice(0,3) + '...' + full['rse'].slice(full['rse'].length-3, full['rse'].length) + ' ';
+                    if (data && "{{ request.session.rucio_ui }}".length > 0) {
+                        return '' + full['rse'].slice(0,3) + '...' + full['rse'].slice(full['rse'].length-3, full['rse'].length) + ' ';
                     } else {
                         return '---'
diff --git a/core/datacarousel/templates/DataTapeCaruselTails.html b/core/datacarousel/templates/DataTapeCaruselTails.html
index f6d359c5..905d066f 100644
--- a/core/datacarousel/templates/DataTapeCaruselTails.html
+++ b/core/datacarousel/templates/DataTapeCaruselTails.html
@@ -162,8 +162,8 @@
             data: "rucio_rule",
             sDefaultContent: "-",
             render: function(data, type, full, meta) {
-                if (data.length > 0) {
-                    return '' + data + ''
+                if (data.length > 0 && "{{ request.session.rucio_ui }}".length > 0) {
+                    return '' + data + ''
                 } else {
                     return data;
diff --git a/core/harvester/templates/harvesterWorkers.html b/core/harvester/templates/harvesterWorkers.html
index 0e3e48d2..90eec0ac 100644
--- a/core/harvester/templates/harvesterWorkers.html
+++ b/core/harvester/templates/harvesterWorkers.html
@@ -135,7 +135,7 @@
-
+
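The cache controller change above tightens the contract with the /harvester/instances/?days=7&json endpoint: the payload is now expected to be a dict carrying an 'instances' list whose items are keyed by 'harvesterid', instead of a bare list keyed by 'instance'. A minimal, self-contained sketch of the new parsing logic follows; the BASIC_PRIORITY value and the sample harvester id are made up for illustration, only the dict shape and keys come from the diff itself:

import queue

BASIC_PRIORITY = 10  # hypothetical value; the real one lives on the schedinstance class

def build_urls_queue(harv_list):
    # PriorityQueue(-1) is an unbounded queue of (priority, url) tuples
    urls_queue = queue.PriorityQueue(-1)
    if harv_list is not None and 'instances' in harv_list:
        for hin in harv_list['instances']:
            urls_queue.put((BASIC_PRIORITY, '/harvester/workers/?instance=' + str(hin['harvesterid'])))
    return urls_queue

# payload shape matching the new endpoint response
q = build_urls_queue({'instances': [{'harvesterid': 'cern_cloud'}]})
assert q.get() == (10, '/harvester/workers/?instance=cern_cloud')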
diff --git a/core/harvester/templates/harvestermon.html b/core/harvester/templates/harvestermon.html deleted file mode 100644 index 912e9ea1..00000000 --- a/core/harvester/templates/harvestermon.html +++ /dev/null @@ -1,885 +0,0 @@ -{% extends "_base_core.html" %} -{% load humanize %} -{% load static %} -{% load common_tags %} - -{% block page_title %}Harvester monitor page{% endblock %} -{% block title %}PanDA monitor{% endblock %} -{% block subtitle %} {{ viewParams.selection|safe }} {% endblock %} - -{% block css_page_library %} - - -{% endblock %} - -{% block css_page %} - -{% endblock %} - -{% block js_head_page_library %} - - - - - - - - - -{% endblock %} - -{% block body %} - {% if type == 'instances' %} -
Diagnostic messages from harvester instances
Creation time
- - - - - - - - - - {% for instance in instances %} - - - - - - - - {% endfor %} - - - {#

#} - {# #} -

- - {% elif type == 'workers' %} - - {% for name,value in generalInstanseInfo.items|dictsort:"0.lower" %} - - {% if name == 'Statuses' %} - - - {% elif name == 'Computingsites' %} - - - {% elif name == 'Resourcetypes' %} - - - {% elif name == 'Computingelements' %} - - - {% elif name == 'Harvesters' %} - - - {% elif name == 'wrkpandaids' %} - - - {% elif name == 'Jobscount' %} - - - {% elif name == 'JobsStatuses' %} - - - {% elif name == 'JobsSubStatuses' %} - - - {% else %} - - - {% endif %} - - {% endfor %} -
{{ name }} ({{ value|length }}) - {% for n,v in value.items %} - {{ n }} - ({{ v }}) - {% endfor %} - {{ name }} ({{ value|length }}) -
- {% for n,v in value.items %} - {{ n }} ({{ v }}) - {% endfor %} -
-
{{ name }} ({{ value|length }}) -
- {% for n,v in value.items %} - {{ n }} ({{ v }}) - {% endfor %} -
-
{{ name }} ({{ value|length }}) -
- {% for n,v in value.items %} - {{ n }} ({{ v }}) - {% endfor %} -
-
{{ name }} ({{ value|length }}) -
- {% for n,v in value.items %} - {{ n }} ({{ v }}) - {% endfor %} -
-
WorkerID (jobs) -
- {% for n,v in value.items %} -

{{ n }} ({{ v }}) -

- {% endfor %} -
-
Load more
- || -
Load all
-
Jobs count - {{ value }} - Jobs statuses - {% for jobstatus, jobstatuscount in value.items %} - {{ jobstatus }} - - ({{ jobstatuscount }}) - {% endfor %} - Jobs substatuses - {% for jobsubstatus, jobsubtatuscount in value.items %} - {{ jobsubstatus }} ({{ jobsubtatuscount }}) - {% endfor %} - {{ name }}{{ value }}
- {% if instance or computingsite %} - {# Show dialog messages / worker stats / jobs

#} - {% endif %} -
-
-
- -
- Number of entries: -
Reload table
-

- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-
- Number of entries: -
Reload table
-
- - - - - - - - - - - - - - - - - - - - -
-
- Number of entries: -
Reload table
-
- - - - {% if instance == 0 %} - - {% else %} - - {% endif %} - - - - - - - - - - - - - - - - - - - -
-
- Number of entries: -
Reload table
-
- - - - - - - - - - - - - - - - - - - - - -
-
-
- {% if instance or computingsite %} - - {% endif %} - - {% endif %} -{% endblock %} - -{% block js_body_page %} - - -{% endblock %} - -{% block help %} - {% include "harvesterInfoHelp.html" with show="all" %} -{% endblock %} diff --git a/core/harvester/templates/harvworkerinfo.html b/core/harvester/templates/harvworkerinfo.html deleted file mode 100644 index 3d92069a..00000000 --- a/core/harvester/templates/harvworkerinfo.html +++ /dev/null @@ -1,74 +0,0 @@ -{% extends "_base_core.html" %} -{% load humanize %} -{% load static %} - -{% block page_title %}Harvester workers summary{% endblock %} -{% block title %}PanDA monitor{% endblock %} -{% block subtitle %}PanDA Harvester Workers Summary {{ viewParams.selection|safe }} {% endblock %} -{% block js_head_page_library %} - -{% endblock %} -{% block body %} - - - - - - - - - {% if error %} - - {% else%} - {% for name,value in workerinfo.items %} - {% if name != 'corrJobs' and name != 'jobsStatuses' and name != 'jobsSubStatuses' and name != 'njobs'%} - - - - {% elif name == 'corrJobs' %} - - - - {% elif name == 'jobsStatuses'%} - - - - {% elif name == 'jobsSubStatuses'%} - - - - {% elif name == 'njobs'%} - - - - {% endif %} - {% endfor %} - {% endif %} -
Harvester worker info
NameValue
{{ error}}
{{ name }}{% if 'http' in value or 'https' in value and value %} {{ value }}{% else %} {{ value }} {% endif %}
Correspondent Jobs -
- {% for pandaid in value %} - {{ pandaid }} - {% endfor %} -
-
jobstatus -
- {% for jobstatus,jobstatuscount in value.items %} - {{ jobstatus }} ({{ jobstatuscount }}) - {% endfor %} -
-
jobsubstatus -
- {% for jobsubstatus,jobsubtatuscount in value.items %} - {{ jobsubstatus }} ({{ jobsubtatuscount }}) - {% endfor %} -
-
{{ name }} {{ value }}
-{% endblock %}
-
-{% block js_body_page %}
-
-{% endblock %}
diff --git a/core/harvester/urls.py b/core/harvester/urls.py
index 52cc26a7..b05a345c 100644
--- a/core/harvester/urls.py
+++ b/core/harvester/urls.py
@@ -6,15 +6,8 @@
 from core.harvester import views as harvester
 
 urlpatterns = [
-    re_path(r'^harvesters/$', harvester.harvestermon, name='harvesters'),
-    re_path(r'^workers/$', harvester.workersJSON, name='workers'),
-    re_path(r'^workersfortask/$', harvester.getHarversterWorkersForTask, name='workersfortask'),
-
-    # legacy, keep to redirect
-    re_path(r'^harvesterworkersdash/$', harvester.harvestermon),
-    re_path(r'^harvesterworkerslist/$', harvester.harvestermon),
-    re_path(r'^harvesterworkerinfo/$', harvester.harvesterWorkerInfo),
+    re_path(r'^workersfortask/$', harvester.getHarversterWorkersForTask, name='workersfortask'),
 
     re_path(r'^harvester/slots/$', harvester.harvesterSlots, name='harvesterSlots'),
     re_path(r'^harvester/instances/$', harvester.harvesterInstances, name='harvesterInstanceList'),
@@ -28,4 +21,11 @@
     re_path(r'^harvester/getworkerstats/$', harvester.get_harvester_worker_stats, name='getworkerstats'),
     re_path(r'^harvester/getjobs/$', harvester.get_harvester_jobs, name='getjobs'),
 
+    # legacy, keep to redirect
+    re_path(r'^harvesterworkersdash/$', harvester.harvesterWorkers),
+    re_path(r'^harvesterworkerslist/$', harvester.harvesterWorkers),
+    re_path(r'^harvesterworkerinfo/$', harvester.harvesterWorkerInfo),
+    re_path(r'^harvesters/$', harvester.harvesters),
+    re_path(r'^workers/$', harvester.get_harvester_workers),
+
 ]
diff --git a/core/harvester/utils.py b/core/harvester/utils.py
index e3e1d986..367c87a7 100644
--- a/core/harvester/utils.py
+++ b/core/harvester/utils.py
@@ -48,6 +48,11 @@ def setup_harvester_view(request, otype='worker'):
             enddate.strftime(settings.DATETIME_FORMAT),
             'yyyy-mm-dd hh24:mi:ss'
         )
+    elif otype == 'instance':
+        query['lastupdate__range'] = [
+            startdate.strftime(settings.DATETIME_FORMAT),
+            enddate.strftime(settings.DATETIME_FORMAT)
+        ]
 
     if 'instance' in request.session['requestParams'] or 'harvesterid' in request.session['requestParams']:
         if 'instance' in request.session['requestParams']:
@@ -142,16 +147,16 @@ def isHarvesterJob(pandaid):
     jobHarvesterInfo = []
 
     sqlQuery = f"""
-        SELECT workerid, HARVESTERID, BATCHLOG, COMPUTINGELEMENT, ERRORCODE, DIAGMESSAGE FROM (SELECT
-            a.PANDAID,
+        SELECT workerid, harvesterid, batchlog, computingelement, errorcode, diagmessage from (select
+            a.pandaid,
             a.workerid,
-            a.HARVESTERID,
-            b.BATCHLOG,
-            b.COMPUTINGELEMENT,
-            b.ERRORCODE,
-            b.DIAGMESSAGE
-        FROM {settings.DB_SCHEMA_PANDA}.HARVESTER_REL_JOBS_WORKERS a,
-            {settings.DB_SCHEMA_PANDA}.HARVESTER_WORKERS b
+            a.harvesterid,
+            b.batchlog,
+            b.computingelement,
+            b.errorcode,
+            b.diagmessage
+        from {settings.DB_SCHEMA_PANDA}.harvester_rel_jobs_workers a,
+            {settings.DB_SCHEMA_PANDA}.harvester_workers b
         WHERE a.harvesterid = b.harvesterid and a.workerid = b.WORKERID) tmp_sub
         where pandaid = {pandaid}
     """
     cur = connection.cursor()
diff --git a/core/harvester/views.py b/core/harvester/views.py
index 70cb743b..57e9de44 100644
--- a/core/harvester/views.py
+++ b/core/harvester/views.py
@@ -7,7 +7,7 @@
 
 from datetime import datetime, timedelta
 
-from django.db import connection
+from django.db import connection, connections
 from django.http import HttpResponse, JsonResponse
 from django.shortcuts import render, redirect
 
@@ -17,6 +17,7 @@
 from core.libs.cache import setCacheEntry, getCacheEntry
 from core.libs.exlib import is_timestamp
 from core.libs.sqlcustom
import escape_input +from core.libs.sqlsyntax import interval_last from core.libs.DateEncoder import DateEncoder from core.libs.DateTimeEncoder import DateTimeEncoder from core.oauth.utils import login_customrequired @@ -27,6 +28,7 @@ from core.harvester.utils import get_harverster_workers_for_task, setup_harvester_view from django.conf import settings +import core.constants as const harvesterWorkerStatuses = [ 'missed', 'submitted', 'ready', 'running', 'idle', 'finished', 'failed', 'cancelled' @@ -35,6 +37,41 @@ _logger = logging.getLogger('bigpandamon') +@login_customrequired +def harvesters(request): + """ + It is a view to redirect requests to specific views depending on request params + in the decommissioned 'all in one' /harvesters/ view. + :param request: + :return: redirect + """ + valid, response = initRequest(request) + if not valid: + return response + + if len(request.session['requestParams']) == 0: + # redirect to list of instances page + return redirect('/harvester/instances/') + else: + # redirect to list of workers page + return redirect('/harvester/workers/?{}'.format('&'.join(['{}={}'.format(p, v) for p, v in request.session['requestParams'].items()]))) + + +@login_customrequired +def harvesterWorkerInfoLegacy(request): + """ + Redirecting to /harvester/worker/ view. + :param request: + :return: redirect + """ + valid, response = initRequest(request) + if not valid: + return response + + # redirect to list of workers page + return redirect('/harvester/worker/?{}'.format('&'.join(['{}={}'.format(p, v) for p, v in request.session['requestParams'].items()]))) + + @login_customrequired def harvesterInstances(request): valid, response = initRequest(request) @@ -51,7 +88,10 @@ def harvesterInstances(request): return response xurl = extensibleURL(request) - iquery = {} + if 'days' in request.session['requestParams'] or 'hours' in request.session['requestParams']: + iquery, _ = setup_harvester_view(request, 'instance') + else: + iquery = {} instances = list(HarvesterInstances.objects.filter(**iquery).values()) request.session['viewParams']['selection'] = 'Harvester instances' @@ -372,970 +412,6 @@ def get_harvester_jobs(request): return HttpResponse(json.dumps(harvsterpandaids, cls=DateTimeEncoder), content_type='application/json') - - - - - - - - - - - - - - - - - - - - -# -# -# @login_customrequired -# def harvesterWorkerInfo(request): -# valid, response = initRequest(request) -# if not valid: -# return response -# -# harvesterid = None -# workerid = None -# workerinfo = {} -# workerslist = [] -# error = None -# -# if 'harvesterid' in request.session['requestParams']: -# harvesterid = escape_input(request.session['requestParams']['harvesterid']) -# if 'workerid' in request.session['requestParams']: -# workerid = int(request.session['requestParams']['workerid']) -# -# if harvesterid and workerid: -# tquery = {'harvesterid': harvesterid, 'workerid': workerid} -# workerslist.extend(HarvesterWorkers.objects.filter(**tquery).values()) -# -# if len(workerslist) > 0: -# workerinfo = workerslist[0] -# workerinfo['corrJobs'] = [] -# workerinfo['jobsStatuses'] = {} -# workerinfo['jobsSubStatuses'] = {} -# -# jobs = getHarvesterJobs(request, instance=harvesterid, workerid=workerid) -# -# for job in jobs: -# workerinfo['corrJobs'].append(job['pandaid']) -# if job['jobstatus'] not in workerinfo['jobsStatuses']: -# workerinfo['jobsStatuses'][job['jobstatus']] = 1 -# else: -# workerinfo['jobsStatuses'][job['jobstatus']] += 1 -# if job['jobsubstatus'] not in workerinfo['jobsSubStatuses']: -# 
workerinfo['jobsSubStatuses'][job['jobsubstatus']] = 1 -# else: -# workerinfo['jobsSubStatuses'][job['jobsubstatus']] += 1 -# for k, v in workerinfo.items(): -# if is_timestamp(k): -# try: -# val = v.strftime(settings.DATETIME_FORMAT) -# workerinfo[k] = val -# except: -# pass -# else: -# workerinfo = None -# else: -# error = "Harvesterid + Workerid is not specified" -# -# data = { -# 'request': request, -# 'error': error, -# 'workerinfo': workerinfo, -# 'harvesterid': harvesterid, -# 'workerid': workerid, -# 'viewParams': request.session['viewParams'], -# 'requestParams': request.session['requestParams'], -# 'built': datetime.now().strftime("%H:%M:%S"), -# } -# if is_json_request(request): -# return HttpResponse(json.dumps(data['workerinfo'], cls=DateEncoder), content_type='application/json') -# else: -# response = render(request, 'harvworkerinfo.html', data, content_type='text/html') -# return response - -@login_customrequired -def harvestermon(request): - - valid, response = initRequest(request) - - data = getCacheEntry(request, "harvester") - - if data is not None: - data = json.loads(data) - data['request'] = request - response = render(request, 'harvestermon.html', data, content_type='text/html') - patch_response_headers(response, cache_timeout=request.session['max_age_minutes'] * 60) - return response - - extra = '1=1' - xurl = extensibleURL(request) - - URL = '' - - if 'instance' in request.session['requestParams']: - instance = request.session['requestParams']['instance'] - - if ('workersstats' in request.session['requestParams'] and 'instance' in request.session['requestParams']): - harvsterworkerstats = [] - tquery = {} - tquery['harvesterid'] = instance - limit = 100 - if 'limit' in request.session['requestParams']: - limit = int(request.session['requestParams']['limit']) - harvsterworkerstat = HarvesterWorkerStats.objects.filter(**tquery).values('computingsite', 'resourcetype', 'status', - 'nworkers','lastupdate').extra( - where=[extra]).order_by('-lastupdate')[:limit] - # dialogs.extend(HarvesterDialogs.objects.filter(**tquery).values('creationtime','modulename', 'messagelevel','diagmessage').filter(**tquery).extra(where=[extra]).order_by('-creationtime')) - old_format = '%Y-%m-%d %H:%M:%S' - new_format = '%d-%m-%Y %H:%M:%S' - for stat in harvsterworkerstat: - stat['lastupdate'] = datetime.strptime(str(stat['lastupdate']), old_format).strftime(new_format) - harvsterworkerstats.append(stat) - return HttpResponse(json.dumps(harvsterworkerstats, cls=DateTimeEncoder), content_type='application/json') - - if ('pandaids' in request.session['requestParams'] and 'instance' in request.session['requestParams']): - - status = '' - computingsite = '' - workerid = '' - days = '' - defaulthours = 24 - resourcetype = '' - computingelement = '' - - if 'status' in request.session['requestParams']: - status = """AND status like '%s'""" % (str(request.session['requestParams']['status'])) - if 'computingsite' in request.session['requestParams']: - computingsite = """AND computingsite like '%s'""" % ( - str(request.session['requestParams']['computingsite'])) - if 'resourcetype' in request.session['requestParams']: - resourcetype = """AND resourcetype like '%s'""" % ( - str(request.session['requestParams']['resourcetype'])) - if 'computingelement' in request.session['requestParams']: - computingelement = """AND computingelement like '%s'""" % ( - str(request.session['requestParams']['computingelement'])) - if 'workerid' in request.session['requestParams']: - workerid = """AND workerid in (%s)""" % 
(request.session['requestParams']['workerid']) - if 'hours' in request.session['requestParams']: - defaulthours = request.session['requestParams']['hours'] - hours = """AND submittime > CAST(sys_extract_utc(SYSTIMESTAMP) - interval '%s' hour(3) AS DATE)""" % ( - defaulthours) - else: - hours = """AND submittime > CAST(sys_extract_utc(SYSTIMESTAMP) - interval '%s' hour(3) AS DATE) """ % ( - defaulthours) - if 'days' in request.session['requestParams']: - days = """AND submittime > CAST(sys_extract_utc(SYSTIMESTAMP) - interval '%s' day(3) AS DATE) """ % ( - request.session['requestParams']['days']) - hours = '' - defaulthours = int(request.session['requestParams']['days']) * 24 - - harvsterpandaids = [] - - limit = 100 - if 'limit' in request.session['requestParams']: - limit = request.session['requestParams']['limit'] - - sqlQueryJobsStates = """ - select hw.*, cj.jobstatus from ( - select * from {}.harvester_rel_jobs_workers - where harvesterid like '{}' - and workerid in ( - select workerid from ( - select workerid from {}.harvester_workers - where harvesterid like '{}' {} {} {} {} {} {} {} - order by lastupdate desc - ) - where rownum <= {} - ) - ) hw , {}.combined_wait_act_def_arch4 cj - where hw.pandaid = cj.pandaid - """.format( - settings.DB_SCHEMA_PANDA, str(instance), settings.DB_SCHEMA_PANDA, - str(instance), status, computingsite, workerid, days, hours, resourcetype, computingelement, - limit, settings.DB_SCHEMA) - - cur = connection.cursor() - cur.execute(sqlQueryJobsStates) - jobs = cur.fetchall() - columns = [str(i[0]).lower() for i in cur.description] - for job in jobs: - object = dict(zip(columns, job)) - harvsterpandaids.append(object) - - return HttpResponse(json.dumps(harvsterpandaids, cls=DateTimeEncoder), content_type='application/json') - - if 'dialogs' in request.session['requestParams'] and 'instance' in request.session['requestParams']: - dialogs = [] - tquery = {} - tquery['harvesterid'] = instance - limit = 100 - if 'limit' in request.session['requestParams']: - limit = int(request.session['requestParams']['limit']) - dialogsList = HarvesterDialogs.objects.filter(**tquery).values('creationtime','modulename', 'messagelevel','diagmessage').filter(**tquery).extra(where=[extra]).order_by('-creationtime')[:limit] - old_format = '%Y-%m-%d %H:%M:%S' - new_format = '%d-%m-%Y %H:%M:%S' - for dialog in dialogsList: - dialog['creationtime'] = datetime.strptime(str(dialog['creationtime']), old_format).strftime(new_format) - dialogs.append(dialog) - - return HttpResponse(json.dumps(dialogs, cls=DateTimeEncoder), content_type='application/json') - - lastupdateCache = '' - - URL += '?instance=' + request.session['requestParams']['instance'] - status = '' - computingsite = '' - workerid = '' - days = '' - defaulthours = 24 - resourcetype = '' - computingelement = '' - - if 'status' in request.session['requestParams']: - status = """and status like '%s'""" %(str(request.session['requestParams']['status'])) - URL += '&status=' + str(request.session['requestParams']['status']) - if 'computingsite' in request.session['requestParams']: - computingsite = """and computingsite like '%s'""" %(str(request.session['requestParams']['computingsite'])) - URL += '&computingsite=' + str(request.session['requestParams']['computingsite']) - if 'pandaid' in request.session['requestParams']: - pandaid = request.session['requestParams']['pandaid'] - try: - jobsworkersquery, pandaids = getWorkersByJobID(pandaid, request.session['requestParams']['instance']) - except: - message = """pandaid for this 
instance is not found""" - return HttpResponse(json.dumps({'message': message}), content_type='text/html') - workerid = """and workerid in (%s)""" % (jobsworkersquery) - URL += '&pandaid=' + str(request.session['requestParams']['pandaid']) - if 'resourcetype' in request.session['requestParams']: - resourcetype = """and resourcetype like '%s'""" %(str(request.session['requestParams']['resourcetype'])) - URL += '&resourcetype=' +str(request.session['requestParams']['resourcetype']) - if 'computingelement' in request.session['requestParams']: - computingelement = """and computingelement like '%s'""" %(str(request.session['requestParams']['computingelement'])) - URL += '&computingelement=' + str(request.session['requestParams']['computingelement']) - if 'workerid' in request.session['requestParams']: - workerid = """and workerid in (%s)""" %(request.session['requestParams']['workerid']) - URL += '&workerid=' + str(request.session['requestParams']['workerid']) - if 'hours' in request.session['requestParams']: - defaulthours = request.session['requestParams']['hours'] - hours = """and submittime > CAST(sys_extract_utc(SYSTIMESTAMP) - interval '%s' hour(3) AS DATE) """ % ( - defaulthours) - URL += '&hours=' + str(request.session['requestParams']['hours']) - else: - hours = """and submittime > CAST(sys_extract_utc(SYSTIMESTAMP) - interval '%s' hour(3) AS DATE) """ % ( - defaulthours) - if 'days' in request.session['requestParams']: - days = """and submittime > CAST(sys_extract_utc(SYSTIMESTAMP) - interval '%s' day(3) AS DATE) """ %(request.session['requestParams']['days']) - URL += '&days=' + str(request.session['requestParams']['days']) - hours = '' - defaulthours = int(request.session['requestParams']['days']) * 24 - - sqlQuery = """ - SELECT - ii.harvester_id, - ii.description, - to_char(ii.starttime, 'dd-mm-yyyy hh24:mi:ss') as starttime, - to_char(ii.lastupdate, 'dd-mm-yyyy hh24:mi:ss') as lastupdate, - ii.owner, - ii.hostname, - ii.sw_version, - ii.commit_stamp, - to_char(ww.submittime, 'dd-mm-yyyy hh24:mi:ss') as submittime - FROM - {2}.harvester_instances ii - INNER JOIN - {2}.harvester_workers ww on ww.harvesterid = ii.harvester_id {0} and ii.harvester_id like '{1}' - """.format(hours, str(instance), settings.DB_SCHEMA_PANDA) - - cur = connection.cursor() - cur.execute(sqlQuery) - qinstanceinfo = cur.fetchall() - columns = [str(i[0]).lower() for i in cur.description] - instanceinfo = {} - for info in qinstanceinfo: - instanceinfo = dict(zip(columns, info)) - - if len(qinstanceinfo) == 0: - sqlQuery = """ - SELECT - ii.harvester_id, - ii.description, - to_char(ii.starttime, 'dd-mm-yyyy hh24:mi:ss') as starttime, - to_char(ii.lastupdate, 'dd-mm-yyyy hh24:mi:ss') as lastupdate, - ii.owner, - ii.hostname, - ii.sw_version, - ii.commit_stamp, - to_char(ww.submittime, 'dd-mm-yyyy hh24:mi:ss') as submittime - FROM - {1}.harvester_instances ii INNER JOIN - {1}.harvester_workers ww on ww.harvesterid = ii.harvester_id and ww.submittime = (select max(submittime) - from {1}.harvester_workers - where harvesterid like '{0}') and ii.harvester_id like '{0}' - """.format(str(instance), settings.DB_SCHEMA_PANDA) - - cur = connection.cursor() - cur.execute(sqlQuery) - qinstanceinfo = cur.fetchall() - columns = [str(i[0]).lower() for i in cur.description] - - for info in qinstanceinfo: - instanceinfo = dict(zip(columns, info)) - - if bool(instanceinfo) != True or instanceinfo['submittime'] is None: - message = """Instance is not found OR no workers for this instance or time period""" - return 
HttpResponse(json.dumps({'message': message}), - content_type='text/html') - _logger.debug('Got instance: {}'.format(time.time() - request.session['req_init_time'])) - - if datetime.strptime(instanceinfo['submittime'], '%d-%m-%Y %H:%M:%S') < datetime.now() - timedelta(hours=24): - days = """AND submittime > CAST(TO_DATE('{0}', 'dd-mm-yyyy hh24:mi:ss') - interval '{1}' day AS DATE)""".format(instanceinfo['submittime'], 1) - daysdelta = (datetime.now() - datetime.strptime(instanceinfo['submittime'], '%d-%m-%Y %H:%M:%S')).days + 1 - URL += '&days=' + str(daysdelta) - hours = '' - defaulthours = daysdelta * 24 - - harvesterWorkersQuery = """ - SELECT * FROM {DB_SCHEMA_PANDA}.HARVESTER_WORKERS - where harvesterid = '{0}' {1} {2} {3} {4} {5} {6} {7}"""\ - .format(str(instance), status, computingsite, workerid, lastupdateCache, - days, hours, resourcetype, computingelement, DB_SCHEMA_PANDA=settings.DB_SCHEMA_PANDA) - harvester_dicts = query_to_dicts(harvesterWorkersQuery) - harvester_list = [] - harvester_list.extend(harvester_dicts) - _logger.debug('Got workers: {}'.format(time.time() - request.session['req_init_time'])) - statusesDict = dict(Counter(harvester['status'] for harvester in harvester_list)) - computingsitesDict = dict(Counter(harvester['computingsite'] for harvester in harvester_list)) - computingelementsDict = dict(Counter(harvester['computingelement'] for harvester in harvester_list)) - resourcetypesDict = dict(Counter(harvester['resourcetype'] for harvester in harvester_list)) - - jobscnt = 0 - for harvester in harvester_list: - if harvester['njobs'] is not None: - jobscnt += harvester['njobs'] - - generalInstanseInfo = {'HarvesterID': instanceinfo['harvester_id'], 'Description': instanceinfo['description'], 'Starttime': instanceinfo['starttime'], - 'Owner': instanceinfo['owner'], 'Hostname': instanceinfo['hostname'], 'Lastupdate': instanceinfo['lastupdate'], 'Computingsites':computingsitesDict, - 'Statuses': statusesDict,'Resourcetypes': resourcetypesDict, 'Computingelements': computingelementsDict,'Software version': instanceinfo['sw_version'], - 'Jobscount': jobscnt, 'Commit stamp': instanceinfo['commit_stamp'] - } - generalInstanseInfo = collections.OrderedDict(generalInstanseInfo) - request.session['viewParams']['selection'] = 'Harvester workers, last %s hours' %(defaulthours) - - data = { - 'generalInstanseInfo': generalInstanseInfo, - 'type': 'workers', - 'instance': instance, - 'computingsite': 0, - 'xurl': xurl, - 'request': request, - 'requestParams': request.session['requestParams'], - 'viewParams': request.session['viewParams'], - 'built': datetime.now().strftime("%H:%M:%S"), - 'url': URL - } - # setCacheEntry(request, transactionKey, json.dumps(generalWorkersList[:display_limit_workers], cls=DateEncoder), 60 * 60, isData=True) - setCacheEntry(request, "harvester", json.dumps(data, cls=DateEncoder), 60 * 20) - _logger.debug('Finished preprocessing: {}'.format(time.time() - request.session['req_init_time'])) - return render(request, 'harvestermon.html', data, content_type='text/html') - - elif 'computingsite' in request.session['requestParams'] and 'instance' not in request.session['requestParams']: - - computingsite = request.session['requestParams']['computingsite'] - - if ('workersstats' in request.session['requestParams'] and 'computingsite' in request.session['requestParams']): - harvsterworkerstats = [] - tquery = {} - tquery['computingsite'] = computingsite - limit = 100 - if 'limit' in request.session['requestParams']: - limit = 
int(request.session['requestParams']['limit']) - harvsterworkerstat = HarvesterWorkerStats.objects.filter(**tquery).values('harvesterid', 'resourcetype', 'status', - 'nworkers','lastupdate').filter(**tquery).extra( - where=[extra]).order_by('-lastupdate')[:limit] - # dialogs.extend(HarvesterDialogs.objects.filter(**tquery).values('creationtime','modulename', 'messagelevel','diagmessage').filter(**tquery).extra(where=[extra]).order_by('-creationtime')) - old_format = '%Y-%m-%d %H:%M:%S' - new_format = '%d-%m-%Y %H:%M:%S' - for stat in harvsterworkerstat: - stat['lastupdate'] = datetime.strptime(str(stat['lastupdate']), old_format).strftime(new_format) - harvsterworkerstats.append(stat) - - return HttpResponse(json.dumps(harvsterworkerstats, cls=DateTimeEncoder), content_type='application/json') - - if ('dialogs' in request.session['requestParams'] and 'computingsite' in request.session['requestParams']): - dialogs = [] - tquery = {} - instancelist = request.session['requestParams']['instancelist'].split(',') - limit = 100 - if 'limit' in request.session['requestParams']: - limit = int(request.session['requestParams']['limit']) - dialogsList = HarvesterDialogs.objects.filter(**tquery).values('creationtime','modulename', 'messagelevel','diagmessage').filter(harvesterid__in=instancelist).extra(where=[extra]).order_by('-creationtime')[:limit] - # dialogs.extend(HarvesterDialogs.objects.filter(**tquery).values('creationtime','modulename', 'messagelevel','diagmessage').filter(**tquery).extra(where=[extra]).order_by('-creationtime')) - old_format = '%Y-%m-%d %H:%M:%S' - new_format = '%d-%m-%Y %H:%M:%S' - for dialog in dialogsList: - dialog['creationtime'] = datetime.strptime(str(dialog['creationtime']), old_format).strftime(new_format) - dialogs.append(dialog) - return HttpResponse(json.dumps(dialogs, cls=DateTimeEncoder), content_type='application/json') - - if 'pandaids' in request.session['requestParams'] and 'computingsite' in request.session['requestParams']: - - status = '' - computingsite = '' - workerid = '' - days = '' - defaulthours = 24 - resourcetype = '' - computingelement = '' - instance = '' - - if 'instance' not in request.session['requestParams']: - sqlQueryInstances = """ - SELECT harvesterid - FROM ATLAS_PANDA.HARVESTER_WORKERS where computingsite like '%s' group by harvesterid - """ % (request.session['requestParams']['computingsite']) - - cur = connection.cursor() - cur.execute(sqlQueryInstances) - - instances = cur.fetchall() - for ins in instances: - instance += "'" + ins[0] + "'," - instance = instance[:-1] - - if 'status' in request.session['requestParams']: - status = """AND status like '%s'""" % (str(request.session['requestParams']['status'])) - if 'computingsite' in request.session['requestParams']: - computingsite = """AND computingsite like '%s'""" % ( - str(request.session['requestParams']['computingsite'])) - if 'resourcetype' in request.session['requestParams']: - resourcetype = """AND resourcetype like '%s'""" % (str(request.session['requestParams']['resourcetype'])) - if 'computingelement' in request.session['requestParams']: - computingelement = """AND computingelement like '%s'""" % (str(request.session['requestParams']['computingelement'])) - if 'workerid' in request.session['requestParams']: - workerid = """AND workerid in (%s)""" % (request.session['requestParams']['workerid']) - if 'hours' in request.session['requestParams']: - defaulthours = request.session['requestParams']['hours'] - hours = """AND submittime >= CAST(sys_extract_utc(SYSTIMESTAMP) - 
interval '%s' hour(3) AS DATE)""" % ( - defaulthours) - else: - hours = """AND submittime >= CAST(sys_extract_utc(SYSTIMESTAMP) - interval '%s' hour(3) AS DATE) """ % ( - defaulthours) - if 'days' in request.session['requestParams']: - days = """AND submittime >= CAST(sys_extract_utc(SYSTIMESTAMP) - interval '%s' day(3) AS DATE) """ % ( - request.session['requestParams']['days']) - hours = '' - defaulthours = int(request.session['requestParams']['days'])*24 - - harvsterpandaids = [] - - limit = 100 - - if 'limit' in request.session['requestParams']: - limit = request.session['requestParams']['limit'] - - sqlQueryJobsStates = """ - SELECT hw.*, cj.jobstatus FROM ( - SELECT * from {}.harvester_rel_jobs_workers - where harvesterid in ({}) - and workerid in ( - select workerid from ( - SELECT workerid FROM {}.HARVESTER_WORKERS - where harvesterid in ({}) {} {} {} {} {} {} {} - ORDER by lastupdate DESC - ) - where rownum <= {} - ) - ) hw , {}.combined_wait_act_def_arch4 cj - WHERE hw.pandaid = cj.pandaid - """.format( - settings.DB_SCHEMA_PANDA, - str(instance), - settings.DB_SCHEMA_PANDA, - str(instance), status, computingsite, workerid, days, hours, resourcetype, computingelement, - limit, - settings.DB_SCHEMA - ) - - cur = connection.cursor() - cur.execute(sqlQueryJobsStates) - - jobs = cur.fetchall() - - columns = [str(i[0]).lower() for i in cur.description] - - for job in jobs: - object = {} - object = dict(zip(columns, job)) - harvsterpandaids.append(object) - - return HttpResponse(json.dumps(harvsterpandaids, cls=DateTimeEncoder), content_type='application/json') - - URL += '?computingsite=' + request.session['requestParams']['computingsite'] - status = '' - - workerid = '' - days = '' - defaulthours = 24 - resourcetype = '' - computingelement = '' - - if 'status' in request.session['requestParams']: - status = """AND status like '%s'""" % (str(request.session['requestParams']['status'])) - URL += '&status=' + str(request.session['requestParams']['status']) - if 'workerid' in request.session['requestParams']: - workerid = """AND workerid in (%s)""" % (request.session['requestParams']['workerid']) - URL += '&workerid=' + str(request.session['requestParams']['workerid']) - if 'resourcetype' in request.session['requestParams']: - resourcetype = """AND resourcetype like '%s'""" % (str(request.session['requestParams']['resourcetype'])) - URL += '&resourcetype=' + str(request.session['requestParams']['resourcetype']) - if 'computingelement' in request.session['requestParams']: - computingelement = """AND computingelement like '%s'""" %(str(request.session['requestParams']['computingelement'])) - URL += '&computingelement=' + str(request.session['requestParams']['computingelement']) - if 'hours' in request.session['requestParams']: - defaulthours = request.session['requestParams']['hours'] - hours = """AND submittime > CAST(sys_extract_utc(SYSTIMESTAMP) - interval '%s' hour(3) AS DATE) """ % ( - defaulthours) - URL += '&hours=' + str(request.session['requestParams']['hours']) - else: - hours = """AND submittime > CAST(sys_extract_utc(SYSTIMESTAMP) - interval '%s' hour(3) AS DATE) """ % ( - defaulthours) - if 'days' in request.session['requestParams']: - days = """AND submittime > CAST(sys_extract_utc(SYSTIMESTAMP) - interval '%s' day(3) AS DATE) """ % ( - request.session['requestParams']['days']) - URL += '&days=' + str(request.session['requestParams']['days']) - hours = '' - defaulthours = int(request.session['requestParams']['days']) * 24 - - sqlQuery = """ - select * from 
{5}.harvester_workers - where computingsite like '{0}' {1} {2} {3} {4} and rownum<=1 - order by workerid desc - """.format(str(computingsite), status, workerid, resourcetype, computingelement, settings.DB_SCHEMA_PANDA) - - workersList = [] - cur = connection.cursor() - cur.execute(sqlQuery) - - harvesterinfo = cur.fetchall() - - columns = [str(i[0]).lower() for i in cur.description] - - for worker in harvesterinfo: - object = dict(zip(columns, worker)) - workersList.append(object) - - if len(workersList) == 0: - message ="""Computingsite is not found OR no workers for this computingsite or time period. - Try using this link (last 365 days)""".format(computingsite) - return HttpResponse(json.dumps({'message': message}), content_type='text/html') - - harvesterworkersquery = """ - SELECT * FROM ATLAS_PANDA.HARVESTER_WORKERS - where computingsite = '{0}' {1} {2} {3} {4} {5} """\ - .format(str(computingsite), status, workerid, days, hours, resourcetype, computingelement) - - harvester_dicts = query_to_dicts(harvesterworkersquery) - - harvester_list = [] - harvester_list.extend(harvester_dicts) - - statusesDict = dict(Counter(harvester['status'] for harvester in harvester_list)) - harvesteridDict = dict(Counter(harvester['harvesterid'] for harvester in harvester_list)) - computingelementsDict = dict(Counter(harvester['computingelement'] for harvester in harvester_list)) - resourcetypesDict = dict(Counter(harvester['resourcetype'] for harvester in harvester_list)) - - jobscnt = 0 - - for harvester in harvester_list: - if harvester['njobs'] is not None: - jobscnt += harvester['njobs'] - - generalInstanseInfo = {'Computingsite': workersList[0]['computingsite'], - # 'Starttime': workersList[0]['insstarttime'], - # 'Hostname': workersList[0]['hostname'], - # 'Lastupdate': workersList[0]['inslastupdate'], - 'Harvesters': harvesteridDict, - 'Statuses': statusesDict, - 'Resourcetypes': resourcetypesDict, - 'Computingelements': computingelementsDict, - # 'Software version': workersList[0]['sw_version'], - # 'Commit stamp': workersList[0]['commit_stamp'] - } - request.session['viewParams']['selection'] = 'Harvester workers, last %s hours' %(defaulthours) - - data = { - 'generalInstanseInfo': generalInstanseInfo, - 'type': 'workers', - 'instance': 0, - 'instancelist': ','.join(harvesteridDict.keys()), - 'computingsite': computingsite, - 'xurl': xurl, - 'request': request, - 'requestParams': request.session['requestParams'], - 'viewParams': request.session['viewParams'], - 'built': datetime.now().strftime("%H:%M:%S"), - 'url': URL - } - # setCacheEntry(request, transactionKey, json.dumps(generalWorkersList[:display_limit_workers], cls=DateEncoder), 60 * 60, isData=True) - setCacheEntry(request, "harvester", json.dumps(data, cls=DateEncoder), 60 * 20) - return render(request, 'harvestermon.html', data, content_type='text/html') - elif 'pandaid' in request.session['requestParams'] and 'computingsite' not in request.session['requestParams'] and 'instance' not in request.session['requestParams']: - - pandaid = request.session['requestParams']['pandaid'] - - workerid = '' - days = '' - defaulthours = 24 - resourcetype = '' - computingelement = '' - status = '' - jobsworkersquery, pandaids = getWorkersByJobID(pandaid) - - if jobsworkersquery == '': - message = """ - No workers for this pandaid or time period. 
- Try using this link (last 365 days)""".format(pandaid) - return HttpResponse(json.dumps({'message': message}), - content_type='text/html') - URL += '?pandaid=' + request.session['requestParams']['pandaid'] - - if 'hours' in request.session['requestParams']: - defaulthours = request.session['requestParams']['hours'] - hours = """AND submittime >= CAST(sys_extract_utc(SYSTIMESTAMP) - interval '%s' hour(3) AS DATE) """ % ( - defaulthours) - URL += '&hours=' + str(request.session['requestParams']['hours']) - else: - hours = """AND submittime >= CAST(sys_extract_utc(SYSTIMESTAMP) - interval '%s' hour(3) AS DATE) """ % ( - defaulthours) - if 'days' in request.session['requestParams']: - days = """AND submittime >= CAST(sys_extract_utc(SYSTIMESTAMP) - interval '%s' day(3) AS DATE) """ % ( - request.session['requestParams']['days']) - URL += '&days=' + str(request.session['requestParams']['days']) - hours = '' - defaulthours = int(request.session['requestParams']['days']) * 24 - if hours != '': - jobsworkersquery += ' ' + hours - if days != '': - jobsworkersquery += ' ' + days - if 'status' in request.session['requestParams']: - status = """AND status like '%s'""" % (str(request.session['requestParams']['status'])) - URL += '&status=' + str(request.session['requestParams']['status']) - if 'workerid' in request.session['requestParams']: - workerid = """AND workerid in (%s)""" % (request.session['requestParams']['workerid']) - URL += '&workerid=' + str(request.session['requestParams']['workerid']) - if 'resourcetype' in request.session['requestParams']: - resourcetype = """AND resourcetype like '%s'""" % (str(request.session['requestParams']['resourcetype'])) - URL += '&resourcetype=' + str(request.session['requestParams']['resourcetype']) - if 'computingelement' in request.session['requestParams']: - computingelement = """AND computingelement like '%s'""" %(str(request.session['requestParams']['computingelement'])) - URL += '&computingelement=' + str(request.session['requestParams']['computingelement']) - - sqlQueryHarvester = """ - SELECT harvesterid, count(*) FROM ATLAS_PANDA.HARVESTER_WORKERS - where (%s) %s %s %s %s group by harvesterid - """ % (jobsworkersquery, status, workerid, resourcetype, computingelement) - - sqlQueryStatus = """ - SELECT status,count(*) FROM ATLAS_PANDA.HARVESTER_WORKERS - where (%s) %s %s %s %s group by status - """ % (jobsworkersquery, status, workerid, resourcetype, computingelement) - - sqlQueryResource = """ - SELECT RESOURCETYPE,count(*) FROM ATLAS_PANDA.HARVESTER_WORKERS - where (%s) %s %s %s %s group by RESOURCETYPE - """ % (jobsworkersquery, status, workerid, resourcetype, computingelement) - - sqlQueryCE = """ - SELECT COMPUTINGELEMENT,count(*) FROM ATLAS_PANDA.HARVESTER_WORKERS - where (%s) %s %s %s %s group by COMPUTINGELEMENT - """ % (jobsworkersquery, status, workerid, resourcetype, computingelement) - - sqlQueryComputingsite = """ - SELECT COMPUTINGSITE,count(*) FROM ATLAS_PANDA.HARVESTER_WORKERS - where (%s) %s %s %s %s group by COMPUTINGSITE - """ % (jobsworkersquery, status, workerid, resourcetype, computingelement) - - cur = connection.cursor() - - cur.execute(sqlQueryHarvester) - harvesterids = cur.fetchall() - - cur.execute(sqlQueryStatus) - statuses = cur.fetchall() - - cur.execute(sqlQueryResource) - resourcetypes = cur.fetchall() - - cur.execute(sqlQueryCE) - computingelements = cur.fetchall() - - cur.execute(sqlQueryComputingsite) - computingsites = cur.fetchall() - - harvesteridDict = {} - - for harvester in harvesterids: - 
harvesteridDict[harvester[0]] = harvester[1] - - if len(harvesteridDict) == 0: - message = """ - No workers for this pandaid or time period. - Try using this link (last 365 days) - """ % ( - pandaid) - return HttpResponse(json.dumps({'message': message}), content_type='text/html') - - computingsitesDict = {} - - for computingsite in computingsites: - computingsitesDict[computingsite[0]] = computingsite[1] - - statusesDict = {} - for status in statuses: - statusesDict[status[0]] = status[1] - - resourcetypesDict = {} - for resourcetype in resourcetypes: - resourcetypesDict[resourcetype[0]] = resourcetype[1] - - computingelementsDict = {} - for computingelement in computingelements: - computingelementsDict[computingelement[0]] = computingelement[1] - - for harvester in pandaids.keys(): - if harvester not in harvesteridDict.keys(): - del pandaids[harvester] - - generalInstanseInfo = {'JobID': ' '.join(pandaids.values()), 'Harvesters': harvesteridDict, - 'Statuses': statusesDict, - 'Resourcetypes':resourcetypesDict, 'Computingelements': computingelementsDict, - 'Computingsites': computingsitesDict} - - request.session['viewParams']['selection'] = 'Harvester workers, last %s hours' % defaulthours - - data = { - 'generalInstanseInfo': generalInstanseInfo, - 'type': 'workers', - 'instance': ','.join(list(harvesteridDict.keys())), - 'xurl': xurl, - 'request': request, - 'requestParams': request.session['requestParams'], - 'viewParams': request.session['viewParams'], - 'built': datetime.now().strftime("%H:%M:%S"), - 'url': URL - } - # setCacheEntry(request, transactionKey, json.dumps(generalWorkersList[:display_limit_workers], cls=DateEncoder), 60 * 60, isData=True) - setCacheEntry(request, "harvester", json.dumps(data, cls=DateEncoder), 60 * 20) - return render(request, 'harvestermon.html', data, content_type='text/html') - else: - sqlQuery = f""" - SELECT HARVESTER_ID as HARVID, - SW_VERSION, - DESCRIPTION, - COMMIT_STAMP, - to_char(LASTUPDATE, 'dd-mm-yyyy hh24:mi:ss') as LASTUPDATE - FROM {settings.DB_SCHEMA_PANDA}.HARVESTER_INSTANCES - """ - instanceDictionary = [] - - cur = connection.cursor() - cur.execute(sqlQuery) - - for instance in cur: - instanceDictionary.append( - {'instance': instance[0], - 'sw_version':instance[1], - 'commit_stamp':instance[2], - 'descr': instance[3],'lastupdate':instance[4]} - ) - - request.session['viewParams']['selection'] = 'Harvester instances' - - data = { - 'instances':instanceDictionary, - 'type': 'instances', - 'xurl': xurl, - 'request':request, - 'requestParams': request.session['requestParams'], - 'viewParams': request.session['viewParams'] - } - #data =json.dumps(data,cls=DateEncoder) - if (not (('HTTP_ACCEPT' in request.META) and (request.META.get('HTTP_ACCEPT') in ('application/json'))) and ( - 'json' not in request.session['requestParams'])): - return render(request, 'harvestermon.html', data, content_type='text/html') - else: - return HttpResponse(json.dumps(instanceDictionary, cls=DateTimeEncoder), content_type='application/json') - - -def workersJSON(request): - - valid, response = initRequest(request) - - xurl = extensibleURL(request) - - if '_' in request.session['requestParams']: - xurl = xurl.replace('_={0}&'.format(request.session['requestParams']['_']), '') - - data = getCacheEntry(request, xurl, isData=True) - - if data is not None: - data = json.loads(data) - return HttpResponse(json.dumps(data, cls=DateTimeEncoder), content_type='application/json') - - status = '' - computingsite = '' - workerid = '' - days = '' - defaulthours = 24 - 
lastupdateCache = '' - resourcetype = '' - computingelement = '' - - - if 'status' in request.session['requestParams']: - status = """AND status like '%s'""" % (str(request.session['requestParams']['status'])) - if 'computingsite' in request.session['requestParams']: - computingsite = """AND computingsite like '%s'""" % ( - str(request.session['requestParams']['computingsite'])) - if 'workerid' in request.session['requestParams']: - workerid = """AND workerid in (%s)""" % (request.session['requestParams']['workerid']) - if 'hours' in request.session['requestParams']: - defaulthours = request.session['requestParams']['hours'] - hours = """AND submittime > CAST(sys_extract_utc(SYSTIMESTAMP) - interval '%s' hour(3) AS DATE) """ % ( - defaulthours) - else: hours = """AND submittime > CAST(sys_extract_utc(SYSTIMESTAMP) - interval '%s' hour(3) AS DATE) """ % ( - defaulthours) - if 'days' in request.session['requestParams']: - days = """AND submittime > CAST(sys_extract_utc(SYSTIMESTAMP) - interval '%s' day(3) AS DATE) """ % ( - request.session['requestParams']['days']) - hours = '' - defaulthours = int(request.session['requestParams']['days']) * 24 - if 'resourcetype' in request.session['requestParams']: - resourcetype = """AND resourcetype like '%s'""" % ( - str(request.session['requestParams']['resourcetype'])) - if 'computingelement' in request.session['requestParams']: - computingelement = """AND computingelement like '%s'""" % ( - str(request.session['requestParams']['computingelement'])) - if 'instance' in request.session['requestParams']: - instance = request.session['requestParams']['instance'] - if 'pandaid' in request.session['requestParams']: - pandaid = request.session['requestParams']['pandaid'] - jobsworkersquery, pandaids = getWorkersByJobID(pandaid,instance) - workerid = """AND workerid in (%s)""" % (jobsworkersquery) - if ('dt' in request.session['requestParams']): - if 'display_limit_workers' in request.session['requestParams']: - display_limit_workers = int(request.session['requestParams']['display_limit_workers']) - else: - display_limit_workers = 1000 - - generalWorkersFields = ['workerid', 'status', 'batchid', 'nodeid', 'queuename', 'computingsite','harvesterid', - 'submittime', 'lastupdate', 'starttime', 'endtime', 'ncore', 'errorcode', - 'stdout', 'stderr', 'batchlog', 'resourcetype', 'nativeexitcode', 'nativestatus', - 'diagmessage', 'njobs', 'computingelement','jdl'] - - fields = ','.join(generalWorkersFields) - - sqlquery = f""" - SELECT * FROM (SELECT %s FROM {settings.DB_SCHEMA_PANDA}.HARVESTER_WORKERS - where harvesterid like '%s' %s %s %s %s %s %s %s %s - order by submittime DESC) WHERE ROWNUM<=%s - """ % (fields, str(instance), status, computingsite, workerid, lastupdateCache, days, hours, resourcetype, computingelement, display_limit_workers) - - cur = connection.cursor() - cur.execute(sqlquery) - columns = [str(i[0]).lower() for i in cur.description] - workersList = [] - - for worker in cur: - object = {} - object = dict(zip(columns, worker)) - workersList.append(object) - if 'key' not in request.session['requestParams']: - setCacheEntry(request, xurl, json.dumps(workersList, cls=DateTimeEncoder), 60 * 20, isData = True) - - return HttpResponse(json.dumps(workersList, cls=DateTimeEncoder), content_type='application/json') - - elif 'computingsite' in request.session['requestParams'] and 'instance' not in request.session['requestParams']: - computingsite = request.session['requestParams']['computingsite'] - if ('dt' in request.session['requestParams']): - if 
'display_limit_workers' in request.session['requestParams']: - display_limit_workers = int(request.session['requestParams']['display_limit_workers']) - else: - display_limit_workers = 1000 - - generalWorkersFields = ['workerid', 'status', 'batchid', 'nodeid', 'queuename', 'computingsite','harvesterid', - 'submittime', 'lastupdate', 'starttime', 'endtime', 'ncore', 'errorcode', - 'stdout', 'stderr', 'batchlog', 'resourcetype', 'nativeexitcode', 'nativestatus', - 'diagmessage', 'njobs', 'computingelement','jdl'] - - fields = ','.join(generalWorkersFields) - sqlquery = """ - SELECT * FROM (SELECT %s FROM ATLAS_PANDA.HARVESTER_WORKERS - where computingsite like '%s' %s %s %s %s %s %s - order by submittime DESC) WHERE ROWNUM <= %s - """ % (fields, str(computingsite), status, workerid, days, hours, resourcetype, computingelement, display_limit_workers) - - workers = connection.cursor() - workers.execute(sqlquery) - columns = [str(i[0]).lower() for i in workers.description] - workersList = [] - - for worker in workers: - object = {} - object = dict(zip(columns, worker)) - workersList.append(object) - if 'key' not in request.session['requestParams']: - setCacheEntry(request, xurl, json.dumps(workersList, cls=DateTimeEncoder), 60 * 20, isData = True) - - return HttpResponse(json.dumps(workersList, cls=DateTimeEncoder), content_type='application/json') - - elif 'pandaid' in request.session['requestParams'] and 'computingsite' not in request.session[ - 'requestParams'] and 'instance' not in request.session['requestParams']: - pandaid = request.session['requestParams']['pandaid'] - jobsworkersquery, pandaids = getWorkersByJobID(pandaid) - if hours != '': - jobsworkersquery += ' ' + hours - if days != '': - jobsworkersquery += ' ' + days - - if ('dt' in request.session['requestParams']): - if 'display_limit_workers' in request.session['requestParams']: - display_limit_workers = int(request.session['requestParams']['display_limit_workers']) - else: - display_limit_workers = 1000 - - generalWorkersFields = ['workerid', 'status', 'batchid', 'nodeid', 'queuename', 'computingsite','harvesterid', - 'submittime', 'lastupdate', 'starttime', 'endtime', 'ncore', 'errorcode', - 'stdout', 'stderr', 'batchlog', 'resourcetype', 'nativeexitcode', 'nativestatus', - 'diagmessage', 'njobs', 'computingelement','jdl'] - - fields = ','.join(generalWorkersFields) - sqlquery = """ - SELECT * FROM(SELECT %s FROM ATLAS_PANDA.HARVESTER_WORKERS - where (%s) %s %s %s %s - order by submittime DESC) WHERE ROWNUM<=%s - """ % (fields, jobsworkersquery, status, workerid, resourcetype,computingelement, display_limit_workers) - - cur = connection.cursor() - cur.execute(sqlquery) - columns = [str(i[0]).lower() for i in cur.description] - workersList = [] - - for worker in cur: - object = {} - object = dict(zip(columns, worker)) - workersList.append(object) - - return HttpResponse(json.dumps(workersList, cls=DateTimeEncoder), content_type='application/json') - @login_customrequired def harvesterSlots(request): valid, response = initRequest(request) @@ -1367,72 +443,8 @@ def harvesterSlots(request): return render(request, 'harvesterSlots.html', data, content_type='text/html') -def getWorkersByJobID(pandaid, instance=''): - - instancequery = '' - - if '|' in pandaid: - pandaid = 'where pandaid in (' + pandaid.replace('|', ',') + ')' - elif ',' in pandaid: - pandaid = 'where pandaid in (' + pandaid + ')' - else: - pandaid = 'where pandaid = ' + pandaid - - if instance != '': - instancequery = """ AND harvesterid like '%s' """ %(instance) - - 
sqlQuery = """ - select harvesterid, workerid, pandaid from {}.harvester_rel_jobs_workers {} {} - """.format(settings.DB_SCHEMA_PANDA, pandaid, instancequery) - - cur = connection.cursor() - cur.execute(sqlQuery) - - reljobsworkers = cur.fetchall() - - workersList = {} - pandaidList = {} - - for worker in reljobsworkers: - workersList.setdefault(worker[0], []).append(str(worker[1])) - pandaidList[worker[0]] = str(worker[2]) - - jobsworkersquery = '' - - instances = workersList.keys() - cntinstances = len(instances) - - if instance != '': - jobsworkersquery = ', '.join(workersList[instance]) - - else: - for instance in instances: - jobsworkersquery += 'harvesterid like \'{0}\' and workerid in ({1})'.format(instance,', '.join(workersList[instance])) - if cntinstances > 1: - jobsworkersquery += ' OR ' - cntinstances = cntinstances - 1 - - return jobsworkersquery, pandaidList - - -def query_to_dicts(query_string, *query_args): - from itertools import zip_longest as izip - - cursor = connection.cursor() - cursor.execute(query_string, query_args) - col_names = [str(desc[0]).lower() for desc in cursor.description] - while True: - row = cursor.fetchone() - if row is None: - break - - row_dict = dict(izip(col_names, row)) - yield row_dict - return - - def getHarvesterJobs(request, instance='', workerid='', jobstatus='', fields='', **kwargs): - ''' + """ Get jobs list for the particular harvester instance and worker :param request: request object :param instance: harvester instance @@ -1440,7 +452,7 @@ def getHarvesterJobs(request, instance='', workerid='', jobstatus='', fields='', :param jobstatus: jobs statuses :param fields: jobs fields :return: harvester jobs list - ''' + """ jobsList = [] renamed_fields = { @@ -1468,18 +480,7 @@ def getHarvesterJobs(request, instance='', workerid='', jobstatus='', fields='', from core.pandajob.models import Jobsactive4 values = [f.name for f in Jobsactive4._meta.get_fields() if f.name != 'jobparameters' and f.name != 'metadata'] else: - values = ( - 'corecount', 'jobsubstatus', 'produsername', 'cloud', 'computingsite', 'cpuconsumptiontime', - 'jobstatus', 'transformation', 'prodsourcelabel', 'specialhandling', 'vo', 'modificationtime', - 'pandaid', 'atlasrelease', 'jobsetid', 'processingtype', 'workinggroup', 'jeditaskid', 'taskid', - 'currentpriority', 'creationtime', 'starttime', 'endtime', 'brokerageerrorcode', 'brokerageerrordiag', - 'ddmerrorcode', 'ddmerrordiag', 'exeerrorcode', 'exeerrordiag', 'jobdispatchererrorcode', - 'jobdispatchererrordiag', 'piloterrorcode', 'piloterrordiag', 'superrorcode', 'superrordiag', - 'taskbuffererrorcode', 'taskbuffererrordiag', 'transexitcode', 'destinationse', 'homepackage', - 'inputfileproject', 'inputfiletype', 'attemptnr', 'jobname', 'computingelement', 'proddblock', - 'destinationdblock', 'reqid', 'minramcount', 'statechangetime', 'avgvmem', 'maxvmem', 'maxpss', - 'maxrss', 'nucleus', 'eventservice', 'nevents','gshare','noutputdatafiles','parentid','actualcorecount', - 'schedulerid') + values = list(const.JOB_FIELDS) # rename fields that has '_' in DB but not in model for k, v in renamed_fields.items(): @@ -1545,91 +546,117 @@ def getHarvesterJobs(request, instance='', workerid='', jobstatus='', fields='', return jobsList -def getCeHarvesterJobs(request, computingelment, fields=''): - ''' +def getCeHarvesterJobs(request, computingelement, fields=''): + """ Get jobs for the particular CE - - :param computingelment: harvester computingelement + :param computingelement: harvester computingelement :param fields: list of 
fields for jobs tables - :return: - ''' - jobList = [] - + :return: job_list + """ + job_list = [] if 'hours' in request.session['requestParams']: - lastupdated_time = "'{0}' hour".format(request.session['requestParams']['hours']) + lastupdated_hours = request.session['requestParams']['hours'] elif 'days' in request.session['requestParams']: - lastupdated_time = "'{0}' day".format(request.session['requestParams']['days']) + lastupdated_hours = int(request.session['requestParams']['days']) * 24 else: - lastupdated_time = "'{0}' hour".format(str(int((request.session['TLAST'] - request.session['TFIRST']).seconds/3600))) + lastupdated_hours = int((request.session['TLAST'] - request.session['TFIRST']).seconds/3600) if fields != '': values = fields else: - if (('HTTP_ACCEPT' in request.META) and (request.META.get('HTTP_ACCEPT') in ('text/json', 'application/json'))) or ( - 'json' in request.session['requestParams']): + if is_json_request(request): values = [] from core.pandajob.models import Jobsactive4 for f in Jobsactive4._meta.get_fields(): - if f.name =='resourcetype': + if f.name == 'resourcetype': values.append('resource_type') - elif f.name !='jobparameters' and f.name != 'metadata': + elif f.name != 'jobparameters' and f.name != 'metadata': values.append(f.name) else: - values = 'corecount', 'jobsubstatus', 'produsername', 'cloud', 'computingsite', 'cpuconsumptiontime', 'jobstatus', 'transformation', 'prodsourcelabel', 'specialhandling', 'vo', 'modificationtime', 'pandaid', 'atlasrelease', 'jobsetid', 'processingtype', 'workinggroup', 'jeditaskid', 'taskid', 'currentpriority', 'creationtime', 'starttime', 'endtime', 'brokerageerrorcode', 'brokerageerrordiag', 'ddmerrorcode', 'ddmerrordiag', 'exeerrorcode', 'exeerrordiag', 'jobdispatchererrorcode', 'jobdispatchererrordiag', 'piloterrorcode', 'piloterrordiag', 'superrorcode', 'superrordiag', 'taskbuffererrorcode', 'taskbuffererrordiag', 'transexitcode', 'destinationse', 'homepackage', 'inputfileproject', 'inputfiletype', 'attemptnr', 'jobname', 'computingelement', 'proddblock', 'destinationdblock', 'reqid', 'minramcount', 'statechangetime', 'avgvmem', 'maxvmem', 'maxpss', 'maxrss', 'nucleus', 'eventservice', 'nevents','gshare','noutputdatafiles','parentid','actualcorecount','schedulerid' + values = list(const.JOB_FIELDS) - sqlQuery = """ - SELECT DISTINCT {2} FROM - (SELECT {2} FROM ATLAS_PANDA.JOBSARCHIVED4, - (SELECT jw.pandaid as pid FROM atlas_panda.harvester_rel_jobs_workers jw, atlas_panda.harvester_workers w - WHERE jw.harvesterid=w.harvesterid AND jw.workerid = w.workerid - AND w.lastupdate > CAST (sys_extract_utc(SYSTIMESTAMP) - interval {1} as DATE) - AND jw.lastupdate > CAST (sys_extract_utc(SYSTIMESTAMP) - interval {1} as DATE) - AND w.computingelement like '%{0}%') PIDACTIVE - WHERE PIDACTIVE.pid=ATLAS_PANDA.JOBSARCHIVED4.PANDAID - UNION ALL - SELECT {2} FROM ATLAS_PANDA.JOBSACTIVE4, - (SELECT jw.pandaid as pid FROM atlas_panda.harvester_rel_jobs_workers jw, atlas_panda.harvester_workers w - WHERE jw.harvesterid=w.harvesterid AND jw.workerid = w.workerid - AND w.lastupdate > CAST (sys_extract_utc(SYSTIMESTAMP) - interval {1} as DATE) - AND jw.lastupdate > CAST (sys_extract_utc(SYSTIMESTAMP) - interval {1} as DATE) - AND w.computingelement like '%{0}%') PIDACTIVE - WHERE PIDACTIVE.pid=ATLAS_PANDA.JOBSACTIVE4.PANDAID - UNION ALL - SELECT {2} FROM ATLAS_PANDA.JOBSDEFINED4, - (SELECT jw.pandaid as pid FROM atlas_panda.harvester_rel_jobs_workers jw, atlas_panda.harvester_workers w - WHERE jw.harvesterid=w.harvesterid AND 
jw.workerid = w.workerid - AND w.lastupdate > CAST (sys_extract_utc(SYSTIMESTAMP) - interval {1} as DATE) - AND jw.lastupdate > CAST (sys_extract_utc(SYSTIMESTAMP) - interval {1} as DATE) - AND w.computingelement like '%{0}%') PIDACTIVE - WHERE PIDACTIVE.pid=ATLAS_PANDA.JOBSDEFINED4.PANDAID - UNION ALL - SELECT {2} FROM ATLAS_PANDA.JOBSWAITING4, - (SELECT jw.pandaid as pid FROM atlas_panda.harvester_rel_jobs_workers jw, atlas_panda.harvester_workers w - WHERE jw.harvesterid=w.harvesterid AND jw.workerid = w.workerid - AND w.lastupdate > CAST (sys_extract_utc(SYSTIMESTAMP) - interval {1} as DATE) - AND jw.lastupdate > CAST (sys_extract_utc(SYSTIMESTAMP) - interval {1} as DATE) - AND w.computingelement like '%{0}%') PIDACTIVE - WHERE PIDACTIVE.pid=ATLAS_PANDA.JOBSWAITING4.PANDAID - UNION ALL - SELECT {2} FROM ATLAS_PANDAARCH.JOBSARCHIVED, - (SELECT jw.pandaid as pid FROM atlas_panda.harvester_rel_jobs_workers jw, atlas_panda.harvester_workers w - WHERE jw.harvesterid=w.harvesterid AND jw.workerid = w.workerid - AND w.lastupdate > CAST (sys_extract_utc(SYSTIMESTAMP) - interval {1} as DATE) - AND jw.lastupdate > CAST (sys_extract_utc(SYSTIMESTAMP) - interval {1} as DATE) - AND w.computingelement like '%{0}%') PIDACTIVE - WHERE PIDACTIVE.pid=ATLAS_PANDAARCH.JOBSARCHIVED.PANDAID) - """.format(computingelment, lastupdated_time, ', '.join(values)) + # rename fields that has '_' in DB but not in model + renamed_fields = { + 'resourcetype': 'resource_type', + 'memoryleak': 'memory_leak', + 'memoryleakx2': 'memory_leak_x2', + 'joblabel': 'job_label', + } + for k, v in renamed_fields.items(): + if k in values: + values.remove(k) + values.append(v) - cur = connection.cursor() - cur.execute(sqlQuery) + db = connections['default'].vendor + sql_query = """ + select distinct {2} from ( + select {2} from {DB_SCHEMA_PANDA}.jobsarchived4 jarch4, ( + select jw.pandaid as pid + from {DB_SCHEMA_PANDA}.harvester_rel_jobs_workers jw, {DB_SCHEMA_PANDA}.harvester_workers w + where jw.harvesterid=w.harvesterid and jw.workerid = w.workerid + and w.lastupdate > {1} + and jw.lastupdate > {1} + and w.computingelement like '%{0}%' + ) hj + where hj.pid=jarch4.pandaid + union all + select {2} from {DB_SCHEMA_PANDA}.jobsactive4 ja4, ( + select jw.pandaid as pid + from {DB_SCHEMA_PANDA}.harvester_rel_jobs_workers jw, {DB_SCHEMA_PANDA}.harvester_workers w + where jw.harvesterid=w.harvesterid and jw.workerid = w.workerid + and w.lastupdate > {1} + and jw.lastupdate > {1} + and w.computingelement like '%{0}%' + ) hj + where hj.pid=ja4.pandaid + union all + select {2} from {DB_SCHEMA_PANDA}.jobsdefined4 jd4, ( + select jw.pandaid as pid + from {DB_SCHEMA_PANDA}.harvester_rel_jobs_workers jw, {DB_SCHEMA_PANDA}.harvester_workers w + where jw.harvesterid=w.harvesterid and jw.workerid = w.workerid + and w.lastupdate > {1} + and jw.lastupdate > {1} + and w.computingelement like '%{0}%' + ) hj + where hj.pid=jd4.pandaid + union all + select {2} from {DB_SCHEMA_PANDA}.jobswaiting4 jw4, ( + select jw.pandaid as pid + from {DB_SCHEMA_PANDA}.harvester_rel_jobs_workers jw, {DB_SCHEMA_PANDA}.harvester_workers w + where jw.harvesterid=w.harvesterid and jw.workerid = w.workerid + and w.lastupdate > {1} + and jw.lastupdate > {1} + and w.computingelement like '%{0}%' + ) hj + where hj.pid=jw4.pandaid + union all + select {2} from {DB_SCHEMA_PANDA_ARCH}.jobsarchived ja, ( + select jw.pandaid as pid + from {DB_SCHEMA_PANDA}.harvester_rel_jobs_workers jw, {DB_SCHEMA_PANDA}.harvester_workers w + where jw.harvesterid=w.harvesterid and jw.workerid = 
w.workerid + and w.lastupdate > {1} + and jw.lastupdate > {1} + and w.computingelement like '%{0}%' + ) hj + where hj.pid=ja.pandaid + ) + """.format( + computingelement, + interval_last(lastupdated_hours, inter_unit='hour', db=db), + ', '.join(values), + DB_SCHEMA_PANDA=settings.DB_SCHEMA_PANDA, + DB_SCHEMA_PANDA_ARCH=settings.DB_SCHEMA_PANDA_ARCH, + ) + cur = connection.cursor() + cur.execute(sql_query) jobs = cur.fetchall() columns = [str(column[0]).lower() for column in cur.description] for job in jobs: - jobList.append(dict(zip(columns, job))) + job_list.append(dict(zip(columns, job))) - return jobList + return job_list def getHarversterWorkersForTask(request): diff --git a/core/libs/sqlsyntax.py b/core/libs/sqlsyntax.py index 13d053bc..687a305b 100644 --- a/core/libs/sqlsyntax.py +++ b/core/libs/sqlsyntax.py @@ -21,3 +21,19 @@ def interval_to_sec(inter_str, db='oracle'): return '({})*60*60*24'.format(inter_str) else: return '()'.format(inter_str) + + +def interval_last(inter_value, inter_unit='hour', db='oracle'): + """ + Format interval like depending on DB + :param inter_value: int + :param inter_unit: hour|day + :param db: vendor: postgresql|oracle + :return: + """ + if db == 'postgresql': + return "(now() - interval '{} {}s')".format(inter_value, inter_unit) + elif db == 'oracle': + return "cast (sys_extract_utc(systimestamp) - interval '{}' {} as date)".format(inter_value, inter_unit) + else: + return '' diff --git a/core/pandajob/utils.py b/core/pandajob/utils.py index b35887de..48e42b17 100644 --- a/core/pandajob/utils.py +++ b/core/pandajob/utils.py @@ -133,7 +133,7 @@ def job_summary_dict(request, jobs, fieldlist=None): if fieldlist: flist = fieldlist else: - flist = const.JOB_FIELDS_STANDARD + flist = const.JOB_FIELDS_ATTR_SUMMARY numeric_fields = ('attemptnr', 'jeditaskid', 'taskid', 'noutputdatafiles', 'actualcorecount', 'corecount', 'reqid', 'jobsetid',) diff --git a/core/settings/config.py b/core/settings/config.py index adbe6af9..59392fb4 100644 --- a/core/settings/config.py +++ b/core/settings/config.py @@ -135,6 +135,7 @@ DATABASES = dbaccess_oracle_atlas CRIC_API_URL = 'https://atlas-cric.cern.ch/api/atlas/pandaqueue/query/?json' IDDS_HOST = 'https://iddsserver.cern.ch:443/idds' + RUCIO_UI_URL = 'https://rucio-ui.cern.ch/' elif DEPLOYMENT == 'POSTGRES': DB_SCHEMA = 'doma_pandabigmon' DB_SCHEMA_PANDA = 'doma_panda' @@ -144,6 +145,7 @@ DATABASES = dbaccess_postgres CRIC_API_URL = os.environ.get('CRIC_API_URL', 'https://datalake-cric.cern.ch/api/atlas/pandaqueue/query/?json') IDDS_HOST = os.environ.get('IDDS_HOST', 'https://iddsserver.cern.ch:443/idds') + RUCIO_UI_URL = os.environ.get('RUCIO_UI_URL', '') PRMON_LOGS_DIRECTIO_LOCATION = os.environ.get('PRMON_LOGS_DIRECTIO_LOCATION', "https://storage.googleapis.com/drp-us-central1-logging" "/logs/{queue_name}/PandaJob_{panda_id}") @@ -156,6 +158,7 @@ DATABASES = dbaccess_oracle_doma CRIC_API_URL = 'https://datalake-cric.cern.ch/api/atlas/pandaqueue/query/?json' IDDS_HOST = 'https://aipanda015.cern.ch:443/idds' + RUCIO_UI_URL = os.environ.get('RUCIO_UI_URL', '') PRMON_LOGS_DIRECTIO_LOCATION = "https://storage.googleapis.com/drp-us-central1-logging/logs/{queue_name}/PandaJob_{panda_id}" DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' diff --git a/core/templates/_base_core.html b/core/templates/_base_core.html index aec2546a..9291fc0f 100644 --- a/core/templates/_base_core.html +++ b/core/templates/_base_core.html @@ -251,142 +251,137 @@
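
Note: getHarvesterJobs and getCeHarvesterJobs above now build their column list from the shared const.JOB_FIELDS tuple instead of two diverging inline tuples, then patch the few names that carry an underscore in the DB but not in the Django model. A minimal sketch of the net effect (the import alias `const` is assumed from the `const.JOB_FIELDS` usage above):

    from core import constants as const  # import path assumed from the usage above

    renamed_fields = {
        'resourcetype': 'resource_type',
        'memoryleak': 'memory_leak',
        'memoryleakx2': 'memory_leak_x2',
        'joblabel': 'job_label',
    }
    values = list(const.JOB_FIELDS)
    for k, v in renamed_fields.items():
        # swap the DB-side name for the model-side one where present
        if k in values:
            values.remove(k)
            values.append(v)
    # of the four keys only 'resourcetype' occurs in JOB_FIELDS, so the list is
    # unchanged except that 'resource_type' replaces it, appended at the tail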
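
Note: the new interval_last helper in core/libs/sqlsyntax.py centralizes the vendor-specific "newer than N hours/days" cutoff that getCeHarvesterJobs previously hard-coded for Oracle; the caller now passes db = connections['default'].vendor. What the two supported branches return, per the function body above:

    from core.libs.sqlsyntax import interval_last

    interval_last(12, inter_unit='hour', db='oracle')
    # -> "cast (sys_extract_utc(systimestamp) - interval '12' hour as date)"

    interval_last(7, inter_unit='day', db='postgresql')
    # -> "(now() - interval '7 days')"

    interval_last(7, inter_unit='day', db='mysql')
    # -> ''  (any other vendor yields an empty string, leaving a dangling
    #        "w.lastupdate >" predicate in the caller's SQL, so only oracle
    #        and postgresql are usable here)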
-
-
  • - Services - -
  • +
    + +
    +
    + + {% endif %} + {% if viewParams.MON_VO == 'ATLAS' %} +
  • + Prodsys + +
  • - {% endif %} +
  • + Services + +
  • - {% if "username" in request.session %} -
  • - {{request.session.username}} -
  • - {% endif %} + {% endif %} + + {% if "username" in request.session %} +
  • + {{request.session.username}} +
  • + {% endif %} + +
  • + Help + +
  • + {% if request.user and request.user.is_authenticated %}
  • - Help - + {{ request.user.first_name }} +
  • - - {% if request.user and request.user.is_authenticated %} -
  • - {{ request.user.first_name }} - + {% else %} +
  • + Login
  • - {% else %} -
  • - Login -
  • - {% endif %} + {% endif %} - {% if request.user and request.user.is_authenticated and request.user.is_tester and '/errors/' in request.path %} -
  • - -
  • - {% endif %} - + {% if request.user and request.user.is_authenticated and request.user.is_tester and '/errors/' in request.path %} +
  • + +
  • + {% endif %} + diff --git a/core/templates/datasetList.html b/core/templates/datasetList.html index fcc361a2..fd237c13 100644 --- a/core/templates/datasetList.html +++ b/core/templates/datasetList.html @@ -164,7 +164,12 @@ "data": "type", sDefaultContent: "-", "render": function(data, type, row, meta) { - return ''; + if ("{{ request.session.rucio_ui }}".length > 0) { + return ''; + } + else { + return '-' + } } }, diff --git a/core/templates/dpMain.html b/core/templates/dpMain.html deleted file mode 100644 index 3a53dea0..00000000 --- a/core/templates/dpMain.html +++ /dev/null @@ -1,1267 +0,0 @@ -{% extends "_base_core.html" %} - -{% block page_title %} DPC {% endblock %} -{% block subtitle %} Prototype Data Product Browser - Main - Datasets - Requests - Processing -{% endblock %} - -{% block body %} - -{% if messages %} -

    - - {% for message in messages %} - {{ message|safe }}
    - {% endfor %} -
    -

    -{% endif %} - - - - - -{% if mode == 'intro' %} - - - - - - - - - - - -
    -

    -This Data Product Browser component of the bigpanda monitor is a collection of prototype -ideas and elements for a Data Product Catalog and surrounding services, as sketched out by -Torre at the Feb 23 2015 ADC development meeting. -

    - -

    -The bigpanda monitor is oriented towards showing how the system is operating. These views are oriented towards -showing how the system is being used, from the perspective of generating and using physics data products. -

    -
    -Production requests -
    -Current processing -
    -Dataset search -
    - -{% endif %} - -{% if mode == 'dataset' %} - -{% if show_form %} - - - -
    -

    Dataset search

    -
    -{{ dataset_form.non_field_errors }} - - {% csrf_token %} - -{{ xform.as_table }} - -{% for field in dataset_form %} -{% if field.is_hidden %} -{{ field }} -{% elif field.field.label == 'Date' %} - -{% else %} - - -{% endif %} - -{% endfor %} - -{% if not emptyform %} -{% comment %} Only need to show the submit button if there's more to the form than the type selection {% endcomment %} - -{% endif %} - -
    Date - --- - -
    - {% for error in field.errors %} - {% if error != "This field is required." %} -

    {{ error }}

    - {% endif %} - {% endfor %} -{% if field.field.required %} -{{ field.field.label }} * -{% else %} -{{ field.field.label }} -{% endif %} -
    {{ field.help_text|safe }} - {{ field }} -
    - -
    -
    -{% endif %} - -
    - -{% endif %} - -{% if dataset %} -
    -

    Search results for dataset

    -

    -{{ dataset }} -{% if scope %}
    Scope: {{ scope }} {% endif %} -{% if tid %}
    tid suffix "{{ tid }}" trimmed for search. {% endif %} -{% if tidnum %} The tid dataset was created by task {{ tidnum }} {% endif %} -

    -Rucio link: -
    {{ dataset }} -{% if tid %} -
    {{ dataset }}{{ tid }} - -{% endif %} -

    -

    -
    -{% endif %} - -{% if reqid %} - -

    Production request {{ reqid }}

    -{% if thisProject %} -

    - {{ request.description }} -
    Project: {{ thisProject.project }} {{ thisProject.description }} -

    -{% if info_fields.long_description|length > 0 %} -
    -Description:
    - -{% comment %}
     {% endcomment %}
    -{{ info_fields.long_description|safe }}
    -{% comment %} 
    {% endcomment %} - -
    -{% endif %} - -{% endif %} - -{% elif requests %} - -

    Production requests

    - - - -{% for fdict in reqsuml %} - -{% endfor %} -
    Request attribute summary Sort by {% if request.GET.sortby == 'count' %} count, alpha {% else %} count, alpha {% endif %}
    {{ fdict.field }} ({{ fdict.list|length }}) -{% for item in fdict.list %} - {{ item.kname }} -({{ item.kvalue }}) -{% endfor %} -
    - - - -{% for fdict in ptasksuml %} - -{% endfor %} -
    Production task attribute summary -
    {{ fdict.field }} ({{ fdict.list|length }}) -{% for item in fdict.list %} - {{ item.kname }} -({{ item.kvalue }}) -{% endfor %} -
    - -{% endif %} - -{% if requests %} -{% comment %} CStatus is the cstatus field in the TRequest record. Status is the status field in the RequestStatus record. {% endcomment %} -

    -Show details -

    - - - - - - - - - - -{% for request in requests %} - - - - - - - - - - -{% endfor %} -
    Request Group
    Manager
    Description
    Slices GeV Provenance Reference link -
    Comment -
    Project -
    Campaign -
    Type
    Status Timestamp
    {{ request.reqid }} {{ request.phys_group }} -
    -{{ request.manager|slice:":14" }} -
    {{ request.description }} -
    -{% if request.nslices %} {{ request.nslices }} slices {% endif %} -{{ request.energy_gev }} GeV -{% if request.provenance %} - Provenance:{{ request.provenance }} -{% endif %} -{% if request.ref_link %} - {{ request.ref_link_path }} -{% endif %} - -Show details - -{% if request.ntasks %} - -{% for istep in request.ntasks %} - - -{% endfor %} -
    {{ istep.0 }} -{% for istate in istep.1 %} -{{ istate.0 }}:{{ istate.1 }} -{% endfor %} -
    -{% endif %} - - - -{% if request.comment %}
    {{ request.comment }} {% endif %} -
    {{ request.project_id }} -{% if request.campaign != request.project_id %} -
    {{ request.campaign|slice:":15" }} {{ request.sub_campaign }} -{% endif %} -
    -{% if request.is_fast == True %} Fast {% endif %} - {{ request.request_type|slice:":6" }}
    - - -{% if request.nrequestedevents %} - -{% endif %} - -{% if not request.completedevpct and not request.nprocessedevents %} - -{% endif %} - -{% if request.completedevpct %} - -{% for istep in request.completedevpct %} - -{% endfor %} - -{% elif totalfiles %} - -{% for typ in totalfiles %} - -{% endfor %} - -{% elif request.nprocessedevents %} - -{% for istep in request.nprocessedevents %} - -{% endfor %} - -{% endif %} - -{% comment %} - - - - - - - -{% endcomment %} - -
    {{ request.nrequestedevents }}k evs requested
    request {{ request.cstatus }}
    - - -
    - - -
    -{{ istep.0 }}: {{ istep.1 }}k evs -
    - - -
    - Progress: 80% -
    -
    -
    - - -
    - - - -
    -
    - -
    {{ request.timestamp|date:"Y-m-d H:i" }}
    - - - -{% endif %} - -{% if reqid %} - -{% if events_processed %} - - - -{% for evts in events_processed %} - -{% endfor %} -
    Events produced, from prodsys2 counts
    Step ctag Events produced in healthy tasks (not aborted or broken)
    {{ evts.0 }} {{ evts.1 }}k
    -{% endif %} - -{% if jobsum %} - - - -{% for j in jobsum %} - -{% endfor %} -
    Events produced in last 3 days, from finished job counts. Job list
    Processingtype Events produced
    {{ j.processingtype }} {{ j.nevents__sum }}k
    -{% endif %} - -{% if tasks %} - - -{% for fdict in jtasksuml %} - -{% endfor %} -
    PanDA JEDI task attribute summary - Task list - -
    {{ fdict.field }} ({{ fdict.list|length }}) -{% for item in fdict.list %} - {{ item.kname }} -({{ item.kvalue }}) -{% endfor %} -
    -{% endif %} - -{% endif %} - -{% if datasets and not reqid %} - - - -{% for e in datasets %} - - - - - - - - - -{% endfor %} -
    Production system task datasets: {{ datasets|length }}
    Task ID Dataset Events Files Status Group Timestamp
    {{ e.task_id }} {{ e.name }} -
    Request: {{ e.ptask.request_id }}{% if e.ptask.step.slice.slice %} slice {{ e.ptask.step.slice.slice }}{% endif %} -{% if e.ptask.step.step_template.step %} Step: {{ e.ptask.step.step_template.step }} {{ e.ptask.step.step_template.ctag }}{% endif %} - -{% if e.parent_task_id and e.parent_task_id != e.task_id%} - Parent task {{ e.parent_task_id }} -{% endif %} -
    {{ e.events }} {{ e.files }} {{ e.status|lower }} {{ e.phys_group }} {{ e.timestamp|date:"Y-m-d H:i" }}
    -{% endif %} - -{% if containers %} - - - -{% for e in containers %} - - - - - - - -{% endfor %} -
    Containers: {{ containers|length }}
    Parent task Container Status Request Group
    {{ e.parent_task_id }} {{ e.name }} {{ e.status }} {{ e.rid }} {{ e.phys_group }}
    -{% endif %} - -{% if dsslices %} - - - -{% for s in dsslices %} - -{% endfor %} -
    Production slices: {{ dsslices|length }}
    Request#Slice Dataset Input evs
    -{{ s.request_id }} #{{ s.slice }} - -{{ s.dataset_id }} - -{% if s.brief %}
    {{ s.brief }} {% endif %} -{% if s.phys_comment %}
    {{ s.phys_comment }} {% endif %} -{% if s.comment %}
    {{ s.comment }} {% endif %} -
    -
    -{{ s.input_events }} -
    -{% endif %} - -{% if jedidatasets %} - - - - - - - - {% if dsrec.scope %} {% endif %} - - - - - - - - - - - -{% for dsrec in jedidatasets %} - - - - - - {% if dsrec.scope %} {% endif %} - - - - - - - - - - - - - - - -{% endfor %} -
    JEDI datasets: {{ jedidatasets|length }}
    Task Dataset
    TypeScopeFilesNfiles
    finished
    Nfiles
    failed
    NeventsCreatedModifiedStatusStreamTokenID
    {{ dsrec.jeditaskid }} {{ dsrec.datasetname }}
    {{ dsrec.type }}{{ dsrec.scope }}{{ dsrec.nfiles }}{{ dsrec.nfilesfinished }}{{ dsrec.nfilesfailed }}{{ dsrec.nevents }}{{ dsrec.creationtime|date:"Y-m-d H:i" }}{{ dsrec.modificationtime|date:"m-d H:i" }}{{ dsrec.status }}{{ dsrec.streamname }}{{ dsrec.storagetoken }}{{ dsrec.datasetid }}
    -{% endif %} - - -{% if njeditasks > 0 %} - - - - -{% for e in jeditasks %} -{% if e.has_dataset %} - - - - - - - - - - - - -{% endif %} -{% endfor %} -
    JEDI tasks: {{ njeditasks }}
    Task ID Task info Status Group Cloud Cores RAM Modified Info
    Datasets
    {{ e.jeditaskid }} {{ e.taskname }} -
    -Request: {{ e.reqid }} - Splitrule: {{ e.splitrule }} -Trf: {{ e.transpath }} {{ e.transuses }} -
    -
    {{ e.superstatus }} {{ e.workinggroup }} {{ e.cloud }} {{ e.corecount }} {{ e.ramcount }} {{ e.modificationtime|date:"Y-m-d H:i" }} {{ e.dsinfo }}
    -{% for ds in e.datasets %} -{{ ds.type }} - {{ ds.datasetname }} - {{ ds.status }} - files={{ ds.nfiles }} -{% if ds.type == 'output' %} evs={{ ds.nevents }} {% endif %} -
    -{% endfor %} -
    -{% endif %} - -{% if files %} - - -{% for e in files %} - - - - -{% endfor %} -
    {{ datasets|length }} files into dataset
    {{ e.lfn }} PandaID {{ e.pandaid }}
    -{% endif %} - -{% if tasks and not reqid %} - - -{% for e in tasks %} - - - -{% endfor %} -
    {{ tasks|length }} tasks
    {{ e.name }}
    -{% endif %} - -{% if steps and not reqid %} - - -{% for e in steps %} - - - - - - - -{% endfor %} -
    {{ steps|length }} steps
    {{ e.id }} {{ e.status }} {{ e.input_events }} {{ e.priority }} {{ e.task_config }}
    -{% endif %} - -{% if slices %} - - -{% for e in slices %} -{% if not e.is_hide %} - - - - - - - - - - - -{% endif %} -{% endfor %} -
    {{ slices|length }} slices Click on step status, dataset name, task name to toggle details
    {{ e.slice }} {{ e.input_data }} -{% if e.comment %}
    {{ e.comment }} {% endif %} -{% if e.phys_comment %}
    {{ e.phys_comment }} {% endif %} -{% if e.brief %}
    {{ e.brief }} {% endif %} -
    - -{{ e.dataset_id_html|safe }} - -{% comment %} -{{ e.dataset_id_html|safe }} - -{% endcomment %} - -{% if e.cloned_from %} -Cloned from {{ e.cloned_from }}
    -{% endif %} -{% if e.clones %} -Clones: -{% for c in e.clones %} - {{ c }} -{% endfor %} -
    -{% endif %} - -
    evts
    {{ e.input_events }}
    prio
    {{ e.priority }}
    - -{% for step in e.steps %} - {{ step.ctag.step }} -{{ step.ctag.ctag }} -created {{ step.ctag.def_time|date:"Y-m-d H:i" }} - {{ step.ctag.output_formats }} - {{ step.ctag.trf_name }} {{ step.ctag.swrelease }} - mem={{ step.ctag.memory }} - cpu/ev={{ step.ctag.cpu_per_event }} -{% if step.step_parent %} parent={{ step.step_parent__step_template__step }} {{ step.step_parent__step_template__ctag }} {% endif %} - {{ step.status }}
    - - -{% for task in step.tasks %} - task {{ task.id }}: {{ task.name }} {{ task.status }} - - JEDI task {% if task.jedistatus != task.status %} {{ task.jedistatus }} {% endif %} - - {% if task.total_req_jobs > 0 or task.total_done_jobs > 0 %} - jobs done/req:{{ task.total_done_jobs }}/{{ task.total_req_jobs }} - {% else %} - Look for jobs - {% endif %} - {% if task.total_events and task.total_events > 0 %} - totevs={{ task.total_events }} - {% endif %} - - {% if task.jeditask.progress %} progress={{ task.jeditask.progress }} {% endif %} - - {% if task.jeditask.failurerate %} failurerate={{ task.jeditask.failurerate }} {% endif %} - - {% if task.jobstats %} - jobs: - - {% for s in task.jobstats %} {{ s|safe }} {% endfor %} - - {% endif %} - - {% if task.chain_tid and task.chain_tid != task.id %} - chain_tid {{ task.chain_tid }} - {% endif %} - {% if task.parent_tid and task.parent_tid != task.id %} - parent_tid {{ task.parent_tid }} - {% endif %} -
    - - {% if task.jedi_info %} - JEDI: {{ task.jedi_info|safe }}
    - {% endif %} -{% endfor %} - -{% endfor %} -
    -
    -{% endif %} - -{% if reqid %} - - -{% for col in request_columns %} - -{% endfor %} -
    All request parameters
    {{ col.name }} {{ col.value }}
    -{% endif %} - -{% if mode == 'processing' %} -

    Current queued production jobs, all pre-run states

    -

    -Total queued production jobs: {{ totqjobs }} requests: {{ queuejobs|length }} -

    - - -{% for r in queuejobs %} - 10 %} class="full_queued_list" style="display: none"{% endif %} > - - - - - -{% endfor %} -
    Request -
    Show all -
    Info Description Queued jobs Share of all queued production jobs
    {{ r.reqid }} {{ r.reqdata.campaign }} {{ r.reqdata.phys_group }} {{ r.reqdata.project_id }} {{ r.reqdata.manager }} {{ r.reqdata.description }} {{ r.reqid__count }}
    - -

    Current running production jobs

    -

    -Total running production jobs: {{ totrunjobs }} requests: {{ runjobs|length }} -

    - - -{% for r in runjobs %} - 10 %} class="full_running_list" style="display: none"{% endif %}> - - - - - -{% endfor %} -
    Request -
    Show all -
    Info Description Running jobs Share of all running production jobs
    {{ r.reqid }} {{ r.reqdata.campaign }} {{ r.reqdata.phys_group }} {{ r.reqdata.project_id }} {{ r.reqdata.manager }} {{ r.reqdata.description }} {{ r.reqid__count }}
    - -

    Events produced, last 3 days

    - - - -{% for r in projeventsl %} - 10 %} class="full_project_event_counts" style="display: none"{% endif %}> - - - -{% endfor %} -
    Project -
    Show all -
    Events (k) Fraction of events
    {{ r.project }} {{ r.nevents }}
    - - - -{% for r in sumeventsl %} - 10 %} class="full_event_counts" style="display: none"{% endif %}> - - - - - -{% endfor %} -
    Request -
    Show all -
    Info Description Events (k) Fraction of events
    {{ r.reqid }} {{ r.reqdata.campaign }} {{ r.reqdata.phys_group }} {{ r.reqdata.project_id }} {{ r.reqdata.manager }} {{ r.reqdata.description }} {{ r.nevents }}
    - -

    Completed production jobs, last 3 days

    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Job counts
    total: {{ job_total }}
    finished: {{ totjobs.finished }} {{ jobpct.finished }}% CPU usage
    total: {{ cpu_total }}
    finished: {{ totcpu.finished }} {{ cpupct.finished }}%
    failed: {{ totjobs.failed }} {{ jobpct.failed }}% failed: {{ totcpu.failed }} {{ cpupct.failed }}%
    cancelled: {{ totjobs.cancelled }} {{ jobpct.cancelled }}% cancelled: {{ totcpu.cancelled }} {{ cpupct.cancelled }}%
    - -{% for s in sumjobs %} -

    {{s.status}} job counts

    - - -{% for r in s.recs %} - 10 %} class="full_{{ s.status}}_job_counts" style="display: none"{% endif %}> - - - - - -{% endfor %} -
    Request -
    Show all -
    Info Description Jobs Fraction of {{s.status}} jobs
    {{ r.reqid }} {{ r.reqdata.campaign }} {{ r.reqdata.phys_group }} {{ r.reqdata.project_id }} {{ r.reqdata.manager }} {{ r.reqdata.description }} {{ r.jobstatus__count }}
    -{% endfor %} - -{% for s in cpujobs %} -

    {{s.status}} job walltime

    - - -{% for r in s.recs %} - 10 %} class="full_{{ s.status}}_cpu_counts" style="display: none"{% endif %}> - - - - - -{% endfor %} -
    Request -
    Show all -
    Info Description Walltime Fraction of {{s.status}} walltime
    {{ r.reqid }} {{ r.reqdata.campaign }} {{ r.reqdata.phys_group }} {{ r.reqdata.project_id }} {{ r.reqdata.manager }} {{ r.reqdata.description }} {{ r.cpuconsumptiontime__sum }}
    -{% endfor %} - -{% endif %} - - - - -{% endblock %} - -{% block help %} - - -{% endblock %} diff --git a/core/templates/jobInfo.html b/core/templates/jobInfo.html index cc32a2e8..c326d550 100644 --- a/core/templates/jobInfo.html +++ b/core/templates/jobInfo.html @@ -11,6 +11,7 @@ + {% endblock %} {% block page_title %}PanDA job {{ pandaid }}{% endblock %} @@ -311,9 +312,9 @@ {{coreData.processinstance}} process for {{ coreData.pipelinetask }} pipeline task {% endif %} - {% if rucioUserName|length > 0 %} + {% if rucioUserName|length > 0 and request.session.rucio_ui|length > 0 %} {% for rucioUserNameIt in rucioUserName %} - DDM User activity ({{ rucioUserNameIt }}) + DDM User activity ({{ rucioUserNameIt }}) {% endfor %} {% endif %} @@ -432,7 +433,7 @@ {% if job.destinationse %}Output destination{{ job.destinationse }}{% endif %} {% if job.cpuconsumptiontime %}CPU consumption time (s){{ job.cpuconsumptiontime }}{% endif %} {% if job.jobmetrics %}Job metrics{{ job.jobmetrics }}{% endif %} - {% if jobparams %}Job parameters{{ jobparams }}{% endif %} + {% if jobparams %}Job parameters
    {{ jobparams }}
    {% endif %} {% if job.pilotid %}Pilot ID{{ job.pilotid }}{% endif %} {% if job.batchid %}Batch ID{{ job.batchid }}{% endif %} @@ -492,6 +493,7 @@ $(document).ready(function () { + $(".comment").shorten({showChars: getNCharsShorten(), minHideChars: 250}); var files_list = {{ files|safe }}; buildFilesTable(files_list); let url = window.location.href; @@ -598,7 +600,9 @@ let links = ''; if (row['type'] !== 'pseudo_input') { links += ''+row['lfn']+''; - links += ' '; + if ("{{ request.session.rucio_ui }}".length > 0) { + links += ' '; + } } else { links += row['lfn']; @@ -676,8 +680,11 @@ "render": function(data, type, row, meta) { let links = ''; if (row['type'] !== 'pseudo_input' && data) { - links += ''; - links += ', '+row['datasetname']+''; + if ("{{ request.session.rucio_ui }}".length > 0) { + links += ''; + } + links += links.length > 0 ? ', ' : ''; + links += ''+row['datasetname']+''; } else if (data) { links += data; @@ -693,9 +700,9 @@ sDefaultContent: "-", className: 'num', "render": function(data, type, row, meta) { - if (data && data.length > 0) { + if (data && data.length > 0 && "{{ request.session.rucio_ui }}".length > 0) { let scope = (!row['dispatchdblock'].includes(row['scope']) && row['dispatchdblock'].split('.')[0] === 'panda') ? 'panda' : row['scope']; - return '' + return '' } else { return '-' @@ -708,8 +715,8 @@ className: 'num', "render": function(data, type, row, meta) { let links = row['destinationdblock_vis']; - if (data && data.length > 0) { - links += ' ' + if (data && data.length > 0 && "{{ request.session.rucio_ui }}".length > 0) { + links += ' ' } else { links += '-' diff --git a/core/templates/jobInfoES.html b/core/templates/jobInfoES.html index 1fff56e7..e2201fa5 100644 --- a/core/templates/jobInfoES.html +++ b/core/templates/jobInfoES.html @@ -8,6 +8,7 @@ + {% endblock %} {% block page_title %}PanDA job {{ pandaid }}{% endblock %} @@ -270,9 +271,9 @@ {{coreData.processinstance}} process for {{ coreData.pipelinetask }} pipeline task {% endif %} - {% if rucioUserName|length > 0 %} + {% if rucioUserName|length > 0 and request.session.rucio_ui|length > 0 %} {% for rucioUserNameIt in rucioUserName %} - DDM User activity ({{ rucioUserNameIt }}) + DDM User activity ({{ rucioUserNameIt }}) {% endfor %} {% endif %} @@ -384,7 +385,7 @@ {% if job.destinationse %}Output destination{{ job.destinationse }}{% endif %} {% if job.cpuconsumptiontime %}CPU consumption time (s){{ job.cpuconsumptiontime }}{% endif %} {% if job.jobmetrics %}Job metrics{{ job.jobmetrics }}{% endif %} - {% if jobparams %}Job parameters{{ jobparams }}{% endif %} + {% if jobparams %}Job parameters
    {{ jobparams }}
    {% endif %} {% if job.pilotid %}Pilot ID{{ job.pilotid }}{% endif %} {% if job.batchid %}Batch ID{{ job.batchid }}{% endif %} @@ -444,6 +445,7 @@ $(document).ready(function () { + $(".comment").shorten({showChars: getNCharsShorten(), minHideChars: 250}); var files_list = {{ files|safe }}; buildFilesTable(files_list); var events_list = {{ evtable|safe }}; @@ -547,7 +549,9 @@ let links = ''; if (row['type'] !== 'pseudo_input') { links += ''+row['lfn']+''; - links += ' '; + if ("{{ request.session.rucio_ui }}".length > 0) { + links += ' '; + } } else { links += row['lfn']; @@ -625,7 +629,9 @@ "render": function(data, type, row, meta) { let links = ''; if (row['type'] !== 'pseudo_input' && data) { - links += ''; + if ("{{ request.session.rucio_ui }}".length > 0) { + links += ''; + } links += ', '+row['datasetname']+''; } else if (data) { @@ -642,8 +648,8 @@ sDefaultContent: "-", className: 'num', "render": function(data, type, row, meta) { - if (data && data.length > 0) { - return '' + if (data && data.length > 0 && "{{ request.session.rucio_ui }}".length > 0) { + return '' } else { return '-' @@ -656,8 +662,8 @@ className: 'num', "render": function(data, type, row, meta) { let links = row['destinationdblock_vis']; - if (data && data.length > 0) { - links += ' ' + if (data && data.length > 0 && "{{ request.session.rucio_ui }}".length > 0) { + links += ' ' } else { links += '-' diff --git a/core/templates/jobList.html b/core/templates/jobList.html index cd4a64fc..ebbf9afc 100644 --- a/core/templates/jobList.html +++ b/core/templates/jobList.html @@ -247,8 +247,9 @@
    Overall error summary
    {% if errsByCount %}
    Click to show/hide failed jobs details
    {% endif %} - +{% if viewParams.MON_VO == 'ATLAS' %} Prodsys Jobs Handling +{% endif %}

    @@ -345,7 +346,7 @@
    Overall error summary
    Job name: {{ job.jobname }} #{{ job.attemptnr }} - Datasets: {% if job.proddblock %} In: {{ job.proddblock }} || Rucio link {% endif %} {% if job.destinationdblock %} {% if job.proddblock %}
    {% endif %} Out: {{ job.destinationdblock }} {% endif %} + Datasets: {% if job.proddblock %} In: {{ job.proddblock }} {% if request.session.rucio_ui|length > 0 %}|| Rucio link{% endif %} {% endif %} {% if job.destinationdblock %} {% if job.proddblock %}
    {% endif %} Out: {{ job.destinationdblock }} {% endif %} {% endfor %}
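
Note: the template changes in datasetList.html, jobInfo.html, jobInfoES.html and jobList.html above (and jobListES.html below) all follow one pattern: every Rucio link is gated on request.session.rucio_ui, so a deployment where RUCIO_UI_URL resolves to an empty string (the default for the POSTGRES and DOMA branches of config.py) renders a plain value instead of a dead link. The code that copies the setting into the session is outside this diff; a minimal sketch of the assumed wiring (the helper name expose_rucio_ui and its call site are illustrative, not part of this change):

    from django.conf import settings

    def expose_rucio_ui(request):
        # illustrative only: expose the configured Rucio UI base URL so templates
        # can test "{{ request.session.rucio_ui }}".length > 0 in JS renderers
        # or request.session.rucio_ui|length > 0 in template tags
        request.session['rucio_ui'] = getattr(settings, 'RUCIO_UI_URL', '')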
    diff --git a/core/templates/jobListES.html b/core/templates/jobListES.html index 055f0b21..983577cd 100644 --- a/core/templates/jobListES.html +++ b/core/templates/jobListES.html @@ -315,7 +315,7 @@
    Overall error summary
    {{ job.esjobstr }} - Datasets: {% if job.proddblock %} In: {{ job.proddblock }} || Rucio link {% endif %} {% if job.destinationdblock %} {% if job.proddblock %}
    {% endif %} Out: {{ job.destinationdblock }} {% endif %} + Datasets: {% if job.proddblock %} In: {{ job.proddblock }} {% if request.session.rucio_ui|length > 0 %} || Rucio link{% endif %} {% endif %} {% if job.destinationdblock %} {% if job.proddblock %}
    {% endif %} Out: {{ job.destinationdblock }} {% endif %} {% endfor %} diff --git a/core/templates/jobListESProto.html b/core/templates/jobListESProto.html deleted file mode 100644 index 8b9bfde6..00000000 --- a/core/templates/jobListESProto.html +++ /dev/null @@ -1,159 +0,0 @@ -{% extends "_base_core.html" %} - -{% block page_title %} {{ viewParams.MON_VO }} PanDA ES jobs{% endblock %} -{% block title %} {{ viewParams.MON_VO }} PanDA monitor{% endblock %} -{% block subtitle %}PanDA Event Service jobs{{ viewParams.selection|safe }} - -{% endblock %} -{% block body %} - -{{ viewParams.header }} - -{{ njobs }} jobs in this selection -{% if requestParams.transferringnotupdated %}
    Jobs in transferring state for more than {{ requestParams.transferringnotupdated }} hours {% endif %} -{% if requestParams.statenotupdated %}
    Jobs in {{ requestParams.jobstatus }} state for more than {{ requestParams.statenotupdated }} hours {% endif %} -{% if requestParams.workinggroup %}
    Working group: {{ requestParams.workinggroup }} {% endif %} -{% if requestParams.jobtype %}
    Job type: {{ requestParams.jobtype }} {% endif %} -{% if requestParams.jobstatus %}
    Job status: {{ requestParams.jobstatus }} {% endif %} -{% if requestParams.cloud %}
    Cloud: {{ requestParams.cloud }} {% endif %} -{% if requestParams.computingsite %}
    Site: {{ requestParams.computingsite }} Show site information page {% endif %} -{% if user %}
    User: {{ user }} Show user page {% endif %} -{% if requestParams.jeditaskid and requestParams.jeditaskid != 'None' %}
    Task: {{ requestParams.jeditaskid }} {{ taskname }} {% endif %} -{% if requestParams.taskid and requestParams.taskid != 'None' %}
    Task: {{ requestParams.taskid }} {{ taskname }} {% endif %} -{% if requestParams.jobsetid %}
    Jobset ID: {{ requestParams.jobsetid }} {% endif %} -{% if requestParams.parentid %}
    Parent ID: {{ requestParams.parentid }} {% endif %} -{% if requestParams.jobname %}
    Job name: {{ requestParams.jobname }} {% endif %} -{% if requestParams.priorityrange %}
    Current priority range: {{ requestParams.priorityrange }} {% endif %} -{% if requestParams.processingtype %}
    Processing type: {{ requestParams.processingtype }} {% endif %} -{% if requestParams.transformation %}
    Transformation: {{ requestParams.transformation }} {% endif %} -

    - -{% if jobList %} -
    Job modification times in this listing range from {{ tfirst }} to {{ tlast }}. -
    Job current priorities in this listing range from {{ plow }} to {{ phigh }}. See priorityrange in the job attribute summary to see how priorities are distributed. -{% endif %} - -{% if ndrops > 0 %} -
    -{{ ndrops }} jobs were dropped from this listing because they were retried. Where there were retries, the latest retry job (only) is listed. - -
    Dropped (retry):
    -{% for drop in droplist %} -{{ drop.pandaid }} ({{ drop.newpandaid }}) -{% endfor %} -
    -
    -

    -{% endif %} - -{% if sumd %} - - -{% for fdict in sumd %} - - - -{% endfor %} -
    Job attribute summary Sort by {% if requestParams.sortby == 'count' %} count, alpha {% else %} count, alpha {% endif %}
    {{ fdict.field }} ({{ fdict.list|length }})
    - {% for item in fdict.list %} - {% if fdict.field == 'JEDITASKID' %} {{ item.kname }} ({{ item.kvalue }}) - {% else %} - {% if fdict.field == 'JOBSTATUS' %} {% else %} {% endif %} {{ item.kname }} - {% if fdict.field == 'EVENTSERVICESTATUS' %} ({{ item.kvalue }}) {% else %} - ({{ item.kvalue }}) - {% endif %} - {% endif %} - {% endfor %} -
    -
    - - - - - - - - - - - - - - - - - - - - - {% for job in jobList %} - - - - - - - - - - - - - - - - - - - - - - {% endfor %} -
    - Job list -{% if display_limit and display_limit < njobs %} -Only the most recent {{ display_limit }} jobs (sorted by PandaID) are shown. Remove the limit and sort by -PandaID, -{% else %} -Sort by -{% if sortby == "PandaID" %} -PandaID, -{% else %} -PandaID, -{% endif %} -{% endif %} - -{% if sortby == "time-ascending" %} -ascending mod time, descending mod time, priority, attemptnr -{% elif sortby == "time-descending" %} -ascending mod time, descending mod time, priority, attemptnr -{% elif sortby == "priority" %} -ascending mod time, descending mod time, priority, attemptnr -{% elif sortby == "attemptnr" %} -ascending mod time, descending mod time, priority, attemptnr -{% else %} -ascending mod time, descending mod time, priority, attemptnr -{% endif %} - -
    PanDA ID
    Attempt#
    Owner {% if viewParams.MON_VO != 'ATLAS' %} / VO{% endif %}
    Group
    Task IDTransformationModeCoresStatusSubstateCreatedTime to start
    d:h:m:s
    Duration
    d:h:m:s
    Mod{% if viewParams.MON_VO == 'ATLAS' %}Cloud {% endif%}SitePriorityJob info
    {{ job.pandaid }}
    Attempt {{ job.attemptnr }}
    {{ job.produsername }}{% if job.workinggroup %}
    {{ job.workinggroup }}{% endif %}{% if viewParams.MON_VO != 'ATLAS' %}{% if job.vo %} / {{ job.vo }}{% endif %} {% endif %} -
    {% if job.jeditaskid and job.jeditaskid != 'None' %}{{ job.jeditaskid }} {% elif job.taskid and job.taskid != 'None' %} {{ job.taskid }} {% endif %}{{ job.transformation }}{{ job.jobmode }}{{ job.corecount }}{{ job.jobstatus }}{{ job.substate }}{{ job.creationtime }}{{ job.waittime }}{{ job.duration }}{{ job.modificationtime }}{% if viewParams.MON_VO == 'ATLAS' %}{{job.cloud}} {% endif %}{{ job.computingsite }}{{ job.currentpriority }}{% if job.jobinfo != '' %}{{job.jobinfo|force_escape|safe}}
    {% endif %} - {% if job.errorinfo != '' %}{{job.errorinfo|force_escape|safe}}{% endif %} -
    {{ job.esjobstr }}
    Datasets: {% if job.proddblock %} In: {{ job.proddblock }} || Rucio link {% endif %} {% if job.destinationdblock %} {% if job.proddblock %}
    {% endif %} Out: {{ job.destinationdblock }} {% endif %}
    - -{% else %} - -

    -No jobs matched the query. You can increase the time depth with a days=N parameter on the URL, but use judiciously, this is hard on the database. If you know the jobsetid or taskid/jeditaskid of the jobs you're interested in, you can add these to the URL, and the search will not be time limited (because these IDs give the DB enough indexing info that it isn't stressful on the DB to exclude the time constraint) and it will be fast also. -

    - -{% endif %} - -{% endblock %} - - -{% block help %} -{% include "jobListHelp.html" %} -{% include "jobInfoHelp.html" with show="all" %} -{% endblock %} - diff --git a/core/templates/jobListProto.html b/core/templates/jobListProto.html deleted file mode 100644 index b7afb907..00000000 --- a/core/templates/jobListProto.html +++ /dev/null @@ -1,235 +0,0 @@ -{% extends "_base_core.html" %} -{% load static %} -{% block page_title %} {{ viewParams.MON_VO }} PanDA jobs{% endblock %} -{% block subtitle %}PanDA jobs{{ viewParams.selection|safe }} - -{% if showwarn and njobs%} - Warning: limit {{joblimit}} per job table -{% endif %} -{% if showTop %} Task has {{totalJobs}} jobs in table Jobsarchived, limit is set to {{limit}} {% endif %} - -{% endblock %} -{% block body %} - -{{ viewParams.header }} - -{{ njobs }} jobs in this selection -{% if requestParams.transferringnotupdated %}
    Jobs in transferring state for more than {{ requestParams.transferringnotupdated }} hours {% endif %} -{% if requestParams.statenotupdated %}
    Jobs in {{ requestParams.jobstatus }} state for more than {{ requestParams.statenotupdated }} hours {% endif %} -{% if requestParams.workinggroup %}
    Working group: {{ requestParams.workinggroup }} {% endif %} -{% if requestParams.jobtype %}
    Job type: {{ requestParams.jobtype }} {% endif %} -{% if requestParams.jobstatus %}
    Job status: {{ requestParams.jobstatus }} {% endif %} -{% if requestParams.cloud %}
    Cloud: {{ requestParams.cloud }} {% endif %} -{% if requestParams.computingsite %}
    Site: {{ requestParams.computingsite }} Show site information page {% endif %} -{% if user %}
    User: {{ user }} Show user page {% endif %} -{% if requestParams.jeditaskid and requestParams.jeditaskid != 'None' %}
    Task: {{ requestParams.jeditaskid }} {{ taskname }} {% endif %} -{% if requestParams.taskid and requestParams.taskid != 'None' %}
    Task: {{ requestParams.taskid }} {{ taskname }} {% endif %} -{% if requestParams.jobsetid %}
    Jobset ID: {{ requestParams.jobsetid }} {% endif %} -{% if requestParams.parentid %}
    Parent ID: {{ requestParams.parentid }} {% endif %} - -{% if requestParams.reqid %}
    Request ID: {{ requestParams.reqid }} {% endif %} -{% if requestParams.reqid_from %}
    From request ID: {{ requestParams.reqid_from }} {% endif %} -{% if requestParams.reqid_to %}
    To request ID: {{ requestParams.reqid_to }} {% endif %} - -{% if requestParams.jobname %}
    Job name: {{ requestParams.jobname }} {% endif %} -{% if requestParams.priorityrange %}
    Current priority range: {{ requestParams.priorityrange }} {% endif %} -{% if requestParams.processingtype %}
    Processing type: {{ requestParams.processingtype }} {% endif %} -{% if requestParams.transformation %}
    Transformation: {{ requestParams.transformation }} {% endif %} -

    - -{% if jobList %} -
    Job modification times in this listing range from {{ tfirst }} to {{ tlast }}. -
    Job current priorities in this listing range from {{ plow }} to {{ phigh }}. See priorityrange in the job attribute summary to see how priorities are distributed. -{% endif %} - -{% if flowstruct %} - - - -{% include "googleFlowDiagram.html" with struct=flowstruct %} -{% endif %} - - - -{% if ndrops > 0 %} -
    -{{ ndrops }} jobs were dropped from this listing because they were retried. Where there were retries, the latest retry job (only) is listed. - -
    Click to show/hide dropped jobs
    -
    Switch to nodrop mode - -
    -

    -{% endif %} - -{% if ndrops < 0 %} -
    - Switch to nodrop mode -
    -{% endif %} - - -{% if sumd %} - - -{% for fdict in sumd %} - - - -{% endfor %} -
    Job attribute summary Sort by {% if requestParams.sortby == 'count' %} count, alpha {% else %} count, alpha {% endif %}
    {{ fdict.field }} ({{ fdict.list|length }})
    - {% for item in fdict.list %} - {% if fdict.field == 'JOBSTATUS' %} {% else %} {% endif %} {{ item.kname }} - - {% if fdict.field == 'eventservicestatus' %} ({{ item.kvalue }}) {% else %} - ({{ item.kvalue }}) - {% endif %} - {% endfor %} -
    -
    - - -{% if errsByCount %} - - - - -{% for errval in errsByCount %} - -{% endfor %} -
    Overall error summary
    Category:codeAttempt listNerrorsSample error description
    {{ errval.error }} jobs {{ errval.count }} {{ errval.diag }}
    -{% endif %} - -{% if errsByCount %} -
    Click to show/hide failed jobs details
    -{% endif %} - Prodsys Jobs Handling -

    - - - - - - - - - - - - - - - - - {% if requestParams.jeditaskid %} - - {% endif %} - - - {% for job in jobList %} - - - - - - - - - - - - - - {% if requestParams.jeditaskid %} - - {% endif %} - - - - - - {% endfor %} -
    - Job list -{% if display_limit and display_limit < njobs %} -Only the most recent {{ display_limit }} jobs are shown. Remove the limit and sort by -{% if sortby == "PandaID" %} -PandaID, -{% else %} -PandaID, -{% endif %} -{% else %} -Sort by -{% if sortby == "PandaID" %} -PandaID, -{% else %} -PandaID, -{% endif %} -{% endif %} - - -{% if sortby == "statetime" %} -time since last state change, ascending mod time, descending mod time, priority, attemptnr, ascending duration, descending duration -{% elif sortby == "time-ascending" %} -time since last state change, ascending mod time, descending mod time, priority, attemptnr, ascending duration, descending duration -{% elif sortby == "time-descending" %} -time since last state change, ascending mod time, descending mod time, priority, attemptnr, ascending duration, descending duration -{% elif sortby == "priority" %} -time since last state change, ascending mod time, descending mod time, priority, attemptnr, ascending duration, descending duration -{% elif sortby == "attemptnr" %} -time since last state change, ascending mod time, descending mod time, priority, attemptnr, ascending duration, descending duration -{% elif sortby == "duration-ascending" %} -time since last state change, ascending mod time, descending mod time, priority, attemptnr, descending duration -{% elif sortby == "duration-descending" %} -time since last state change, ascending mod time, descending mod time, priority, attemptnr, ascending duration -{% else %} -time since last state change, ascending mod time, descending mod time, priority, attemptnr, ascending duration, descending duration -{% endif %} - -
    PanDA ID
    Attempt#
    Owner {% if viewParams.MON_VO != 'ATLAS' %} / VO{% endif %}
    Group
    Request
    Task ID
    TransformationStatusCreatedTime to start
    d:h:m:s
    Duration
    d:h:m:s
    Mod{% if viewParams.MON_VO == 'ATLAS' %}Cloud {% endif%}SitePriorityMaximum
    PSS
    Job info
    {{ job.pandaid }}
    Attempt {{ job.attemptnr }}
    {{ job.produsername }}{% if job.workinggroup %}
    {{ job.workinggroup }}{% endif %}{% if viewParams.MON_VO != 'ATLAS' %}{% if job.vo %} / {{ job.vo }}{% endif %} {% endif %} -
    {% if job.reqid %} {{ job.reqid }}
    {% endif %} - {% if job.jeditaskid and job.jeditaskid != 'None' %}{{ job.jeditaskid }} {% elif job.taskid and job.taskid != 'None' %} {{ job.taskid }} {% endif %}
    {{ job.transformation }}{{ job.jobstatus }}{{ job.creationtime }}{{ job.waittime }}{{ job.duration }}{{ job.modificationtime }}{% if viewParams.MON_VO == 'ATLAS' %}{{job.cloud}} {% endif %}{{ job.computingsite }}{{ job.currentpriority }}{% if job.maxpss and job.maxpss != 'None' and job.maxpss != -1 %}{{ job.maxpss }}{% endif %}{% if job.jobinfo != '' %}{{job.jobinfo|force_escape|safe}}
    {% endif %} - {% if job.errorinfo != '' %}{{job.errorinfo|force_escape|safe}}{% endif %} -
    Job name: {{ job.jobname }} #{{ job.attemptnr }}
    Datasets: {% if job.proddblock %} In: {{ job.proddblock }} {% endif %} {% if job.destinationdblock %} {% if job.proddblock %}
    {% endif %} Out: {{ job.destinationdblock }} {% endif %}
    - -{% else %} - -

    -No jobs matched the query. You can increase the time depth with a days=N parameter on the URL, but use judiciously, this is hard on the database. If you know the jobsetid or taskid/jeditaskid of the jobs you're interested in, you can add these to the URL, and the search will not be time limited (because these IDs give the DB enough indexing info that it isn't stressful on the DB to exclude the time constraint) and it will be fast also. -

    - -{% endif %} - -{% endblock %} - - -{% block help %} -{% include "jobListHelp.html" %} -{% include "jobInfoHelp.html" with show="all" %} -{% endblock %} - diff --git a/core/templates/taskInfo.html b/core/templates/taskInfo.html index 2b593430..0e388ac1 100644 --- a/core/templates/taskInfo.html +++ b/core/templates/taskInfo.html @@ -10,6 +10,7 @@ + @@ -34,7 +35,7 @@ {% if task.campaign %} Campaign {% endif %} - {% if task.deftreqid %} + {% if task.deftreqid and viewParams.MON_VO == 'ATLAS' %} Request {% endif %} Type @@ -103,12 +104,18 @@ {% endif %} - {{ task.jeditaskid }} + + {% if viewParams.MON_VO == 'ATLAS' %} + {{ task.jeditaskid }} + {% else %} + {{ task.jeditaskid }} + {% endif %} + {% if task.campaign %} {% if task.campaign_cut %}{{ task.campaign_cut }}{% else %}{{ task.campaign }}{% endif %} {% endif %} - {% if task.deftreqid %} + {% if task.deftreqid and viewParams.MON_VO == 'ATLAS' %} {{ task.deftreqid }} {% endif %} @@ -231,7 +238,7 @@
    Logged status: {{ task.errordialog|safe }}
    {% else %} Switch to nodrop mode {% endif %} - {% if vomode == 'atlas' %} + {% if viewParams.MON_VO == 'ATLAS' %} {% if task.tasktype == 'prod' %} Manage Prod Task @@ -509,9 +516,9 @@
    The split rule(s) has been changed for this task:
    {$ key $} - {$ value $} - {$ value $} - {$ value $} + {% if request.session.rucio_ui|length > 0 %}{$ value $}{% else %}{$ value $}{% endif %} + {% if viewParams.MON_VO == 'ATLAS' %}{$ value $}{% else %}{$ value $}{% endif %} + {% if request.session.rucio_ui|length > 0 %}{$ value $}{% else %}{$ value $}{% endif %} {$ value $} @@ -599,7 +606,7 @@
    The split rule(s) has been changed for this task:
    {% for p in jobparams %} - {{ p|safe }} +
    {{ p|safe }}
    {% endfor %} @@ -615,12 +622,16 @@
    The split rule(s) has been changed for this task:
    {% for p in taskparams %} - {% if p.name != 'jobParameters' and p.name != 'log' %} - - {{ p.name }} - {% if not p.value is None and not p.value == '' %} {{ p.value }} {% else %}—{% endif %} - - {% endif %} + {% if p.name != 'jobParameters' and p.name != 'log' %} + + {{ p.name }} + +
    + {% if not p.value is None and not p.value == '' %} {{ p.value }} {% else %}—{% endif %} +
    + + + {% endif %} {% endfor %} @@ -671,438 +682,439 @@
    The split rule(s) has been changed for this task:
    var jeditaskid = {{ jeditaskid | safe }}; google.charts.load('current', {'packages':['sankey']}); - app.controller('iDDSInfoForTaskController', function ($scope, $http, $rootScope) { - $scope.iddsjson = ""; - $scope.getiDDs = function () { - $http({ - url: '/idds/getiddsfortask/', - method: "GET", - params: {'jeditaskid':{{ jeditaskid|safe }}} - } - ).then(function (res) { - var iddsjson = angular.fromJson(res.data); - if (('data' in iddsjson) && (Object.keys(iddsjson['data']).length > 0)) { - $scope.iddsjson = iddsjson['data']; - idds_data = iddsjson['data']; - $rootScope.$emit('idds_info', idds_data); - $('#iDDsDiv').show(); - } - }); - }; - var init = function () { - $scope.getiDDs(); - }; - init(); +app.controller('iDDSInfoForTaskController', function ($scope, $http, $rootScope) { + $scope.iddsjson = ""; + $scope.getiDDs = function () { + $http({ + url: '/idds/getiddsfortask/', + method: "GET", + params: {'jeditaskid':{{ jeditaskid|safe }}} + } + ).then(function (res) { + var iddsjson = angular.fromJson(res.data); + if (('data' in iddsjson) && (Object.keys(iddsjson['data']).length > 0)) { + $scope.iddsjson = iddsjson['data']; + idds_data = iddsjson['data']; + $rootScope.$emit('idds_info', idds_data); + $('#iDDsDiv').show(); + } + }); + }; + var init = function () { + $scope.getiDDs(); + }; + init(); +}); +app.controller('iDDSGeneralInfo', function($rootScope, $scope) { + $scope.loading = true; + $rootScope.$on('idds_info', function(event, data) { + $scope.iddsjson = data; + $scope.loading = false; }); - app.controller('iDDSGeneralInfo', function($rootScope, $scope) { - $scope.loading = true; - $rootScope.$on('idds_info', function(event, data) { - $scope.iddsjson = data; - $scope.loading = false; - }); +}); +app.controller('StagingDSController', function ($scope, $http) { + $scope.json = ""; + $scope.MON_VO = MON_VO; + $scope.getStagingDS = function () { + $http({ + url: '/api/dc/staginginfofortask/', + method: "GET", + params: {'jeditaskid':jeditaskid} + } + ).then(function (res) { + $scope.json = angular.fromJson(res.data); + if (jeditaskid in $scope.json) { + var displaydiv = document.getElementById('dsStageDiv'); + displaydiv.style.display = ""; + $scope.json = $scope.json[jeditaskid]; + } }); - app.controller('StagingDSController', function ($scope, $http) { - $scope.json = ""; - $scope.MON_VO = MON_VO; - $scope.getStagingDS = function () { - $http({ - url: '/api/dc/staginginfofortask/', - method: "GET", - params: {'jeditaskid':jeditaskid} - } - ).then(function (res) { - $scope.json = angular.fromJson(res.data); - if (jeditaskid in $scope.json) { - var displaydiv = document.getElementById('dsStageDiv'); - displaydiv.style.display = ""; - $scope.json = $scope.json[jeditaskid]; - } - }); - }; - var init = function () { - $scope.getStagingDS(); - }; - if (MON_VO === 'ATLAS') { - init(); + }; + var init = function () { + $scope.getStagingDS(); + }; + if (MON_VO === 'ATLAS') { + init(); + } +}); + +var jc_plot_data = {{ plotsDict|safe }}; +app.controller('jobConsumptionPlotsController', function($scope) { + $scope.taskinfo = {}; + $scope.taskinfo.jc_plots = { + selection: { + category: '', + is_hidden: true, + mode: '' + }, + options: { + category: [] + }, + plot_data: {}, + charts: {} + }; + + $scope.taskinfo.jc_plots.fill = function () { + $scope.taskinfo.jc_plots.plot_data = jc_plot_data; + if (Object.keys($scope.taskinfo.jc_plots.plot_data).length > 1) { + Object.keys($scope.taskinfo.jc_plots.plot_data[1].data.data).forEach((key) => { + 
$scope.taskinfo.jc_plots.options.category.push(key); + }); + $scope.taskinfo.jc_plots.options.category.sort(); + if ($scope.taskinfo.jc_plots.options.category.includes('run')) { + $scope.taskinfo.jc_plots.selection.category = 'run'; + } + else { + $scope.taskinfo.jc_plots.selection.category = 'all'; + } + } + }; + + $scope.taskinfo.jc_plots.build = function () { + $scope.taskinfo.jc_plots.plot_data.forEach((item) => { + if (item.data.details.type === 'pie') { + $scope.taskinfo.jc_plots.charts[item.name + "_chart"] = draw_donut(item.data.data[$scope.taskinfo.jc_plots.selection.category]['columns'], item.name + "_chart", item.data.details.title, item.data.details) + } + else if (item.data.details.type === 'stack_bar') { + $scope.taskinfo.jc_plots.charts[item.name + "_chart"] = draw_stacked_bar_hist(item.data.data[$scope.taskinfo.jc_plots.selection.category], item.data.details, item.name + "_chart"); + } + }) + }; + + $scope.taskinfo.jc_plots.rebuild = function () { + $scope.taskinfo.jc_plots.destroy(); + $scope.taskinfo.jc_plots.build(); + }; + + $scope.taskinfo.jc_plots.destroy = function () { + let plot_names = Object.keys($scope.taskinfo.jc_plots.charts); + plot_names.forEach((item) => { + if ($scope.taskinfo.jc_plots.charts[item]) { + $scope.taskinfo.jc_plots.charts[item] = $scope.taskinfo.jc_plots.charts[item].destroy(); } }); + }; - var jc_plot_data = {{ plotsDict|safe }}; - app.controller('jobConsumptionPlotsController', function($scope) { - $scope.taskinfo = {}; - $scope.taskinfo.jc_plots = { - selection: { - category: '', - is_hidden: true, - mode: '' - }, - options: { - category: [] + $scope.taskinfo.jc_plots.toggle = function () { + + if (Object.keys($scope.taskinfo.jc_plots.plot_data).length === 0) $scope.taskinfo.jc_plots.fill(); + + if ($scope.taskinfo.jc_plots.options.category.length > 0) { + ($scope.taskinfo.jc_plots.selection.is_hidden) ? $scope.taskinfo.jc_plots.selection.is_hidden = false : $scope.taskinfo.jc_plots.selection.is_hidden = true; + } + }; + +}) +.directive('jcplotDirective', function ($timeout) { + var template = '
    '; + return { + template: template, + scope: { + plot: '=', + parent: '=', }, - plot_data: {}, - charts: {} - }; - - $scope.taskinfo.jc_plots.fill = function () { - $scope.taskinfo.jc_plots.plot_data = jc_plot_data; - if (Object.keys($scope.taskinfo.jc_plots.plot_data).length > 1) { - Object.keys($scope.taskinfo.jc_plots.plot_data[1].data.data).forEach((key) => { - $scope.taskinfo.jc_plots.options.category.push(key); + link: function (scope, element, attrs) { + $timeout(() => { + element.ready(() => { + if (scope.plot.data.details.type === 'pie') { + if ('size' in scope.plot.data.details) {scope.plot.data.details.size[0] = getWidth();} + scope.parent.taskinfo.jc_plots.charts[scope.plot.name + "_chart"] = draw_donut(scope.plot.data.data[scope.parent.taskinfo.jc_plots.selection.category]['columns'], scope.plot.name + "_chart", scope.plot.data.details.title, scope.plot.data.details) + } + else if (scope.plot.data.details.type === 'stack_bar') { + scope.parent.taskinfo.jc_plots.charts[scope.plot.name + "_chart"] = draw_stacked_bar_hist(scope.plot.data.data[scope.parent.taskinfo.jc_plots.selection.category], scope.plot.data.details, scope.plot.name + "_chart"); + } + }); }); - $scope.taskinfo.jc_plots.options.category.sort(); - if ($scope.taskinfo.jc_plots.options.category.includes('run')) { - $scope.taskinfo.jc_plots.selection.category = 'run'; - } - else { - $scope.taskinfo.jc_plots.selection.category = 'all'; - } } - }; - - $scope.taskinfo.jc_plots.build = function () { - $scope.taskinfo.jc_plots.plot_data.forEach((item) => { - if (item.data.details.type === 'pie') { - $scope.taskinfo.jc_plots.charts[item.name + "_chart"] = draw_donut(item.data.data[$scope.taskinfo.jc_plots.selection.category]['columns'], item.name + "_chart", item.data.details.title, item.data.details) - } - else if (item.data.details.type === 'stack_bar') { - $scope.taskinfo.jc_plots.charts[item.name + "_chart"] = draw_stacked_bar_hist(item.data.data[$scope.taskinfo.jc_plots.selection.category], item.data.details, item.name + "_chart"); + }; +}); + + +app.controller('taskflowController', function ($scope, $http, $sce) { + $scope.taskflow = { + message: $sce.trustAsHtml(' Loading... 
'), + data: [], + is_hidden: true, + plot_data: { + columns: [], + }, + plot: "", + }; + $scope.taskflow.get = function () { + $http({ + url: "{% url 'taskFlowDiagram' jeditaskid %}", + method: "GET", + params: {'json':1} + } + ).then(function success(response) { + $scope.taskflow.message = ""; + $scope.taskflow.data = angular.fromJson(response.data.data); + console.log(response.data.data) + if ($scope.taskflow.data.length > 0) { + $scope.taskflow.buildPlot(); + } + else { + $scope.taskflow.message = $sce.trustAsHtml("No data received :("); + } + }, + function error(response) { + console.log(response); + $scope.taskflow.message = $sce.trustAsHtml('Failed to load data :( '); } - }) - }; - - $scope.taskinfo.jc_plots.rebuild = function () { - $scope.taskinfo.jc_plots.destroy(); - $scope.taskinfo.jc_plots.build(); - }; - - $scope.taskinfo.jc_plots.destroy = function () { - let plot_names = Object.keys($scope.taskinfo.jc_plots.charts); - plot_names.forEach((item) => { - if ($scope.taskinfo.jc_plots.charts[item]) { - $scope.taskinfo.jc_plots.charts[item] = $scope.taskinfo.jc_plots.charts[item].destroy(); + ); + }; + + $scope.taskflow.buildPlot = function() { + var data = new google.visualization.DataTable(); + data.addColumn('string', 'From'); + data.addColumn('string', 'To'); + data.addColumn('number', 'Weight'); + data.addRows($scope.taskflow.data); + + // Sets chart options. + var options = { + width: getWidth()-40, + {#sankey: {#} + {# link: { color: { fill: '#d799ae' } },#} + {# node: { colors: [ '#a61d4c' ]},#} + {# },#} + }; + + // Instantiates and draws our chart, passing in some options. + var chart = new google.visualization.Sankey(document.getElementById('sankey_basic')); + chart.draw(data, options); + + }; + $scope.taskflow.toggle = function() { + $scope.taskflow.is_hidden = false; + $scope.taskflow.get(); + }; +}); + + +app.controller('TaskStatusLogController', function ($scope, $http, $sce) { + $scope.status_log = { + message: $sce.trustAsHtml(' Loading... '), + data: [], + is_hidden: true, + data_table: "", + plot_data: { + columns: [], + ext: { + xs: {}, + size: [getWidth() - 40, 400], + colors: "task_states", + labels: ['Time', 'Task attempt'], + }, + }, + plot: "", + }; + $scope.status_log.get = function () { + $http({ + url: "{% url 'gettaskstatuslog' jeditaskid %}", + method: "GET", + params: {'json':1} + } + ).then(function success(response) { + $scope.status_log.message = ""; + $scope.status_log.data = angular.fromJson(response.data); + if ($scope.status_log.data.length > 0) { + $scope.status_log.buildPlot(); + $scope.status_log.buildDataTable(); + } + else { + $scope.status_log.message = $sce.trustAsHtml("No data received :("); + } + }, + function error(response) { + console.log(response); + $scope.status_log.message = $sce.trustAsHtml('Failed to load data :( '); } - }); - }; - - $scope.taskinfo.jc_plots.toggle = function () { - - if (Object.keys($scope.taskinfo.jc_plots.plot_data).length === 0) $scope.taskinfo.jc_plots.fill(); - - if ($scope.taskinfo.jc_plots.options.category.length > 0) { - ($scope.taskinfo.jc_plots.selection.is_hidden) ? $scope.taskinfo.jc_plots.selection.is_hidden = false : $scope.taskinfo.jc_plots.selection.is_hidden = true; - } - }; - - }) - .directive('jcplotDirective', function ($timeout) { - var template = '
    '; - return { - template: template, - scope: { - plot: '=', - parent: '=', - }, - link: function (scope, element, attrs) { - $timeout(() => { - element.ready(() => { - if (scope.plot.data.details.type === 'pie') { - if ('size' in scope.plot.data.details) {scope.plot.data.details.size[0] = getWidth();} - scope.parent.taskinfo.jc_plots.charts[scope.plot.name + "_chart"] = draw_donut(scope.plot.data.data[scope.parent.taskinfo.jc_plots.selection.category]['columns'], scope.plot.name + "_chart", scope.plot.data.details.title, scope.plot.data.details) - } - else if (scope.plot.data.details.type === 'stack_bar') { - scope.parent.taskinfo.jc_plots.charts[scope.plot.name + "_chart"] = draw_stacked_bar_hist(scope.plot.data.data[scope.parent.taskinfo.jc_plots.selection.category], scope.plot.data.details, scope.plot.name + "_chart"); - } - }); + ); + }; + $scope.status_log.buildDataTable = function () { + $scope.status_log.data_table = $('#taskstatuslogtable').dataTable({ + "lengthMenu": [[10, 20, 50, 100, 200, -1], [10, 20, 50, 100, 200, "All"]], + "paging": true, + "scrollX": true, + "aaSorting": [[1,'asc']], + "data": $scope.status_log.data, + "aoColumns": [ + { + title: "Attempt", + data: "attemptnr", + sDefaultContent: "-", + }, + { + title: "Time", + data: "modiftime_str", + sDefaultContent: "-", + }, + { + title: "Status", + data: "status", + sDefaultContent: "-", + className: 'state', + }, + { + title: "Duration, d:h:m:s", + data: "duration", + sDefaultContent: "-", + }, + { + title: "Modification host", + data: "modificationhost", + sDefaultContent: "-", + }, + { + title: "Reason", + data: "reason", + sDefaultContent: "-", + }, + + ], + "createdRow": function ( row, data, index ) { + $('td', row).eq(2).addClass(data['status'] + '_fill'); + } + }) + }; + $scope.status_log.buildPlot = function() { + if ($scope.status_log.data.length > 0) { + let tmp_obj = {}; + $scope.status_log.plot_data.columns.push(['x', ]); + $scope.status_log.data.forEach((item) => { + tmp_obj[item.status] = [item.status, ]; + $scope.status_log.plot_data.columns[0].push(item.modiftime_str); + }); + $scope.status_log.data.forEach((item) => { + Object.keys(tmp_obj).forEach((key) => { + (item.status === key) ? tmp_obj[key].push(item.attemptnr+1) : tmp_obj[key].push(0); }); + }); + Object.keys(tmp_obj).forEach((key) => { + $scope.status_log.plot_data.columns.push([key, ...tmp_obj[key]]); + }) + } + $scope.status_log.plot = draw_steps($scope.status_log.plot_data.columns, 'tsl_plot', 'Task statuses transition', $scope.status_log.plot_data.ext); + }; + $scope.status_log.toggle = function() { + $scope.status_log.is_hidden = false; + $scope.status_log.get(); + }; +}); + +app.controller('TaskLogsController', function ($scope, $http, $sce) { + $scope.task_logs = { + message: $sce.trustAsHtml(' Loading... '), + data: [], + is_hidden: true, + data_table: "", + plot_data: { + columns: [], + ext: { + xs: {}, + size: [getWidth() - 40, 400], + colors: "task_states", + labels: ['Time', 'Task attempt'], + }, + }, + plot: "", + }; + $scope.task_logs.get = function () { + $http({ + url: "{% url 'gettasklogs' jeditaskid %}", + method: "GET", + params: {'json':1} } - }; - }); - - - app.controller('taskflowController', function ($scope, $http, $sce) { - $scope.taskflow = { - message: $sce.trustAsHtml(' Loading... 
'), - data: [], - is_hidden: true, - plot_data: { - columns: [], - }, - plot: "", - }; - $scope.taskflow.get = function () { - $http({ - url: "{% url 'taskFlowDiagram' jeditaskid %}", - method: "GET", - params: {'json':1} - } - ).then(function success(response) { - $scope.taskflow.message = ""; - $scope.taskflow.data = angular.fromJson(response.data.data); - console.log(response.data.data) - if ($scope.taskflow.data.length > 0) { - $scope.taskflow.buildPlot(); - } - else { - $scope.taskflow.message = $sce.trustAsHtml("No data received :("); - } - }, - function error(response) { - console.log(response); - $scope.taskflow.message = $sce.trustAsHtml('Failed to load data :( '); - } - ); - }; - - $scope.taskflow.buildPlot = function() { - var data = new google.visualization.DataTable(); - data.addColumn('string', 'From'); - data.addColumn('string', 'To'); - data.addColumn('number', 'Weight'); - data.addRows($scope.taskflow.data); - - // Sets chart options. - var options = { - width: getWidth()-40, - {#sankey: {#} - {# link: { color: { fill: '#d799ae' } },#} - {# node: { colors: [ '#a61d4c' ]},#} - {# },#} - }; - - // Instantiates and draws our chart, passing in some options. - var chart = new google.visualization.Sankey(document.getElementById('sankey_basic')); - chart.draw(data, options); - - }; - $scope.taskflow.toggle = function() { - $scope.taskflow.is_hidden = false; - $scope.taskflow.get(); - }; - }); - - - app.controller('TaskStatusLogController', function ($scope, $http, $sce) { - $scope.status_log = { - message: $sce.trustAsHtml(' Loading... '), - data: [], - is_hidden: true, - data_table: "", - plot_data: { - columns: [], - ext: { - xs: {}, - size: [getWidth() - 40, 400], - colors: "task_states", - labels: ['Time', 'Task attempt'], - }, - }, - plot: "", - }; - $scope.status_log.get = function () { - $http({ - url: "{% url 'gettaskstatuslog' jeditaskid %}", - method: "GET", - params: {'json':1} - } - ).then(function success(response) { - $scope.status_log.message = ""; - $scope.status_log.data = angular.fromJson(response.data); - if ($scope.status_log.data.length > 0) { - $scope.status_log.buildPlot(); - $scope.status_log.buildDataTable(); - } - else { - $scope.status_log.message = $sce.trustAsHtml("No data received :("); - } - }, - function error(response) { - console.log(response); - $scope.status_log.message = $sce.trustAsHtml('Failed to load data :( '); - } - ); - }; - $scope.status_log.buildDataTable = function () { - $scope.status_log.data_table = $('#taskstatuslogtable').dataTable({ - "lengthMenu": [[10, 20, 50, 100, 200, -1], [10, 20, 50, 100, 200, "All"]], - "paging": true, - "scrollX": true, - "aaSorting": [[1,'asc']], - "data": $scope.status_log.data, - "aoColumns": [ - { - title: "Attempt", - data: "attemptnr", - sDefaultContent: "-", - }, - { - title: "Time", - data: "modiftime_str", - sDefaultContent: "-", - }, - { - title: "Status", - data: "status", - sDefaultContent: "-", - className: 'state', - }, - { - title: "Duration, d:h:m:s", - data: "duration", - sDefaultContent: "-", - }, - { - title: "Modification host", - data: "modificationhost", - sDefaultContent: "-", - }, - { - title: "Reason", - data: "reason", - sDefaultContent: "-", - }, - - ], - "createdRow": function ( row, data, index ) { - $('td', row).eq(2).addClass(data['status'] + '_fill'); - } - }) - }; - $scope.status_log.buildPlot = function() { - if ($scope.status_log.data.length > 0) { - let tmp_obj = {}; - $scope.status_log.plot_data.columns.push(['x', ]); - $scope.status_log.data.forEach((item) => { - 
tmp_obj[item.status] = [item.status, ]; - $scope.status_log.plot_data.columns[0].push(item.modiftime_str); - }); - $scope.status_log.data.forEach((item) => { - Object.keys(tmp_obj).forEach((key) => { - (item.status === key) ? tmp_obj[key].push(item.attemptnr+1) : tmp_obj[key].push(0); - }); - }); - Object.keys(tmp_obj).forEach((key) => { - $scope.status_log.plot_data.columns.push([key, ...tmp_obj[key]]); - }) + ).then(function success(response) { + $scope.task_logs.message = ""; + $scope.task_logs.data = angular.fromJson(response.data); + if ($scope.task_logs.data.length > 0) { + $scope.task_logs.buildDataTable(); + } + else { + $scope.task_logs.message = $sce.trustAsHtml("No data received :("); } - $scope.status_log.plot = draw_steps($scope.status_log.plot_data.columns, 'tsl_plot', 'Task statuses transition', $scope.status_log.plot_data.ext); - }; - $scope.status_log.toggle = function() { - $scope.status_log.is_hidden = false; - $scope.status_log.get(); - }; - }); - - app.controller('TaskLogsController', function ($scope, $http, $sce) { - $scope.task_logs = { - message: $sce.trustAsHtml(' Loading... '), - data: [], - is_hidden: true, - data_table: "", - plot_data: { - columns: [], - ext: { - xs: {}, - size: [getWidth() - 40, 400], - colors: "task_states", - labels: ['Time', 'Task attempt'], - }, }, - plot: "", - }; - $scope.task_logs.get = function () { - $http({ - url: "{% url 'gettasklogs' jeditaskid %}", - method: "GET", - params: {'json':1} - } - ).then(function success(response) { - $scope.task_logs.message = ""; - $scope.task_logs.data = angular.fromJson(response.data); - if ($scope.task_logs.data.length > 0) { - $scope.task_logs.buildDataTable(); - } - else { - $scope.task_logs.message = $sce.trustAsHtml("No data received :("); - } - }, - function error(response) { - console.log(response); - $scope.task_logs.message = $sce.trustAsHtml('Failed to load data :( '); - } - ); - }; - $scope.task_logs.buildDataTable = function () { - $scope.task_logs.data_table = $('#tasklogstable').dataTable({ - "lengthMenu": [[10, 20, 50, 100, 200, -1], [10, 20, 50, 100, 200, "All"]], - "paging": true, - "scrollX": true, - "aaSorting": [[1,'asc']], - "data": $scope.task_logs.data, - {#rowsGroup: [0],#} - "aoColumns": [ - { - title: "LogName", - data: "logname", - sDefaultContent: "-", - "render": function (data, type, full, meta) { - var link_to_kibana = '' - if (full['logname'] == 'recoverlostfiles') { - link_to_kibana = '' + full['logname'] + ' (panda log)' + '' - } - else { - link_to_kibana = '' + full['logname'] + '' - } - return link_to_kibana + function error(response) { + console.log(response); + $scope.task_logs.message = $sce.trustAsHtml('Failed to load data :( '); + } + ); + }; + $scope.task_logs.buildDataTable = function () { + $scope.task_logs.data_table = $('#tasklogstable').dataTable({ + "lengthMenu": [[10, 20, 50, 100, 200, -1], [10, 20, 50, 100, 200, "All"]], + "paging": true, + "scrollX": true, + "aaSorting": [[1,'asc']], + "data": $scope.task_logs.data, + {#rowsGroup: [0],#} + "aoColumns": [ + { + title: "LogName", + data: "logname", + sDefaultContent: "-", + "render": function (data, type, full, meta) { + var link_to_kibana = '' + if (full['logname'] == 'recoverlostfiles') { + link_to_kibana = '' + full['logname'] + ' (panda log)' + '' } - }, - { - title: "LogLevel", - data: "loglevel", - sDefaultContent: "-", - "render": function (data, type, full, meta) { - var link_to_kibana = '' - if (full['logname'] == 'recoverlostfiles') { - link_to_kibana = '' + full['loglevel'] + '' - } - else 
{ - link_to_kibana = '' + full['loglevel'] + '' - } - return link_to_kibana + else { + link_to_kibana = '' + full['logname'] + '' } - }, - { - title: "#", - data: "lcount", - sDefaultContent: "-", - + return link_to_kibana + } + }, + { + title: "LogLevel", + data: "loglevel", + sDefaultContent: "-", + "render": function (data, type, full, meta) { + var link_to_kibana = '' + if (full['logname'] == 'recoverlostfiles') { + link_to_kibana = '' + full['loglevel'] + '' + } + else { + link_to_kibana = '' + full['loglevel'] + '' + } + return link_to_kibana } + }, + { + title: "#", + data: "lcount", + sDefaultContent: "-", - ], - "createdRow": function ( row, data, index ) { - $('td', row).eq(2).addClass(data['status'] + '_fill'); } - }) - }; - $scope.task_logs.toggle = function() { - $scope.task_logs.is_hidden = false; - $scope.task_logs.get(); - }; - }); - $(document).ready(function () { - var dataset_list = {{ datasets|safe }}; - buildDatasetsTable(dataset_list); - var url = window.location.href; - if (url.indexOf("#plots") > -1) { - togglePlots(); - } - if (url.indexOf("#statuslog") > -1) { - toggleStatusLog(); - } - if (url.indexOf("#tasklogs") > -1) { - toggleTaskLogs(); - } - }); + ], + "createdRow": function ( row, data, index ) { + $('td', row).eq(2).addClass(data['status'] + '_fill'); + } + }) + }; + $scope.task_logs.toggle = function() { + $scope.task_logs.is_hidden = false; + $scope.task_logs.get(); + }; +}); + +$(document).ready(function () { + $(".comment").shorten({showChars: getNCharsShorten(), minHideChars: 250}); + var dataset_list = {{ datasets|safe }}; + buildDatasetsTable(dataset_list); + var url = window.location.href; + if (url.indexOf("#plots") > -1) { + togglePlots(); + } + if (url.indexOf("#statuslog") > -1) { + toggleStatusLog(); + } + if (url.indexOf("#tasklogs") > -1) { + toggleTaskLogs(); + } +}); function togglePlots() { let scope = angular.element(document.getElementById('plots')).scope(); @@ -1280,9 +1292,13 @@
The split rule(s) have been changed for this task:
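The hunk continuing below reworks this column's render callback: a Rucio link is emitted only when the Django session carries a rucio_ui value, and the comma separator is added only when a link already precedes it. A minimal sketch of that pattern; rucioUiBase stands in for the session value, and the hrefs are placeholders because the real template-generated URLs were stripped from this diff:

// Hypothetical sketch of the conditional link building in the render callback.
var rucioUiBase = '';  // stands in for request.session.rucio_ui; empty when the feature is off

function renderDatasetLinks(data, type, row, meta) {
    var links = [];
    if (rucioUiBase.length > 0) {
        // placeholder Rucio UI path, not the monitor's real route
        links.push('<a href="' + rucioUiBase + '/did?name=' + encodeURIComponent(row.datasetname) + '">rucio</a>');
    }
    if (row.type === 'input') {
        links.push('<a href="/placeholder-jobs-url/">jobs</a>');  // placeholder path
    }
    return links.join(', ');  // join() makes the separator bookkeeping unnecessary
}

console.log(renderDatasetLinks(null, 'display', {type: 'input', datasetname: 'mc16.someDataset'}, null));

Collecting the fragments in an array and joining them sidesteps the explicit (links.length > 0) ? ', ' : '' check the template uses.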
    "data": "type", sDefaultContent: "-", "render": function(data, type, row, meta) { - var links = ''; - if (row['type'] == 'input') { - links += ', jobs ' + var links = ''; + if ("{{ request.session.rucio_ui }}".length > 0) { + links += ''; + } + if (row['type'] === 'input') { + links += (links.length > 0) ? ', ': ''; + links += ' jobs ' } return links; } diff --git a/core/templates/taskInfoES.html b/core/templates/taskInfoES.html index 6f7ecefe..0eb44b59 100644 --- a/core/templates/taskInfoES.html +++ b/core/templates/taskInfoES.html @@ -15,6 +15,7 @@ + @@ -336,8 +337,11 @@ "data": "type", sDefaultContent: "-", "render": function(data, type, row, meta) { - var links = ''; - if (row['type'] == 'input') { + var links = ''; + if ("{{ request.session.rucio_ui }}".length > 0) { + links += ''; + } + if (row['type'] === 'input') { links += ', jobs ' } return links; @@ -366,6 +370,7 @@ } $(document).ready(function () { + $(".comment").shorten({showChars: getNCharsShorten(), minHideChars: 250}); var dataset_list = {{ datasets|safe }}; buildDatasetsTable(dataset_list); var url = window.location.href; @@ -397,7 +402,7 @@ Task ID - {% if task.deftreqid %} + {% if task.deftreqid and viewParams.MON_VO == 'ATLAS' %} Request {% endif %} {% if task.reqid != task.jeditaskid and not task.deftreqid %} Jobset{% endif %} @@ -423,9 +428,15 @@ Tracker {% endif %} - {{ task.jeditaskid }} - {% if task.deftreqid %} + + {% if viewParams.MON_VO == 'ATLAS' %} + {{ task.jeditaskid }} + {% else %} + {{ task.jeditaskid }} + {% endif %} + + {% if task.deftreqid and viewParams.MON_VO == 'ATLAS' %} {{ task.deftreqid }} {% endif %} @@ -502,7 +513,7 @@
    Logged status: {{ task.errordialog|safe }}
    {% else %} Nodrop mode {% endif %}
-    {% if vomode == 'atlas' %}
+    {% if viewParams.MON_VO == 'ATLAS' %}
    {% if task.tasktype == 'prod' %} Prod Task view (to manage task)
@@ -785,7 +796,7 @@
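The taskflowController added in this diff feeds [From, To, Weight] rows into google.visualization.Sankey. A minimal standalone sketch of that rendering, assuming the Google Charts loader is already on the page; the element id matches the template's sankey_basic container, while the rows and width here are illustrative only:

// Sketch of the Sankey drawing done in taskflowController.buildPlot().
google.charts.load('current', {packages: ['sankey']});
google.charts.setOnLoadCallback(drawTaskFlow);

function drawTaskFlow() {
    var data = new google.visualization.DataTable();
    data.addColumn('string', 'From');
    data.addColumn('string', 'To');
    data.addColumn('number', 'Weight');
    // hypothetical flow rows; the controller gets these from the server as JSON
    data.addRows([
        ['input_dataset', 'task_1', 10],
        ['task_1', 'output_dataset', 8],
        ['task_1', 'failed', 2]
    ]);
    var options = {width: 600};  // the template computes this from getWidth()
    var chart = new google.visualization.Sankey(document.getElementById('sankey_basic'));
    chart.draw(data, options);
}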
    Warning!
    {% for p in jobparams %}
-    {{ p|safe }}
+    {{ p|safe }}
    {% endfor %}
@@ -803,7 +814,11 @@
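The next hunk changes the job-parameter cell so that an empty string is treated like a missing value, and both render as a dash placeholder instead of a blank cell. The same rule as a hypothetical JS helper, for illustration only (the template expresses it in Django template syntax):

// Hypothetical helper mirroring the template's "None or empty" check.
function formatParamValue(value) {
    // null/undefined and '' both collapse to a visible placeholder,
    // so an unset parameter is never rendered as an empty cell
    return (value === null || value === undefined || value === '') ? '-' : String(value);
}

console.log(formatParamValue(null));   // '-'
console.log(formatParamValue(''));     // '-'
console.log(formatParamValue('8000')); // '8000'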
    Warning!
    {% if p.name != 'jobParameters' and p.name != 'log' %} {{ p.name }}
-    {% if p.value != None %} {{ p.value }} {% endif %}
+
+
+    {% if p.value is not None and p.value != '' %} {{ p.value }} {% else %}—{% endif %}
+
+    {% endif %}
    {% endfor %}
diff --git a/core/templates/taskInfoESNew.html b/core/templates/taskInfoESNew.html
index 5627a604..667d7313 100644
--- a/core/templates/taskInfoESNew.html
+++ b/core/templates/taskInfoESNew.html
@@ -36,7 +36,7 @@
        Task ID
-        {% if task.deftreqid %}
+        {% if task.deftreqid and viewParams.MON_VO == 'ATLAS' %}
        Request
        {% endif %}
        Type
@@ -74,9 +74,15 @@
        {% endif %}
-        {{ task.jeditaskid }}
-        {% if task.deftreqid %}
+
+        {% if viewParams.MON_VO == 'ATLAS' %}
+        {{ task.jeditaskid }}
+        {% else %}
+        {{ task.jeditaskid }}
+        {% endif %}
+
+        {% if task.deftreqid and viewParams.MON_VO == 'ATLAS' %}
+        {{ task.deftreqid }}
+        {% endif %}
@@ -156,7 +162,7 @@
    Logged status: {{ task.errordialog|safe }}
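A recurring pattern in the controllers this diff adds (jc_plots, taskflow, status_log, task_logs) is lazy initialization: data is filled only on the first toggle, chart instances are tracked by name, and a category change destroys and rebuilds them. A distilled sketch of that life cycle under those assumptions, with the c3 draw helpers (draw_donut, draw_stacked_bar_hist) stubbed out:

// Distilled sketch of the lazy toggle/destroy/rebuild life cycle.
var plots = {
    is_hidden: true,
    charts: {},
    plot_data: null,
    fill: function () {
        this.plot_data = {};  // in the template: data rendered server-side, filled once
    },
    build: function () {
        // the real controllers dispatch on item.data.details.type ('pie'/'stack_bar');
        // a stub chart object stands in for the c3 instance here
        this.charts['example_chart'] = {destroy: function () { return null; }};
    },
    destroy: function () {
        var self = this;
        Object.keys(self.charts).forEach(function (name) {
            // c3-style: destroy() the instance and null out the reference
            if (self.charts[name]) self.charts[name] = self.charts[name].destroy();
        });
    },
    rebuild: function () { this.destroy(); this.build(); },
    toggle: function () {
        if (this.plot_data === null) this.fill();  // lazy: fetch only when first opened
        this.is_hidden = !this.is_hidden;
    }
};

plots.toggle();   // first open: fills data and shows the section
plots.build();    // charts drawn once their containers are visible
plots.rebuild();  // e.g. after the category selection changes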