Commit 0ca5d75c authored by Neil Williams

Drop the migration status page.

Devices which have not migrated are neither used nor displayed.

Change-Id: Ida323e4e378c54b10e166ff45fc92da4dc5194be
parent b37fe944
Migration Status
################
.. seealso:: :ref:`admin_introduction` and :ref:`migrating_to_pipeline`
Active devices in this context are defined as:
#. Device status is not :term:`RETIRED <retired>`.
#. Device type for this device has ``Display`` set to True.
For health checks, devices where the health check has been disabled are
excluded.
.. seealso:: :ref:`health_checks`
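These filters map directly onto the Django ORM. A minimal sketch of the
queryset, assuming the usual ``lava_scheduler_app.models`` import path (the
view code removed later in this change builds the same filters):

.. code-block:: python

   from django.db.models import Q
   from lava_scheduler_app.models import Device  # assumed import path

   # Active devices: not retired, and the device-type is displayed.
   active = Device.objects.filter(
       ~Q(status=Device.RETIRED),
       Q(device_type__display=True))

   # For the health-check measures, also exclude devices whose
   # device-type has health checks disabled.
   active_health = active.filter(device_type__disable_health_check=False)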
Migration of active V1 devices to V2
====================================
Shows 100% completion when all active devices in the database have
``is_pipeline`` set to True.
If incomplete, a list of active devices still using V1 will be displayed.
.. seealso:: :ref:`django_admin_interface`
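The figure is simply the share of active devices which already have
``is_pipeline`` set. A hedged sketch of the calculation, matching the
arithmetic in the removed view (import path assumed):

.. code-block:: python

   from django.db.models import Q
   from lava_scheduler_app.models import Device  # assumed import path

   active = Device.objects.filter(
       ~Q(status=Device.RETIRED), Q(device_type__display=True))
   active_count = active.count()
   active_v1_count = active.filter(is_pipeline=False).count()

   # With no devices there is nothing to migrate, so report 100%.
   active_percent = 100
   if active_count:
       active_percent = int(
           100 * (active_count - active_v1_count) / active_count)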
Active devices exclusive to V2
==============================
Shows 100% completion when all active devices in the database are set to
:term:`exclusive` in the :term:`device dictionary`.
If incomplete, a list of active devices which are not exclusive to V2 will
be displayed.
.. seealso:: :ref:`admin_device_dictionary`
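Exclusivity is read from the :term:`device dictionary` and exposed as the
``is_exclusive`` property on ``Device``. A sketch of the check, following the
removed view, which collects the *non*-exclusive devices keyed by hostname
(import path assumed):

.. code-block:: python

   from django.db.models import Q
   from lava_scheduler_app.models import Device  # assumed import path

   active = Device.objects.filter(
       ~Q(status=Device.RETIRED), Q(device_type__display=True))
   active_count = active.count()

   # Active V2 devices which still accept V1 submissions.
   not_exclusive = {
       dev.hostname: dev.get_absolute_url()
       for dev in active.filter(is_pipeline=True)
       if not dev.is_exclusive
   }

   exclusion = 100
   if active_count:
       exclusion = int(
           100 * (active_count - len(not_exclusive)) / active_count)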
Migration of active devices to V2 healthchecks
==============================================
Shows 100% completion when all device-types of active devices have had the
health check job definition cleared from the database.
If incomplete, a list of active devices still using healthchecks in the
database will be displayed. The :term:`device type` of these devices will need
to be checked.
.. seealso:: :ref:`django_admin_interface`
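The database measure inspects the ``health_check_job`` field on the
:term:`device type`. A minimal sketch mirroring the removed view, which only
needs to scan the active V1 devices (import path assumed):

.. code-block:: python

   from django.db.models import Q
   from lava_scheduler_app.models import Device  # assumed import path

   active = Device.objects.filter(
       ~Q(status=Device.RETIRED), Q(device_type__display=True))
   active_health = active.filter(device_type__disable_health_check=False)

   # Devices whose device-type still defines a health check in the database.
   db_healthchecks = {}
   for dev in active.filter(is_pipeline=False):
       if dev.device_type.health_check_job not in ['', None]:
           db_healthchecks[dev.hostname] = dev.get_absolute_url()

   hc_percent = 100
   active_h_count = active_health.count()
   if active_h_count:
       hc_percent = int(
           100 * (active_h_count - len(db_healthchecks)) / active_h_count)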
Active devices with healthchecks
================================
Shows 100% completion when health checks exist in
``/etc/lava-server/dispatcher-config/health-checks`` for all active devices in
the database.
If incomplete, a list of active devices without healthchecks will be
displayed, together with the name of the relevant V2 health check which the
device would use.
.. note:: Unlike the other migration checks, this one does **not** need to be
   100% complete before support for V1 submissions is disabled, as long as the
   devices concerned are :term:`exclusive` and the other measures are at 100%.
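This measure looks for a health-check file on disk rather than in the
database, using the ``get_health_check()`` and ``get_extends()`` helpers that
the removed view relies on; the expected filename is derived from the template
the device extends. A sketch (import path assumed):

.. code-block:: python

   from django.db.models import Q
   from lava_scheduler_app.models import Device  # assumed import path

   active_health = Device.objects.filter(
       ~Q(status=Device.RETIRED),
       Q(device_type__display=True),
       Q(device_type__disable_health_check=False))

   # Devices with health checks enabled but no health-check file under
   # /etc/lava-server/dispatcher-config/health-checks.
   no_healthcheck = {}
   templates = {}
   for dev in active_health:
       if not dev.get_health_check():
           no_healthcheck[dev.hostname] = dev.get_absolute_url()
           extends = dev.get_extends()
           templates[dev.hostname] = "%s.yaml" % extends if extends else ''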
@@ -53,8 +53,6 @@
class="glyphicon glyphicon-phone pull-right green"></span> Active Devices</a></li>
<li><a href="{% url 'lava.scheduler.labhealth' %}"><span
class="glyphicon glyphicon-heart pull-right"></span> Devices Health</a></li>
<li><a href="{% url 'lava.scheduler.migration' %}"><span
class="glyphicon glyphicon-bell pull-right red"></span> Migration</a></li>
</ul>
</div>
<div class="col-md-4">
{% extends "layouts/content-bootstrap.html" %}
{% load django_tables2 %}
{% load utils %}
{% block content %}
<h2>Migration Status</h2>
<h3>Migration of active V1 devices to V2</h3>
<p>Number of active devices which have V2 support.</p>
<div class="progress">
<div
{% if active_percent < 50 %}
class="progress-bar progress-bar-danger"
{% elif active_percent < 75 %}
class="progress-bar progress-bar-warning"
{% else %}
class="progress-bar progress-bar-success"
{% endif %}
role="progressbar" aria-valuenow="{{ active_v1_devices }}"
aria-valuemin="0" aria-valuemax="{{ active_devices }}"
style="width: {{ active_percent }}%; min-width: 6em">
{{ active_level }} of {{ active_devices }}
</div>
</div>
{% if active_level != active_devices %}
<h4>Active devices still using V1:</h4>
{% for hostname, dev in v1_problems.items %}
[ <a href="{{ dev }}">{{ hostname }}</a> ]
{% endfor %}
{% endif %}
<h3>Active V2 devices exclusive to V2</h3>
<p>Number of active V2 devices which only support V2.</p>
<div class="progress">
<div
{% if exclusion < 50 %}
class="progress-bar progress-bar-danger"
{% elif exclusion < 75 %}
class="progress-bar progress-bar-warning"
{% else %}
class="progress-bar progress-bar-success"
{% endif %}
role="progressbar" aria-valuenow="{{ exclusive_count }}"
aria-valuemin="0" aria-valuemax="{{ active_devices }}"
style="width: {{ exclusion }}%; min-width: 6em">
{{ exclusive_count }} of {{ active_devices }}
</div>
</div>
{% if exclusive_count != active_devices %}
<h4>Active V2 devices which are not exclusive to V2:</h4>
{% for hostname, dev in exclusive.items %}
[ <a href="{{ dev }}">{{ hostname }}</a> ]
{% endfor %}
{% endif %}
<h3>Migration of active devices to enabled V2 healthchecks</h3>
<p>Number of active devices whose health check is no longer defined in the database.</p>
<div class="progress">
<div
{% if hc_percent < 50 %}
class="progress-bar progress-bar-danger"
{% elif hc_percent < 75 %}
class="progress-bar progress-bar-warning"
{% else %}
class="progress-bar progress-bar-success"
{% endif %}
role="progressbar" aria-valuenow="{{ healthchecks }}"
aria-valuemin="0" aria-valuemax="{{ active_health }}"
style="width: {{ hc_percent }}%; min-width: 6em">
{{ healthchecks }} of {{ active_health }}
</div>
</div>
{% if health_check_level > 0 %}
<h4>Active devices still using healthchecks in the database:</h4>
{% for hostname, dev in db_healthchecks.items %}
[ <a href="{{ dev }}">{{ hostname }}</a> ]
{% endfor %}
{% endif %}
<h3>Active devices with enabled healthchecks</h3>
<p>Number of active devices with an enabled healthcheck.</p>
<div class="progress">
<div
{% if no_hc_percent < 25 %}
class="progress-bar progress-bar-danger"
{% elif no_hc_percent < 50 %}
class="progress-bar progress-bar-warning"
{% else %}
class="progress-bar progress-bar-success"
{% endif %}
role="progressbar" aria-valuenow="{{ nonhc_devices }}"
aria-valuemin="0" aria-valuemax="{{ active_health }}"
style="width: {{ no_hc_percent }}%; min-width: 6em">
{{ nonhc_devices }} of {{ active_health }}
</div>
</div>
{% if no_healthcheck %}
<h4>Active devices without V2 healthchecks:</h4>
<ul>
{% for hostname, dev_url in no_healthcheck.items %}
<li><a href="{{ dev_url }}">{{ hostname }}</a> {{ templates|get_item:hostname }}</li>
{% endfor %}
</ul>
{% endif %}
{% endblock %}
\ No newline at end of file
@@ -15,7 +15,7 @@ from lava_scheduler_app.views import (
job_log_file_plain, job_log_incremental, job_log_pipeline_incremental,
job_output, job_pipeline_incremental, job_pipeline_sections,
job_pipeline_timing, job_resubmit, job_section_log, job_status,
job_submit, job_toggle_favorite, lab_health, migration,
job_submit, job_toggle_favorite, lab_health,
longest_jobs, multinode_job_definition, multinode_job_definition_plain,
mydevice_list, mydevices_health_history_log, myjobs, online_device_list,
passing_health_checks, queue, reports,
@@ -174,5 +174,4 @@ urlpatterns = [
name='lava_scheduler_download_device_type_yaml'),
url(r'^job/(?P<pk>[0-9]+|[0-9]+.[0-9]+)/similarjobs$', similar_jobs,
name='lava.scheduler.job.similar_jobs'),
url(r'^migration$', migration, name='lava.scheduler.migration'),
]
@@ -2880,103 +2880,3 @@ def similar_jobs(request, pk):
"%s?entity=%s&conditions=%s" % (
reverse('lava.results.query_custom'),
entity, conditions))
@BreadCrumb("Migration", parent=index)
def migration(request):
# with no devices, there is nothing to do, so start at 100%
# only once there are devices do the calculations make any sense.
active_percent = 100
exclusion = 100
hc_percent = 100
no_hc_percent = 100
v1_problems = {}
db_healthchecks = {}
no_healthcheck = {}
exclusive = {}
# total active
active = Device.objects.filter(
~Q(status=Device.RETIRED),
Q(device_type__display=True))
active_count = len(active)
active_v1 = Device.objects.filter(
Q(is_pipeline=False), ~Q(status=Device.RETIRED),
Q(device_type__display=True)
)
active_v1_count = len(active_v1)
active_health = active.filter(device_type__disable_health_check=False)
active_h_count = active_health.count()
healthchecks = active_h_count
# iterate over the active V1 devices, including those with disabled healthchecks
for dev in active_v1:
# all active devices which support V1 at all need migration
v1_problems[dev.hostname] = dev.get_absolute_url()
if dev.device_type.health_check_job not in ['', None]:
# all active devices with a database health check need migration.
healthchecks -= 1
db_healthchecks[dev.hostname] = dev.get_absolute_url()
# check V2 exclusive, including disabled healthchecks
v2_devices = Device.objects.filter(
Q(is_pipeline=True), ~Q(status=Device.RETIRED),
Q(device_type__display=True))
for dev in v2_devices:
if not dev.is_exclusive:
# highlight devices which are not exclusive to V2
exclusive[dev.hostname] = dev.get_absolute_url()
# iterate over all devices, excluding disabled healthchecks
for dev in active_health:
hc = dev.get_health_check()
# Health checks are not pulled from the database anymore: only count
# devices that are missing a health-check file.
if not hc:
no_healthcheck[dev.hostname] = dev.get_absolute_url()
if active_count:
active_percent = int(100 * (active_count - active_v1_count) / active_count)
exclusion = int(100 * (active_count - len(exclusive.keys())) / active_count)
if active_h_count:
hc_percent = int(100 * healthchecks / active_h_count)
no_hc_percent = int(100 * (active_h_count - len(no_healthcheck.keys())) / active_h_count)
templates = {}
# V2 devices without a health check
# lookup the appropriate health check filename
for hostname, _ in no_healthcheck.items():
device = Device.objects.get(hostname=hostname)
extends = device.get_extends()
if not extends:
templates[hostname] = ''
continue
templates[hostname] = "%s.yaml" % extends
template = loader.get_template("lava_scheduler_app/migration.html")
return HttpResponse(template.render(
{
# 'migration_table': migration_ptable,
'bread_crumb_trail': BreadCrumbTrail.leading_to(migration),
'v1_problems': v1_problems,
'db_healthchecks': db_healthchecks,
'no_healthcheck': no_healthcheck,
'exclusive_count': active_count - len(exclusive.keys()),
'exclusive': exclusive,
'exclusion': exclusion,
'active_devices': active_count,
'active_health': active_h_count,
'active_v1_devices': active_v1_count,
'active_level': active_count - active_v1_count,
'active_percent': active_percent,
'healthchecks': healthchecks,
'health_check_level': active_h_count - healthchecks,
'hc_percent': hc_percent,
'nonhc_devices': active_h_count - len(no_healthcheck.keys()),
'no_hc_percent': no_hc_percent,
'templates': templates,
'context_help': BreadCrumbTrail.show_help(migration),
},
request=request))