Commit 41672fee authored by Rémi Duraffort
Browse files

job logs: replace TestJob.output_file by logutils functions



This way the exact name of the job log file is only known to the helper. This will
allow having compressed job log files.
Signed-off-by: Rémi Duraffort <remi.duraffort@linaro.org>
parent c3bf98bd
......@@ -20,9 +20,11 @@
from functools import wraps
from simplejson import JSONDecodeError
import yaml
import os
import sys
import xmlrpc.client
import yaml
from django.conf import settings
from django.core.exceptions import PermissionDenied
from django.db.models import Count, Q
......@@ -295,11 +297,14 @@ class SchedulerAPI(ExposedAPI):
except TestJob.DoesNotExist:
raise xmlrpc.client.Fault(404, "Specified job not found.")
output_file = job.output_file()
if output_file:
output_file.seek(offset)
return xmlrpc.client.Binary(output_file.read().encode("UTF-8"))
else:
# Open the logs
output_path = os.path.join(job.output_dir, "output.yaml")
try:
with open(output_path, encoding="utf-8", errors="replace") as f_logs:
if f_logs:
f_logs.seek(offset)
return xmlrpc.client.Binary(f_logs.read().encode("UTF-8"))
except OSError:
raise xmlrpc.client.Fault(404, "Job output not found.")
def all_devices(self):
......
......@@ -68,6 +68,13 @@ def read_logs(dir_name, start=0, end=None):
return f_log.read(end_offset - start_offset).decode("utf-8")
def size_logs(dir_name):
    """Return the size in bytes of the job log file stored in *dir_name*.

    Raises OSError (e.g. FileNotFoundError) when the log file does not
    exist, matching the behaviour of opening the file directly.
    """
    # stat() reports the size without opening a handle just to seek/tell.
    return (pathlib.Path(dir_name) / "output.yaml").stat().st_size
def write_logs(f_log, f_idx, line):
f_idx.write(struct.pack(PACK_FORMAT, f_log.tell()))
f_idx.flush()
......
......@@ -54,6 +54,7 @@ from django_restricted_resource.models import (
from lava_common.exceptions import ConfigurationError
from lava_results_app.utils import export_testcase
from lava_scheduler_app import utils
from lava_scheduler_app.logutils import read_logs
from lava_scheduler_app.managers import (
RestrictedTestJobQuerySet,
GroupObjectPermissionManager,
......@@ -1790,13 +1791,6 @@ class TestJob(RestrictedResource, RestrictedObject):
str(self.id),
)
def output_file(self):
    """Open and return the job's YAML log file, or None when it is absent.

    The file is opened in text mode with undecodable bytes replaced, so
    reads always yield valid unicode. The caller owns the returned handle
    and is responsible for closing it.
    """
    path = os.path.join(self.output_dir, "output.yaml")
    if not os.path.exists(path):
        return None
    return open(path, encoding="utf-8", errors="replace")
failure_tags = models.ManyToManyField(
JobFailureTag, blank=True, related_name="failure_tags"
)
......@@ -2062,9 +2056,8 @@ class TestJob(RestrictedResource, RestrictedObject):
data["token"] = token
# Logs.
output_file = self.output_file()
if output_file and output:
data["log"] = self.output_file().read()
with contextlib.suppress(OSError):
data["log"] = read_logs(self.output_dir)
# Results.
if results:
......
......@@ -79,7 +79,7 @@ from lava_scheduler_app.dbutils import (
testjob_submission,
validate_job,
)
from lava_scheduler_app.logutils import read_logs
from lava_scheduler_app.logutils import size_logs, read_logs
from lava_scheduler_app.templatetags.utils import udecode
from lava.utils.lavatable import LavaView
......@@ -1313,18 +1313,13 @@ def job_detail(request, pk):
}
try:
with open(os.path.join(job.output_dir, "output.yaml"), "r") as f_in:
# Compute the size of the file
f_in.seek(0, 2)
job_file_size = f_in.tell()
if job_file_size >= job.size_limit:
log_data = []
data["size_warning"] = job.size_limit
else:
# Go back to the start and load the file
f_in.seek(0, 0)
log_data = yaml.load(f_in, Loader=yaml.CLoader)
job_file_size = size_logs(job.output_dir)
if job_file_size >= job.size_limit:
log_data = []
data["size_warning"] = job.size_limit
else:
log_data = read_logs(job.output_dir)
log_data = yaml.load(log_data, Loader=yaml.CLoader)
except OSError:
log_data = []
except yaml.YAMLError:
......@@ -1567,9 +1562,8 @@ def job_status(request, pk):
def job_timing(request, pk):
job = get_restricted_job(request.user, pk, request=request)
try:
logs = yaml.load(
open(os.path.join(job.output_dir, "output.yaml")), Loader=yaml.CLoader
)
data = read_logs(job.output_dir)
logs = yaml.load(data, Loader=yaml.CLoader)
except OSError:
raise Http404
......@@ -1688,24 +1682,11 @@ def job_configuration(request, pk):
def job_log_file_plain(request, pk):
    """Serve the job's log file as a plain attachment download.

    Streams the raw YAML log of the job identified by *pk*; raises
    Http404 when the job has no log file on disk.
    """
    job = get_restricted_job(request.user, pk, request=request)
    try:
        # read_logs is the single place that knows the on-disk log layout.
        data = read_logs(job.output_dir)
    except OSError:
        raise Http404
    response = StreamingHttpResponse(data, content_type="application/yaml")
    response["Content-Disposition"] = "attachment; filename=job_%d.log" % job.id
    return response
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.