Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

extra log files: remove legacy #5672

Merged
merged 1 commit into from
Oct 17, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion cylc/flow/data_messages.proto
Original file line number Diff line number Diff line change
Expand Up @@ -131,6 +131,7 @@ message PbRuntime {

// Nodes
message PbJob {
reserved 29; /* see https://github.com/cylc/cylc-flow/pull/5672 */
optional string stamp = 1;
optional string id = 2;
optional int32 submit_num = 3;
Expand All @@ -144,7 +145,6 @@ message PbJob {
optional float execution_time_limit = 14;
optional string platform = 15;
optional string job_log_dir = 17;
repeated string extra_logs = 29;
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@dwsutherland

This field is no longer needed, is it safe to remove a "repeated string"?

Could this cause compatibility problems between cylc-flow and cylc-uiserver at different versions?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think this is fine, since I don't think we ask for this field anywhere.. And as long as we don't reuse 29..

optional string name = 30; /* filter item */
optional string cycle_point = 31; /* filter item */
repeated string messages = 32;
Expand Down
121 changes: 60 additions & 61 deletions cylc/flow/data_messages_pb2.py

Large diffs are not rendered by default.

1 change: 0 additions & 1 deletion cylc/flow/data_store_mgr.py
Original file line number Diff line number Diff line change
Expand Up @@ -1392,7 +1392,6 @@ def insert_job(self, name, cycle_point, status, job_conf):
# Add in log files.
j_buf.job_log_dir = get_task_job_log(
self.schd.workflow, tproxy.cycle_point, tproxy.name, sub_num)
j_buf.extra_logs.extend(job_conf.get('logfiles', []))

self.added[JOBS][j_id] = j_buf
getattr(self.updated[WORKFLOW], JOBS).append(j_id)
Expand Down
1 change: 0 additions & 1 deletion cylc/flow/job_runner_handlers/documentation.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,6 @@ class MyHandler():
* ``job_file_path``
* ``job_runner_command_template``
* ``job_runner_name``
* ``logfiles``
* ``namespace_hierarchy``
* ``param_var``
* ``platform``
Expand Down
5 changes: 0 additions & 5 deletions cylc/flow/network/schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -887,11 +887,6 @@ class Meta:
job_log_dir = String(
description="The path to the job's log directory.",
)
extra_logs = graphene.List(
# TODO: remove. see https://github.com/cylc/cylc-flow/issues/5610
String,
description='Obsolete, do not use.',
)
messages = graphene.List(
String,
description='The list of task messages generated by this job.',
Expand Down
1 change: 0 additions & 1 deletion cylc/flow/scripts/dump.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,6 @@
startedTime
finishedTime
jobLogDir
extraLogs
platform
executionTimeLimit
jobRunnerName
Expand Down
16 changes: 5 additions & 11 deletions cylc/flow/task_job_mgr.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,6 @@
from contextlib import suppress
import json
import os
from copy import deepcopy
from logging import (
CRITICAL,
DEBUG,
Expand Down Expand Up @@ -1190,12 +1189,11 @@ def _prep_submit_task_job(
self._set_retry_timers(itask, rtconfig)

try:
job_conf = {
**self._prep_submit_task_job_impl(
workflow, itask, rtconfig
),
'logfiles': deepcopy(itask.summary['logfiles']),
}
job_conf = self._prep_submit_task_job_impl(
workflow,
itask,
rtconfig,
)
itask.jobs.append(job_conf)

local_job_file_path = get_task_job_job_log(
Expand Down Expand Up @@ -1326,8 +1324,6 @@ def get_job_conf(
'try_num': itask.get_try_num(),
'uuid_str': self.task_events_mgr.uuid_str,
'work_d': rtconfig['work sub-directory'],
# this field is populated retrospectively for regular job subs
'logfiles': [],
}

def get_simulation_job_conf(self, itask, workflow):
Expand Down Expand Up @@ -1359,6 +1355,4 @@ def get_simulation_job_conf(self, itask, workflow):
'try_num': itask.get_try_num(),
'uuid_str': self.task_events_mgr.uuid_str,
'work_d': 'SIMULATION',
# this field is populated retrospectively for regular job subs
'logfiles': [],
}
3 changes: 0 additions & 3 deletions cylc/flow/task_proxy.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,8 +100,6 @@ class TaskProxy:
Jobs' platform by submit number.
label (str):
The .point attribute as string.
logfiles (list):
List of names of (extra) known job log files.
name (str):
Same as the .tdef.name attribute.
started_time (float):
Expand Down Expand Up @@ -225,7 +223,6 @@ def __init__(
'started_time_string': None,
'finished_time': None,
'finished_time_string': None,
'logfiles': [],
'platforms_used': {},
'execution_time_limit': None,
'job_runner_name': None,
Expand Down
1 change: 0 additions & 1 deletion tests/integration/test_data_store_mgr.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,6 @@ def job_config(schd):
'directives': {},
'environment': {},
'param_var': {},
'logfiles': [],
'platform': {'name': 'platform'},
}

Expand Down
1 change: 0 additions & 1 deletion tests/integration/test_graphql.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,6 @@ def job_config(schd):
'directives': {},
'environment': {},
'param_var': {},
'logfiles': [],
'platform': {'name': 'platform'},
}

Expand Down
Loading