Compare revisions: calibration/pycalibration
@@ -12,6 +12,7 @@ import webservice # noqa: import not at top of file
from webservice.messages import MigrationError # noqa: import not at top
from webservice.webservice import ( # noqa: import not at top of file
    check_files,
+    check_run_type_skip,
    merge,
    parse_config,
    run_action,
@@ -29,6 +30,14 @@ VALID_BEAMTIME = {
    "id": 772,
}
+FUTURE_BEAMTIME = {
+    "begin_at": (dt.datetime.today() + dt.timedelta(days=2)).isoformat(),
+    "description": None,
+    "end_at": (dt.datetime.today() + dt.timedelta(days=7)).isoformat(),
+    "flg_available": True,
+    "id": 772,
+}
INVALID_BEAMTIME = {
    "begin_at": "1818-05-05T00:00:00.000+02:00",
    "description": "Karl",
@@ -261,6 +270,7 @@ async def test_run_action(mode, cmd, retcode, expected, monkeypatch):
    'proposal_number, action, mock_proposal_status, mock_beamtimes, expected_result',
    [
        (42, 'correct', 'A', [INVALID_BEAMTIME, VALID_BEAMTIME], 'upex-middle'),  # active
+        (42, 'correct', 'A', [INVALID_BEAMTIME, FUTURE_BEAMTIME], 'upex-middle'),  # active (2-day setup window)
        ('42', 'dark', 'R', [INVALID_BEAMTIME, VALID_BEAMTIME], 'upex-high'),  # active
        (404, 'correct', 'FR', [INVALID_BEAMTIME, VALID_BEAMTIME], 'exfel'),  # finished and reviewed
        (404, 'dark', 'CLO', [INVALID_BEAMTIME, VALID_BEAMTIME], 'exfel'),  # closed
@@ -379,3 +389,60 @@ async def test_get_slurm_nice_fails(fp):
    assert await get_slurm_nice('exfel', 'SPB', '202201') == 0
+
+@pytest.mark.parametrize(
+    'run_type, experiment_type_id, should_skip',
+    [
+        ('JF Darks', 11, True),
+        ('AGIPD dark HG', 11, True),
+        ('Darks JF1', 11, True),
+        ('JF1', 11, True),
+        ('Calibration - Dark LG', 11, True),
+        ('Custom dark experiment type', 11, True),
+        ('JF', 0, False),
+        ('Darks', 0, False),
+        ('something', 0, False),
+        ('Darkness experiment', 0, False),
+    ]
+)
+@pytest.mark.asyncio
+async def test_skip_runs(run_type: str, experiment_type_id: int, should_skip: bool):
+    res_run_by_id = mock.Mock()
+    res_run_by_id.status_code = 200
+    res_run_by_id.json = lambda: {
+        "data_groups_repositories": [{"experiment": {"name": run_type, "id": 0}}]
+    }
+
+    res_experiment_by_id = mock.Mock()
+    res_experiment_by_id.status_code = 200
+    res_experiment_by_id.json = lambda: {"experiment_type_id": experiment_type_id}
+
+    client = mock.Mock()
+    client.get_run_by_id_api = mock.Mock(return_value=res_run_by_id)
+    client.get_experiment_by_id_api = mock.Mock(return_value=res_experiment_by_id)
+
+    ret = await check_run_type_skip(client, "correct", 0)
+    assert ret == should_skip
+
+@pytest.mark.parametrize(
+    'return_value, status_code',
+    [
+        ({}, 200), ({}, 404), ({}, 500),
+    ]
+)
+@pytest.mark.asyncio
+async def test_skip_runs_exception(return_value, status_code, caplog):
+    caplog.set_level(logging.INFO)
+
+    response = mock.Mock()
+    response.status_code = status_code
+    response.json = lambda: return_value
+
+    client = mock.Mock()
+    client.get_run_by_id_api = mock.Mock(return_value=response)
+
+    ret = await check_run_type_skip(client, "correct", 0)
+
+    # If there is a KeyError, it should be caught and a warning logged instead
+    assert "run information does not contain expected key" in caplog.text
+    # And `False` should be returned
+    assert ret is False
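Both new tests are self-contained, since the myMdC client is fully mocked, so they can be run on their own. The test-file path below is an assumption; adjust it to wherever this suite lives in the repository:

pytest -v -k "skip_runs" tests/test_webservice.py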
@@ -36,6 +36,14 @@ correct:
    commissioning-penalty: 1250
    commissioning-max-age-days: 3
    job-penalty: 2
+    skip-run-types:
+        [
+            "AGIPD dark (LG|MG|HG)",
+            "Calibration - Dark (LG|MG|HG)",
+            "(Darks )?JF(0|1|2)",
+            "(Low|Medium|High) gain",
+            "(JF|LPD) Darks",
+        ]
    cmd: >-
        python -m xfel_calibrate.calibrate {detector} CORRECT
        --slurm-scheduling {sched_prio}
......
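For context on the new skip-run-types entries: they are unanchored regular expressions, applied to the myMdC run-type name with re.search in check_run_type_skip further down, so a pattern can match anywhere in the name. A minimal standalone sketch of that matching logic, with the pattern list copied from the config above (SKIP_RUN_TYPES and is_skipped are illustrative names, not part of the change):

import re

SKIP_RUN_TYPES = [
    "AGIPD dark (LG|MG|HG)",
    "Calibration - Dark (LG|MG|HG)",
    "(Darks )?JF(0|1|2)",
    "(Low|Medium|High) gain",
    "(JF|LPD) Darks",
]

def is_skipped(run_type: str) -> bool:
    # re.search matches anywhere in the name, so "Some Darks JF1 run" is skipped too
    return any(re.search(pattern, run_type) for pattern in SKIP_RUN_TYPES)

assert is_skipped("Darks JF1")
assert not is_skipped("Darkness experiment")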
class Errors:
    REQUEST_FAILED = "FAILED: request could not be parsed, please contact det-support@xfel.eu"
+    RUN_SKIPPED = "WARN: run at {} is marked to be skipped for calibration."
    REQUEST_MALFORMED = "FAILED: request {} is malformed, please contact det-support@xfel.eu"
    UNKNOWN_ACTION = "FAILED: action {} is not known!, please contact det-support@xfel.eu"
    PATH_NOT_FOUND = "FAILED: run at {} not found!, please contact det-support@xfel.eu"
......
@@ -4,6 +4,7 @@ import os
import shlex
import sqlite3
from datetime import datetime, timezone
+from dateutil.parser import parse as parse_datetime
from http.server import BaseHTTPRequestHandler, HTTPServer
from pathlib import Path
from shutil import copyfileobj
@@ -257,6 +258,8 @@ class RequestHandler(BaseHTTPRequestHandler):
                    f"{in_folder}/r{run:04d}/*{mp}*.h5"):
                tsize += os.stat(f).st_size

+        timestamp = parse_datetime(timestamp).strftime('%Y-%m-%d %H:%M:%S')
+
        last_chars[key] = {"in_path": in_folder,
                           "out_path": out_folder,
                           "runs": runs,
@@ -294,9 +297,10 @@
        pdfs.sort(key=os.path.getmtime, reverse=True)
        pdfs = {p.split("/")[-1]: p for p in pdfs}

+        timestamp = parse_datetime(timestamp).strftime('%Y-%m-%d %H:%M')
        if not any(r[1:3] == (proposal, runs[0]) for r in inst_records):
            inst_records.append((
-                timestamp[:-4], proposal, runs[0], pdfs
+                timestamp, proposal, runs[0], pdfs
            ))
    return last_calib
......
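For context on the two parse_datetime calls above: dateutil.parser.parse accepts the ISO-8601 strings with fractional seconds and UTC offset that myMdC returns, so the display string is produced by strftime rather than by the old, brittle slicing (timestamp[:-4]). A minimal sketch, reusing the timestamp shape from the beamtime fixtures:

from dateutil.parser import parse as parse_datetime

raw = "1818-05-05T00:00:00.000+02:00"  # same shape as the beamtime fixtures above
print(parse_datetime(raw).strftime('%Y-%m-%d %H:%M'))  # -> 1818-05-05 00:00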
@@ -9,12 +9,13 @@ import json
import locale
import logging
import os
+import re
import shlex
import sqlite3
import sys
import urllib.parse
from asyncio import get_event_loop, shield
-from datetime import datetime, timezone
+from datetime import datetime, timezone, timedelta
from getpass import getuser
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple, Union
@@ -580,7 +581,8 @@ async def get_slurm_partition(
    beamtimes = response.json().get('beamtimes', [])
    now = datetime.now().timestamp()
    for beamtime in beamtimes:
-        begin = datetime.fromisoformat(beamtime['begin_at']).timestamp()
+        begin = datetime.fromisoformat(beamtime['begin_at'])
+        begin = (begin - timedelta(days=2)).timestamp()  # 2 days setup time before beamtime
        end = datetime.fromisoformat(beamtime['end_at']).timestamp()
        if begin <= now <= end:
            active_now = True
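Subtracting two days from begin_at widens the active-beamtime window to cover setup time, which is exactly what the new FUTURE_BEAMTIME fixture exercises: a beamtime starting two days from now should already select the upex partition. A minimal standalone sketch of the window arithmetic, using the same values as the fixture:

from datetime import datetime, timedelta

begin_at = (datetime.now() + timedelta(days=2)).isoformat()  # like FUTURE_BEAMTIME
end_at = (datetime.now() + timedelta(days=7)).isoformat()

now = datetime.now().timestamp()
begin = (datetime.fromisoformat(begin_at) - timedelta(days=2)).timestamp()
end = datetime.fromisoformat(end_at).timestamp()
print(begin <= now <= end)  # True: the 2-day setup allowance makes it active already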
@@ -617,6 +619,36 @@ async def get_slurm_partition(
    return partition


+async def check_run_type_skip(
+    mdc: MetadataClient,
+    action: str,
+    run_id: int,
+) -> bool:
+    loop = get_event_loop()
+    res = await shield(
+        loop.run_in_executor(None, mdc.get_run_by_id_api, run_id)
+    )
+
+    try:
+        experiment = res.json()["data_groups_repositories"][0]["experiment"]
+
+        run_type = experiment["name"]
+        if any(re.search(m, run_type) for m in config[action]["skip-run-types"]):
+            return True
+
+        experiment_id = experiment["id"]
+        res_experiment = await shield(
+            loop.run_in_executor(None, mdc.get_experiment_by_id_api, experiment_id)
+        )
+        # Experiment type id 11 is for dark experiments
+        if res_experiment.json()["experiment_type_id"] == 11:
+            return True
+    except KeyError as e:
+        logging.warning(f"mymdc run information does not contain expected key `{e}`")
+
+    return False
+
+
async def get_slurm_nice(partition: str, instrument: str,
                         cycle: Union[int, str], job_penalty: int = 2,
                         commissioning_penalty: int = 1250) -> int:
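check_run_type_skip only needs a client object exposing get_run_by_id_api and, for the experiment-type fallback, get_experiment_by_id_api, plus a loaded config with the skip-run-types entry, so it can be exercised outside the server. A minimal sketch, assuming an already-configured MetadataClient instance named mdc and a hypothetical run id:

import asyncio

async def main():
    skipped = await check_run_type_skip(mdc, "correct", 1234)  # 1234: hypothetical run id
    print("skip calibration:", skipped)

asyncio.run(main())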
@@ -990,6 +1022,12 @@ class ActionsServer:
            await update_mdc_status(self.mdc, 'correct', rid, msg)
            return

+        if await check_run_type_skip(self.mdc, "correct", rid):
+            msg = Errors.RUN_SKIPPED.format(rpath)
+            logging.warning(msg)
+            await update_mdc_status(self.mdc, 'correct', rid, msg)
+            return
+
        ret, _ = await self.launch_jobs(
            [runnr], req_id, detectors, 'correct', instrument, cycle,
            proposal, request_time, rid
......