Commit ede0868
Added first test and fixed some parts of patch_metadata
Remove breakpoint

pre-commit
ryuwd authored and aldbr committed Mar 6, 2025
1 parent d8d6f7a commit ede0868
Showing 3 changed files with 71 additions and 6 deletions.
5 changes: 4 additions & 1 deletion diracx-logic/src/diracx/logic/jobs/query.py
@@ -40,7 +40,10 @@ async def search(
     if query_logging_info := ("LoggingInfo" in (body.parameters or [])):
         if body.parameters:
             body.parameters.remove("LoggingInfo")
-        body.parameters = ["JobID"] + (body.parameters or [])
+        if not body.parameters:
+            body.parameters = None
+        else:
+            body.parameters = ["JobID"] + (body.parameters or [])
 
     # TODO: Apply all the job policy stuff properly using user_info
     if not config.Operations["Defaults"].Services.JobMonitoring.GlobalJobsInfo:
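
For context, a minimal standalone sketch of the normalisation this hunk introduces (the helper name and the assertions are illustrative, not part of the change): before the fix, requesting only "LoggingInfo" left body.parameters as ["JobID"], silently narrowing the result to a single column; afterwards it falls back to None, i.e. "return all columns" ("JobID" is still prepended when other columns are requested, presumably so logging records can be matched back to rows).

    # Sketch of the normalisation applied to body.parameters in search().
    def normalize_parameters(parameters):
        """Illustrative helper; mirrors the diff above."""
        query_logging_info = "LoggingInfo" in (parameters or [])
        if query_logging_info:
            if parameters:
                parameters.remove("LoggingInfo")
            if not parameters:
                parameters = None  # nothing else requested: keep "all columns"
            else:
                parameters = ["JobID"] + parameters  # JobID keys the logging info
        return query_logging_info, parameters

    assert normalize_parameters(["LoggingInfo"]) == (True, None)
    assert normalize_parameters(["LoggingInfo", "Status"]) == (True, ["JobID", "Status"])
    assert normalize_parameters(None) == (False, None)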
9 changes: 4 additions & 5 deletions diracx-logic/src/diracx/logic/jobs/status.py
@@ -29,14 +29,13 @@
     VectorSearchSpec,
 )
 from diracx.db.os.job_parameters import JobParametersDB
-from diracx.db.sql.job.db import JobDB
+from diracx.db.sql.job.db import JobDB, _get_columns
+from diracx.db.sql.job.schema import Jobs
 from diracx.db.sql.job_logging.db import JobLoggingDB
 from diracx.db.sql.sandbox_metadata.db import SandboxMetadataDB
 from diracx.db.sql.task_queue.db import TaskQueueDB
 from diracx.logic.jobs.utils import check_and_prepare_job
 from diracx.logic.task_queues.priority import recalculate_tq_shares_for_entity
-from diracx.db.sql.job.db import _get_columns
-from diracx.db.sql.job.schema import Jobs
 
 logger = logging.getLogger(__name__)
@@ -506,12 +505,12 @@ async def set_job_parameters_or_attributes(
         if pname.lower() not in possible_attribute_columns
     }
     # bulk set job attributes
-    await job_db.set_job_attributes_bulk(attr_updates)
+    await job_db.set_job_attributes(attr_updates)
 
     # TODO: can we upsert to multiple documents?
     for job_id, p_updates_ in param_updates.items():
         if p_updates_:
             await job_parameters_db.upsert(
                 int(job_id),
                 p_updates_,
-            )
+            )
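
A hedged sketch of the split feeding the renamed call: keys whose lower-cased names match Jobs table columns are collected into one bulk set_job_attributes update, and the rest are upserted per job into JobParametersDB. The sample input and the contents of possible_attribute_columns are assumptions; only the two call names come from the code above.

    # Sketch only: routing per-job metadata updates between attribute columns
    # (SQL, one bulk call) and parameters (document store, one upsert per job).
    updates = {1001: {"UserPriority": 2, "JobType": "VerySpecialIndeed"}}  # assumed input
    possible_attribute_columns = {"userpriority", "heartbeattime"}  # assumed column set

    attr_updates = {
        job_id: {k: v for k, v in fields.items() if k.lower() in possible_attribute_columns}
        for job_id, fields in updates.items()
    }
    param_updates = {
        job_id: {k: v for k, v in fields.items() if k.lower() not in possible_attribute_columns}
        for job_id, fields in updates.items()
    }

    # attr_updates  -> await job_db.set_job_attributes(attr_updates)        # one bulk call
    # param_updates -> await job_parameters_db.upsert(int(job_id), fields)  # one per job
    assert attr_updates == {1001: {"UserPriority": 2}}
    assert param_updates == {1001: {"JobType": "VerySpecialIndeed"}}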
63 changes: 63 additions & 0 deletions diracx-routers/tests/jobs/test_status.py
@@ -941,3 +941,66 @@ def test_remove_jobs_valid_job_ids(
     # for job_id in valid_job_ids:
     #     r = normal_user_client.get(f"/api/jobs/{job_id}/status")
     #     assert r.status_code == HTTPStatus.NOT_FOUND, r.json()
+
+
+def test_patch_metadata(normal_user_client: TestClient, valid_job_id: int):
+    # Arrange
+    r = normal_user_client.post(
+        "/api/jobs/search",
+        json={
+            "search": [
+                {
+                    "parameter": "JobID",
+                    "operator": "eq",
+                    "value": valid_job_id,
+                }
+            ],
+            "parameters": ["LoggingInfo"],
+        },
+    )
+
+    assert r.status_code == 200, r.json()
+    for j in r.json():
+        assert j["JobID"] == valid_job_id
+        assert j["Status"] == JobStatus.RECEIVED.value
+        assert j["MinorStatus"] == "Job accepted"
+        assert j["ApplicationStatus"] == "Unknown"
+
+    # Act
+    hbt = str(datetime.now(timezone.utc))
+    r = normal_user_client.patch(
+        "/api/jobs/metadata",
+        json={
+            valid_job_id: {
+                "UserPriority": 2,
+                "HeartBeatTime": hbt,
+                # set a parameter
+                "JobType": "VerySpecialIndeed",
+            }
+        },
+    )
+
+    # Assert
+    assert (
+        r.status_code == 204
+    ), "PATCH metadata should return 204 No Content on success"
+    r = normal_user_client.post(
+        "/api/jobs/search",
+        json={
+            "search": [
+                {
+                    "parameter": "JobID",
+                    "operator": "eq",
+                    "value": valid_job_id,
+                }
+            ],
+            "parameters": ["LoggingInfo"],
+        },
+    )
+    assert r.status_code == 200, r.json()
+
+    assert r.json()[0]["JobID"] == valid_job_id
+    assert r.json()[0]["JobType"] == "VerySpecialIndeed"
+    assert datetime.fromisoformat(
+        r.json()[0]["HeartBeatTime"]
+    ) == datetime.fromisoformat(hbt)
+    assert r.json()[0]["UserPriority"] == 2

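One property the HeartBeatTime assertion relies on, checked in isolation below: str() on a timezone-aware datetime is isoformat(sep=" "), which datetime.fromisoformat() inverts losslessly, so the round-trip comparison is exact provided the API echoes the stored timestamp unchanged (an assumption about the server, not verified here).

    from datetime import datetime, timezone

    now = datetime.now(timezone.utc)
    hbt = str(now)  # e.g. "2025-03-06 12:34:56.789012+00:00"
    # str() is shorthand for isoformat(sep=" "); fromisoformat() parses it back exactly.
    assert datetime.fromisoformat(hbt) == now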