diff --git a/src/qtoolkit/io/base.py b/src/qtoolkit/io/base.py index 833d8ac..414afae 100644 --- a/src/qtoolkit/io/base.py +++ b/src/qtoolkit/io/base.py @@ -48,6 +48,8 @@ class BaseSchedulerIO(QTKObject, abc.ABC): shebang: str = "#!/bin/bash" + sanitize_job_name: bool = False + def get_submission_script( self, commands: str | list[str], @@ -106,6 +108,7 @@ def generate_header(self, options: dict | QResources | None) -> str: msg += f" {replacements} instead of '{extra_val}'." raise ValueError(msg) + options = self.sanitize_options(options) unclean_header = template.safe_substitute(options) # Remove lines with leftover $$. clean_header = [] @@ -241,3 +244,10 @@ def _get_jobs_list_cmd( @abc.abstractmethod def parse_jobs_list_output(self, exit_code, stdout, stderr) -> list[QJob]: pass + + def sanitize_options(self, options): + """ + A function to sanitize the values in the options used to generate the + header. Subclasses should implement their own sanitizations. + """ + return options diff --git a/src/qtoolkit/io/pbs.py b/src/qtoolkit/io/pbs.py index eec2ea0..5640869 100644 --- a/src/qtoolkit/io/pbs.py +++ b/src/qtoolkit/io/pbs.py @@ -117,6 +117,10 @@ def parse_job_output(self, exit_code, stdout, stderr) -> QJob | None: def _get_qstat_base_command(self) -> list[str]: return ["qstat", "-f"] + def _get_job_cmd(self, job_id: str): + cmd = f"qstat -f {job_id}" + return cmd + def _get_job_ids_flag(self, job_ids_str: str) -> str: return job_ids_str @@ -251,3 +255,9 @@ def _convert_str_to_time(time_str: str | None): raise OutputParsingError() return time[3] * 86400 + time[2] * 3600 + time[1] * 60 + time[0] + + def sanitize_options(self, options): + if "job_name" in options: + options = dict(options) + options["job_name"] = re.sub(r"[^a-zA-Z0-9_\-+.]", "_", options["job_name"]) + return options diff --git a/src/qtoolkit/io/pbs_base.py b/src/qtoolkit/io/pbs_base.py index d414dfa..21d2bc9 100644 --- a/src/qtoolkit/io/pbs_base.py +++ b/src/qtoolkit/io/pbs_base.py @@ -226,7 +226,6 @@ def _convert_qresources(self, resources: QResources) -> dict: return header_dict - @abc.abstractmethod def _add_soft_walltime(self, header_dict: dict, resources: QResources): """Add soft_walltime if required by child classes (SGE).""" diff --git a/src/qtoolkit/io/sge.py b/src/qtoolkit/io/sge.py index 19bb38c..805dae2 100644 --- a/src/qtoolkit/io/sge.py +++ b/src/qtoolkit/io/sge.py @@ -364,3 +364,9 @@ def supported_qresources_keys(self) -> list: supported = super().supported_qresources_keys supported += ["memory_per_thread", "gpus_per_job"] return supported + + def sanitize_options(self, options): + if "job_name" in options: + options = dict(options) + options["job_name"] = re.sub(r"[\s/@*\\:]", "_", options["job_name"]) + return options diff --git a/tests/io/test_pbs.py b/tests/io/test_pbs.py new file mode 100644 index 0000000..87343f3 --- /dev/null +++ b/tests/io/test_pbs.py @@ -0,0 +1,317 @@ +from datetime import timedelta +from pathlib import Path + +import pytest +from monty.serialization import loadfn + +from qtoolkit.core.data_objects import ProcessPlacement, QResources, QState +from qtoolkit.core.exceptions import OutputParsingError, UnsupportedResourcesError +from qtoolkit.io.pbs import PBSIO, PBSState + +TEST_DIR = Path(__file__).resolve().parents[1] / "test_data" +submit_ref_file = TEST_DIR / "io" / "pbs" / "parse_submit_output_inout.yaml" +in_out_submit_ref_list = loadfn(submit_ref_file) +cancel_ref_file = TEST_DIR / "io" / "pbs" / "parse_cancel_output_inout.yaml" +in_out_cancel_ref_list = loadfn(cancel_ref_file) 
+job_ref_file = TEST_DIR / "io" / "pbs" / "parse_job_output_inout.yaml"
+in_out_job_ref_list = loadfn(job_ref_file)
+
+
+@pytest.fixture(scope="module")
+def pbs_io():
+    return PBSIO()
+
+
+@pytest.fixture()  # scope="session")
+def maximalist_qresources_pbs():
+    """A set of QResources options that try to make use of most features"""
+    from qtoolkit.core.data_objects import QResources
+
+    return QResources(
+        queue_name="test_queue",
+        job_name="test_job",
+        nodes=1,
+        processes=1,
+        processes_per_node=1,
+        threads_per_process=1,
+        time_limit=100,
+        account="test_account",
+        qos="test_qos",
+        priority=1,
+        output_filepath="test_output_filepath",
+        error_filepath="test_error_filepath",
+        process_placement="no_constraints",
+        email_address="test_email_address@email.address",
+        rerunnable=True,
+        project="test_project",
+        njobs=1,
+    )
+
+
+class TestPBSState:
+    @pytest.mark.parametrize("pbs_state", [s for s in PBSState])
+    def test_qstate(self, pbs_state):
+        assert isinstance(pbs_state.qstate, QState)
+
+    def test_instance(self):
+        assert PBSState("H") == PBSState.HELD
+        assert PBSState("R") == PBSState.RUNNING
+        assert PBSState("Q") == PBSState.QUEUED
+        assert PBSState("E") == PBSState.EXITING
+
+
+class TestPBSIO:
+    @pytest.mark.parametrize("in_out_ref", in_out_submit_ref_list)
+    def test_parse_submit_output(self, pbs_io, in_out_ref, test_utils):
+        parse_cmd_output, sr_ref = test_utils.inkwargs_outref(
+            in_out_ref, inkey="parse_submit_kwargs", outkey="submission_result_ref"
+        )
+        sr = pbs_io.parse_submit_output(**parse_cmd_output)
+        print(sr, sr_ref)
+        assert sr == sr_ref
+        sr = pbs_io.parse_submit_output(
+            exit_code=parse_cmd_output["exit_code"],
+            stdout=bytes(parse_cmd_output["stdout"], "utf-8"),
+            stderr=bytes(parse_cmd_output["stderr"], "utf-8"),
+        )
+        assert sr == sr_ref
+        sr = pbs_io.parse_submit_output(
+            exit_code=parse_cmd_output["exit_code"],
+            stdout=bytes(parse_cmd_output["stdout"], "ascii"),
+            stderr=bytes(parse_cmd_output["stderr"], "ascii"),
+        )
+        assert sr == sr_ref
+
+    @pytest.mark.parametrize("in_out_ref", in_out_cancel_ref_list)
+    def test_parse_cancel_output(self, pbs_io, in_out_ref, test_utils):
+        parse_cmd_output, cr_ref = test_utils.inkwargs_outref(
+            in_out_ref, inkey="parse_cancel_kwargs", outkey="cancel_result_ref"
+        )
+        cr = pbs_io.parse_cancel_output(**parse_cmd_output)
+        assert cr == cr_ref
+        cr = pbs_io.parse_cancel_output(
+            exit_code=parse_cmd_output["exit_code"],
+            stdout=bytes(parse_cmd_output["stdout"], "utf-8"),
+            stderr=bytes(parse_cmd_output["stderr"], "utf-8"),
+        )
+        assert cr == cr_ref
+        cr = pbs_io.parse_cancel_output(
+            exit_code=parse_cmd_output["exit_code"],
+            stdout=bytes(parse_cmd_output["stdout"], "ascii"),
+            stderr=bytes(parse_cmd_output["stderr"], "ascii"),
+        )
+        assert cr == cr_ref
+
+    @pytest.mark.parametrize("in_out_ref", in_out_job_ref_list)
+    def test_parse_job_output(self, pbs_io, in_out_ref, test_utils):
+        parse_cmd_output, job_ref = test_utils.inkwargs_outref(
+            in_out_ref, inkey="parse_job_kwargs", outkey="job_ref"
+        )
+        if "stderr" not in parse_cmd_output:
+            parse_cmd_output["stderr"] = ""
+        job = pbs_io.parse_job_output(**parse_cmd_output)
+        assert job == job_ref
+        job = pbs_io.parse_job_output(
+            exit_code=parse_cmd_output["exit_code"],
+            stdout=bytes(parse_cmd_output["stdout"], "utf-8"),
+            stderr=bytes(parse_cmd_output["stderr"], "utf-8"),
+        )
+        assert job == job_ref
+        job = pbs_io.parse_job_output(
+            exit_code=parse_cmd_output["exit_code"],
+            stdout=bytes(parse_cmd_output["stdout"], "ascii"),
+
stderr=bytes(parse_cmd_output["stderr"], "ascii"), + ) + assert job == job_ref + + def test_get_job_cmd(self, pbs_io): + cmd = pbs_io._get_job_cmd(3) + assert cmd == "qstat -f 3" + cmd = pbs_io._get_job_cmd("56") + assert cmd == "qstat -f 56" + + def test_get_jobs_list_cmd(self, pbs_io): + with pytest.raises( + ValueError, match=r"Cannot query by user and job\(s\) in PBS" + ): + pbs_io._get_jobs_list_cmd(job_ids=["1"], user="johndoe") + cmd = pbs_io._get_jobs_list_cmd(user="johndoe") + assert cmd == "qstat -f -u johndoe" + cmd = pbs_io._get_jobs_list_cmd(job_ids=["1", "3", "56", "15"]) + assert cmd == "qstat -f 1,3,56,15" + cmd = pbs_io._get_jobs_list_cmd(job_ids=["1"]) + assert cmd == "qstat -f 1" + + def test_convert_str_to_time(self, pbs_io): + time_seconds = pbs_io._convert_str_to_time("10:51:13") + assert time_seconds == 39073 + time_seconds = pbs_io._convert_str_to_time("02:10:02") + assert time_seconds == 7802 + time_seconds = pbs_io._convert_str_to_time("10:02") + assert time_seconds == 602 + time_seconds = pbs_io._convert_str_to_time("45") + assert time_seconds == 45 + + with pytest.raises(OutputParsingError): + pbs_io._convert_str_to_time("2:10:a") + + def test_convert_memory_str(self, pbs_io): + assert isinstance(pbs_io, PBSIO) + memory_kb = pbs_io._convert_memory_str(None) + assert memory_kb is None + memory_kb = pbs_io._convert_memory_str("") + assert memory_kb is None + + memory_kb = pbs_io._convert_memory_str("12mb") + assert memory_kb == 12288 + memory_kb = pbs_io._convert_memory_str("13kb") + assert memory_kb == 13 + memory_kb = pbs_io._convert_memory_str("5gb") + assert memory_kb == 5242880 + memory_kb = pbs_io._convert_memory_str("1tb") + assert memory_kb == 1073741824 + + with pytest.raises(OutputParsingError): + pbs_io._convert_memory_str("aT") + + def test_convert_time_to_str(self, pbs_io): + time_str = pbs_io._convert_time_to_str(10) + assert time_str == "0:0:10" + time_str = pbs_io._convert_time_to_str(39073) + assert time_str == "10:51:13" + time_str = pbs_io._convert_time_to_str(7802) + assert time_str == "2:10:2" + time_str = pbs_io._convert_time_to_str(602) + assert time_str == "0:10:2" + + time_str = pbs_io._convert_time_to_str(timedelta(seconds=39073)) + assert time_str == "10:51:13" + time_str = pbs_io._convert_time_to_str( + timedelta(hours=15, minutes=19, seconds=32) + ) + assert time_str == "15:19:32" + + # test float + time_str = pbs_io._convert_time_to_str(602.0) + assert time_str == "0:10:2" + + # test negative + # negative time makes no sense and should not be passed. 
this test is just to be alerted
+        # if the output for negative numbers changes
+        time_str = pbs_io._convert_time_to_str(-10)
+        assert time_str == "-1:59:50"
+
+    def test_check_convert_qresources(self, pbs_io):
+        res = QResources(
+            queue_name="myqueue",
+            job_name="myjob",
+            priority=1,
+            output_filepath="someoutputpath",
+            error_filepath="someerrorpath",
+            njobs=4,
+            time_limit=39073,
+            process_placement=ProcessPlacement.EVENLY_DISTRIBUTED,
+            nodes=4,
+            processes_per_node=3,
+            threads_per_process=2,
+            email_address="john.doe@submit.qtk",
+            scheduler_kwargs={"tata": "toto", "titi": "tutu"},
+        )
+        header_dict = pbs_io.check_convert_qresources(resources=res)
+        assert header_dict == {
+            "queue": "myqueue",
+            "job_name": "myjob",
+            "place": "scatter",
+            "priority": 1,
+            "qout_path": "someoutputpath",
+            "qerr_path": "someerrorpath",
+            "array": "1-4",
+            "walltime": "10:51:13",
+            "select": "select=4:ncpus=6:mpiprocs=3:ompthreads=2",
+            "mail_user": "john.doe@submit.qtk",
+            "mail_type": "abe",
+            "tata": "toto",
+            "titi": "tutu",
+        }
+
+        res = QResources(
+            time_limit=39073,
+            processes=24,
+        )
+        header_dict = pbs_io.check_convert_qresources(resources=res)
+        assert header_dict == {
+            "walltime": "10:51:13",
+            "select": "select=24",  # also not sure about this
+        }
+
+        res = QResources(
+            njobs=1,
+            processes=24,
+        )
+        header_dict = pbs_io.check_convert_qresources(resources=res)
+        assert header_dict == {
+            "select": "select=24",
+        }
+
+        res = QResources(
+            processes=5,
+            rerunnable=True,
+        )
+        with pytest.raises(
+            UnsupportedResourcesError, match=r"Keys not supported: rerunnable"
+        ):
+            pbs_io.check_convert_qresources(res)
+
+    def test_submission_script(self, pbs_io, maximalist_qresources_pbs):
+        # remove options not supported by PBS
+        maximalist_qresources_pbs.rerunnable = None
+        maximalist_qresources_pbs.project = None
+        maximalist_qresources_pbs.account = None
+        maximalist_qresources_pbs.qos = None
+        maximalist_qresources_pbs.process_placement = (
+            ProcessPlacement.EVENLY_DISTRIBUTED
+        )
+
+        # Set `processes` to None to avoid the conflict
+        maximalist_qresources_pbs.processes = None
+
+        # generate the PBS submission script
+        script_qresources = pbs_io.get_submission_script(
+            commands=["ls -l"], options=maximalist_qresources_pbs
+        )
+
+        # assert the correctness of the generated script
+        assert (
+            script_qresources.split("\n")
+            == """#!/bin/bash
+
+#PBS -q test_queue
+#PBS -N test_job
+#PBS -l select=1:ncpus=1:mpiprocs=1
+#PBS -l walltime=0:1:40
+#PBS -l place=scatter
+#PBS -M test_email_address@email.address
+#PBS -m abe
+#PBS -o test_output_filepath
+#PBS -e test_error_filepath
+#PBS -p 1
+ls -l""".split(
+                "\n"
+            )
+        )
+
+    def test_sanitize_options(self, pbs_io):
+        script = pbs_io.get_submission_script(
+            commands=["ls -l"], options={"job_name": "test-_@/*"}
+        )
+        assert "#PBS -N test-____" in script
+
+        script = pbs_io.get_submission_script(
+            commands=["ls -l"], options={"job_name": "test-_test"}
+        )
+        assert "#PBS -N test-_test" in script
+
+        script = pbs_io.get_submission_script(
+            commands=["ls -l"], options={"job_name": "test -_!#$test"}
+        )
+        assert "#PBS -N test_-____test" in script
diff --git a/tests/io/test_sge.py b/tests/io/test_sge.py
index efe354e..622a515 100644
--- a/tests/io/test_sge.py
+++ b/tests/io/test_sge.py
@@ -276,3 +276,19 @@ def test_submission_script(self, sge_io, maximalist_qresources):
                 "\n"
             )
         )
+
+    def test_sanitize_options(self, sge_io):
+        script = sge_io.get_submission_script(
+            commands=["ls -l"], options={"job_name": "test-_@/*"}
+        )
+        assert "#$ -N test-____" in
script + + script = sge_io.get_submission_script( + commands=["ls -l"], options={"job_name": "test-_test"} + ) + assert "#$ -N test-_test" in script + + script = sge_io.get_submission_script( + commands=["ls -l"], options={"job_name": "test -_!#$test"} + ) + assert "#$ -N test_-_!#$test" in script diff --git a/tests/test_data/io/pbs/create_parse_cancel_output_inout.py b/tests/test_data/io/pbs/create_parse_cancel_output_inout.py new file mode 100644 index 0000000..c9a147d --- /dev/null +++ b/tests/test_data/io/pbs/create_parse_cancel_output_inout.py @@ -0,0 +1,105 @@ +import json + +import yaml + +from qtoolkit.io.pbs import PBSIO + +pbs_io = PBSIO() + +mylist = [] + +# First case: successful termination +return_code = 0 +stdout = b"" +stderr = b"" + +cr = pbs_io.parse_cancel_output(exit_code=return_code, stdout=stdout, stderr=stderr) + +a = { + "parse_cancel_kwargs": json.dumps( + {"exit_code": return_code, "stdout": stdout.decode(), "stderr": stderr.decode()} + ), + "cancel_result_ref": json.dumps(cr.as_dict()), +} +mylist.append(a) + +# Second case: no job identification provided +return_code = 1 +stdout = b"" +stderr = b"""usage: + qdel [-W force|suppress_email=X] [-x] job_identifier... + qdel --version +""" + +cr = pbs_io.parse_cancel_output(exit_code=return_code, stdout=stdout, stderr=stderr) + +a = { + "parse_cancel_kwargs": json.dumps( + {"exit_code": return_code, "stdout": stdout.decode(), "stderr": stderr.decode()} + ), + "cancel_result_ref": json.dumps(cr.as_dict()), +} +mylist.append(a) + +# Third case: access/permission denied +return_code = 210 +stdout = b"" +stderr = b"qdel: Unauthorized Request 210\n" + +cr = pbs_io.parse_cancel_output(exit_code=return_code, stdout=stdout, stderr=stderr) + +a = { + "parse_cancel_kwargs": json.dumps( + {"exit_code": return_code, "stdout": stdout.decode(), "stderr": stderr.decode()} + ), + "cancel_result_ref": json.dumps(cr.as_dict()), +} +mylist.append(a) + +# Fourth case: invalid job id +return_code = 1 +stdout = b"" +stderr = b"qdel: illegally formed job identifier: a\n" + +cr = pbs_io.parse_cancel_output(exit_code=return_code, stdout=stdout, stderr=stderr) + +a = { + "parse_cancel_kwargs": json.dumps( + {"exit_code": return_code, "stdout": stdout.decode(), "stderr": stderr.decode()} + ), + "cancel_result_ref": json.dumps(cr.as_dict()), +} +mylist.append(a) + +# Fifth case: job already completed +return_code = 1 +stdout = b"" +stderr = b"qdel: Job has finished 8\n" + +cr = pbs_io.parse_cancel_output(exit_code=return_code, stdout=stdout, stderr=stderr) + +a = { + "parse_cancel_kwargs": json.dumps( + {"exit_code": return_code, "stdout": stdout.decode(), "stderr": stderr.decode()} + ), + "cancel_result_ref": json.dumps(cr.as_dict()), +} +mylist.append(a) + +# Sixth case: unkwnown job id +return_code = 1 +stdout = b"" +stderr = b"qdel: Unknown Job Id 120\n" + +cr = pbs_io.parse_cancel_output(exit_code=return_code, stdout=stdout, stderr=stderr) + +a = { + "parse_cancel_kwargs": json.dumps( + {"exit_code": return_code, "stdout": stdout.decode(), "stderr": stderr.decode()} + ), + "cancel_result_ref": json.dumps(cr.as_dict()), +} +mylist.append(a) + +with open("parse_cancel_output_inout.yaml", "w") as f: + yaml.dump(mylist, f, sort_keys=False) diff --git a/tests/test_data/io/pbs/create_parse_job_output_inout.py b/tests/test_data/io/pbs/create_parse_job_output_inout.py new file mode 100644 index 0000000..4e293b4 --- /dev/null +++ b/tests/test_data/io/pbs/create_parse_job_output_inout.py @@ -0,0 +1,88 @@ +import json + +import yaml + +from 
qtoolkit.io.pbs import PBSIO + +pbs_io = PBSIO() + +mylist = [] + +# First case: successful job parsing +return_code = 0 +stdout = b"""Job Id: 14 + Job_Name = myscript_1 + Job_Owner = testu@f41a0fbae027 + resources_used.cpupercent = 0 + resources_used.cput = 00:00:00 + resources_used.mem = 0kb + resources_used.ncpus = 1 + resources_used.vmem = 0kb + resources_used.walltime = 00:00:00 + job_state = R + queue = workq + server = f41a0fbae027 + Checkpoint = u + ctime = Sun Dec 29 20:13:12 2024 + Error_Path = f41a0fbae027:/home/testu/myscript_1.e14 + exec_host = f41a0fbae027/0 + exec_vnode = (f41a0fbae027:ncpus=1) + Hold_Types = n + Join_Path = n + Keep_Files = n + Mail_Points = a + mtime = Sun Dec 29 20:13:14 2024 + Output_Path = f41a0fbae027:/home/testu/myscript_1.o14 + Priority = 0 + qtime = Sun Dec 29 20:13:12 2024 + Rerunable = True + Resource_List.ncpus = 1 + Resource_List.nodect = 1 + Resource_List.nodes = 1:ppn=1 + Resource_List.place = scatter + Resource_List.select = 1:ncpus=1 + Resource_List.walltime = 01:00:00 + stime = Sun Dec 29 20:13:12 2024 + session_id = 1534 + Shell_Path_List = /bin/bash + jobdir = /home/testu + substate = 42 + Variable_List = PBS_O_HOME=/home/testu,PBS_O_LANG=C.UTF-8, + PBS_O_LOGNAME=testu, + PBS_O_PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bi + n:/usr/games:/usr/local/games:/snap/bin:/opt/pbs/bin, + PBS_O_SHELL=/bin/bash,PBS_O_WORKDIR=/home/testu,PBS_O_SYSTEM=Linux, + PBS_O_QUEUE=workq,PBS_O_HOST=f41a0fbae027 + comment = Job run at Sun Dec 29 at 20:13 on (f41a0fbae027:ncpus=1) + etime = Sun Dec 29 20:13:12 2024 + run_count = 1 + Submit_arguments = test_submit.sh + project = _pbs_project_default + Submit_Host = f41a0fbae027 +""" +stderr = b"" +job = pbs_io.parse_job_output(exit_code=return_code, stdout=stdout, stderr=stderr) +a = { + "parse_job_kwargs": json.dumps( + {"exit_code": return_code, "stdout": stdout.decode(), "stderr": stderr.decode()} + ), + "job_ref": json.dumps(job.as_dict()), +} +mylist.append(a) + + +# Second case: empty stdout and stderr +return_code = 0 +stdout = b"" +stderr = b"" +job = pbs_io.parse_job_output(exit_code=return_code, stdout=stdout, stderr=stderr) +a = { + "parse_job_kwargs": json.dumps( + {"exit_code": return_code, "stdout": stdout.decode(), "stderr": stderr.decode()} + ), + "job_ref": json.dumps(job.as_dict() if job is not None else None), +} +mylist.append(a) + +with open("parse_job_output_inout.yaml", "w") as f: + yaml.dump(mylist, f, sort_keys=False) diff --git a/tests/test_data/io/pbs/create_parse_submit_output_inout.py b/tests/test_data/io/pbs/create_parse_submit_output_inout.py new file mode 100644 index 0000000..d8105e8 --- /dev/null +++ b/tests/test_data/io/pbs/create_parse_submit_output_inout.py @@ -0,0 +1,45 @@ +import json + +import yaml + +from qtoolkit.io.pbs import PBSIO + +pbs_io = PBSIO() + +mylist = [] + +# First case: invalid queue specified +return_code = 1 +stdout = b"" +stderr = b"qsub: Unknown queue\n" + +sr = pbs_io.parse_submit_output( + exit_code=return_code, stdout=stdout.decode(), stderr=stderr.decode() +) + +a = { + "parse_submit_kwargs": json.dumps( + {"exit_code": return_code, "stdout": stdout.decode(), "stderr": stderr.decode()} + ), + "submission_result_ref": json.dumps(sr.as_dict()), +} +mylist.append(a) + +# Second case: successful submission +return_code = 0 +stdout = b"24\n" +stderr = b"" +sr = pbs_io.parse_submit_output( + exit_code=return_code, stdout=stdout.decode(), stderr=stderr.decode() +) +a = { + "parse_submit_kwargs": json.dumps( + {"exit_code": return_code, 
"stdout": stdout.decode(), "stderr": stderr.decode()} + ), + "submission_result_ref": json.dumps(sr.as_dict()), +} +mylist.append(a) + + +with open("parse_submit_output_inout.yaml", "w") as f: + yaml.dump(mylist, f, sort_keys=False) diff --git a/tests/test_data/io/pbs/parse_cancel_output_inout.yaml b/tests/test_data/io/pbs/parse_cancel_output_inout.yaml new file mode 100644 index 0000000..996051b --- /dev/null +++ b/tests/test_data/io/pbs/parse_cancel_output_inout.yaml @@ -0,0 +1,42 @@ +- parse_cancel_kwargs: '{"exit_code": 0, "stdout": "", "stderr": ""}' + cancel_result_ref: '{"@module": "qtoolkit.core.data_objects", "@class": "CancelResult", + "@version": "0.1.5.post38+g62b683f.d20241229", "job_id": null, "step_id": null, + "exit_code": 0, "stdout": "", "stderr": "", "status": {"@module": "qtoolkit.core.data_objects", + "@class": "CancelStatus", "@version": "0.1.5.post38+g62b683f.d20241229", "value": + "SUCCESSFUL"}}' +- parse_cancel_kwargs: '{"exit_code": 1, "stdout": "", "stderr": "usage:\n qdel + [-W force|suppress_email=X] [-x] job_identifier...\n qdel --version\n"}' + cancel_result_ref: '{"@module": "qtoolkit.core.data_objects", "@class": "CancelResult", + "@version": "0.1.5.post38+g62b683f.d20241229", "job_id": null, "step_id": null, + "exit_code": 1, "stdout": "", "stderr": "usage:\n qdel [-W force|suppress_email=X] + [-x] job_identifier...\n qdel --version\n", "status": {"@module": "qtoolkit.core.data_objects", + "@class": "CancelStatus", "@version": "0.1.5.post38+g62b683f.d20241229", "value": + "FAILED"}}' +- parse_cancel_kwargs: '{"exit_code": 210, "stdout": "", "stderr": "qdel: Unauthorized + Request 210\n"}' + cancel_result_ref: '{"@module": "qtoolkit.core.data_objects", "@class": "CancelResult", + "@version": "0.1.5.post38+g62b683f.d20241229", "job_id": null, "step_id": null, + "exit_code": 210, "stdout": "", "stderr": "qdel: Unauthorized Request 210\n", + "status": {"@module": "qtoolkit.core.data_objects", "@class": "CancelStatus", + "@version": "0.1.5.post38+g62b683f.d20241229", "value": "FAILED"}}' +- parse_cancel_kwargs: '{"exit_code": 1, "stdout": "", "stderr": "qdel: illegally + formed job identifier: a\n"}' + cancel_result_ref: '{"@module": "qtoolkit.core.data_objects", "@class": "CancelResult", + "@version": "0.1.5.post38+g62b683f.d20241229", "job_id": null, "step_id": null, + "exit_code": 1, "stdout": "", "stderr": "qdel: illegally formed job identifier: + a\n", "status": {"@module": "qtoolkit.core.data_objects", "@class": "CancelStatus", + "@version": "0.1.5.post38+g62b683f.d20241229", "value": "FAILED"}}' +- parse_cancel_kwargs: '{"exit_code": 1, "stdout": "", "stderr": "qdel: Job has finished + 8\n"}' + cancel_result_ref: '{"@module": "qtoolkit.core.data_objects", "@class": "CancelResult", + "@version": "0.1.5.post38+g62b683f.d20241229", "job_id": null, "step_id": null, + "exit_code": 1, "stdout": "", "stderr": "qdel: Job has finished 8\n", "status": + {"@module": "qtoolkit.core.data_objects", "@class": "CancelStatus", "@version": + "0.1.5.post38+g62b683f.d20241229", "value": "FAILED"}}' +- parse_cancel_kwargs: '{"exit_code": 1, "stdout": "", "stderr": "qdel: Unknown Job + Id 120\n"}' + cancel_result_ref: '{"@module": "qtoolkit.core.data_objects", "@class": "CancelResult", + "@version": "0.1.5.post38+g62b683f.d20241229", "job_id": null, "step_id": null, + "exit_code": 1, "stdout": "", "stderr": "qdel: Unknown Job Id 120\n", "status": + {"@module": "qtoolkit.core.data_objects", "@class": "CancelStatus", "@version": + "0.1.5.post38+g62b683f.d20241229", "value": 
"FAILED"}}' diff --git a/tests/test_data/io/pbs/parse_job_output_inout.yaml b/tests/test_data/io/pbs/parse_job_output_inout.yaml new file mode 100644 index 0000000..f02ff13 --- /dev/null +++ b/tests/test_data/io/pbs/parse_job_output_inout.yaml @@ -0,0 +1,29 @@ +- parse_job_kwargs: '{"exit_code": 0, "stdout": "Job Id: 14\n Job_Name = myscript_1\n Job_Owner + = testu@f41a0fbae027\n resources_used.cpupercent = 0\n resources_used.cput + = 00:00:00\n resources_used.mem = 0kb\n resources_used.ncpus = 1\n resources_used.vmem + = 0kb\n resources_used.walltime = 00:00:00\n job_state = R\n queue = + workq\n server = f41a0fbae027\n Checkpoint = u\n ctime = Sun Dec 29 20:13:12 + 2024\n Error_Path = f41a0fbae027:/home/testu/myscript_1.e14\n exec_host + = f41a0fbae027/0\n exec_vnode = (f41a0fbae027:ncpus=1)\n Hold_Types = n\n Join_Path + = n\n Keep_Files = n\n Mail_Points = a\n mtime = Sun Dec 29 20:13:14 + 2024\n Output_Path = f41a0fbae027:/home/testu/myscript_1.o14\n Priority + = 0\n qtime = Sun Dec 29 20:13:12 2024\n Rerunable = True\n Resource_List.ncpus + = 1\n Resource_List.nodect = 1\n Resource_List.nodes = 1:ppn=1\n Resource_List.place + = scatter\n Resource_List.select = 1:ncpus=1\n Resource_List.walltime = + 01:00:00\n stime = Sun Dec 29 20:13:12 2024\n session_id = 1534\n Shell_Path_List + = /bin/bash\n jobdir = /home/testu\n substate = 42\n Variable_List = + PBS_O_HOME=/home/testu,PBS_O_LANG=C.UTF-8,\n PBS_O_LOGNAME=testu,\n PBS_O_PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bi\n n:/usr/games:/usr/local/games:/snap/bin:/opt/pbs/bin,\n PBS_O_SHELL=/bin/bash,PBS_O_WORKDIR=/home/testu,PBS_O_SYSTEM=Linux,\n PBS_O_QUEUE=workq,PBS_O_HOST=f41a0fbae027\n comment + = Job run at Sun Dec 29 at 20:13 on (f41a0fbae027:ncpus=1)\n etime = Sun Dec + 29 20:13:12 2024\n run_count = 1\n Submit_arguments = test_submit.sh\n project + = _pbs_project_default\n Submit_Host = f41a0fbae027\n", "stderr": ""}' + job_ref: '{"@module": "qtoolkit.core.data_objects", "@class": "QJob", "@version": + "0.1.5.post38+g62b683f.d20241229", "name": "myscript_1", "job_id": "14", "exit_status": + null, "state": {"@module": "qtoolkit.core.data_objects", "@class": "QState", "@version": + "0.1.5.post38+g62b683f.d20241229", "value": "RUNNING"}, "sub_state": {"@module": + "qtoolkit.io.pbs", "@class": "PBSState", "@version": "0.1.5.post38+g62b683f.d20241229", + "value": "R"}, "info": {"@module": "qtoolkit.core.data_objects", "@class": "QJobInfo", + "@version": "0.1.5.post38+g62b683f.d20241229", "memory": null, "memory_per_cpu": + null, "nodes": 1, "cpus": 1, "threads_per_process": null, "time_limit": 3600}, + "account": null, "runtime": 0, "queue_name": null}' +- parse_job_kwargs: '{"exit_code": 0, "stdout": "", "stderr": ""}' + job_ref: 'null' diff --git a/tests/test_data/io/pbs/parse_submit_output_inout.yaml b/tests/test_data/io/pbs/parse_submit_output_inout.yaml new file mode 100644 index 0000000..c46fd65 --- /dev/null +++ b/tests/test_data/io/pbs/parse_submit_output_inout.yaml @@ -0,0 +1,12 @@ +- parse_submit_kwargs: '{"exit_code": 1, "stdout": "", "stderr": "qsub: Unknown queue\n"}' + submission_result_ref: '{"@module": "qtoolkit.core.data_objects", "@class": "SubmissionResult", + "@version": "0.1.5.post38+g62b683f.d20241229", "job_id": null, "step_id": null, + "exit_code": 1, "stdout": "", "stderr": "qsub: Unknown queue\n", "status": {"@module": + "qtoolkit.core.data_objects", "@class": "SubmissionStatus", "@version": "0.1.5.post38+g62b683f.d20241229", + "value": "FAILED"}}' +- parse_submit_kwargs: '{"exit_code": 0, 
"stdout": "24\n", "stderr": ""}' + submission_result_ref: '{"@module": "qtoolkit.core.data_objects", "@class": "SubmissionResult", + "@version": "0.1.5.post38+g62b683f.d20241229", "job_id": "24", "step_id": null, + "exit_code": 0, "stdout": "24\n", "stderr": "", "status": {"@module": "qtoolkit.core.data_objects", + "@class": "SubmissionStatus", "@version": "0.1.5.post38+g62b683f.d20241229", "value": + "SUCCESSFUL"}}'