From f088aa15789cc9ffced6967cfd1c580f9d6c761d Mon Sep 17 00:00:00 2001 From: MasloMaslane Date: Thu, 5 Jun 2025 13:08:24 +0200 Subject: [PATCH 01/10] Workflow parsing validation --- src/sio3pack/exceptions.py | 14 ---- src/sio3pack/exceptions/__init__.py | 2 + src/sio3pack/exceptions/general.py | 25 ++++++ src/sio3pack/exceptions/workflow.py | 81 +++++++++++++++++++ src/sio3pack/workflow/execution/channels.py | 18 +++++ .../workflow/execution/descriptors.py | 7 +- .../workflow/execution/filesystems.py | 47 ++++++++++- .../workflow/execution/mount_namespace.py | 58 ++++++++++++- src/sio3pack/workflow/execution/process.py | 51 +++++++++++- .../workflow/execution/resource_group.py | 26 +++++- src/sio3pack/workflow/execution/stream.py | 44 ++++++++-- src/sio3pack/workflow/tasks.py | 70 ++++++++++++++-- src/sio3pack/workflow/workflow.py | 19 ++++- 13 files changed, 424 insertions(+), 38 deletions(-) delete mode 100644 src/sio3pack/exceptions.py create mode 100644 src/sio3pack/exceptions/__init__.py create mode 100644 src/sio3pack/exceptions/general.py create mode 100644 src/sio3pack/exceptions/workflow.py diff --git a/src/sio3pack/exceptions.py b/src/sio3pack/exceptions.py deleted file mode 100644 index 05d16f0..0000000 --- a/src/sio3pack/exceptions.py +++ /dev/null @@ -1,14 +0,0 @@ -class SIO3PackException(Exception): - """A wrapper for all exceptions raised by SIO3Pack.""" - - def __init__(self, message, original_exception=None): - super().__init__(message) - self.original_exception = original_exception - - -class WorkflowCreationError(Exception): - """Raised when there is an error creating a workflow.""" - - def __init__(self, message: str): - super().__init__(message) - self.message = message diff --git a/src/sio3pack/exceptions/__init__.py b/src/sio3pack/exceptions/__init__.py new file mode 100644 index 0000000..a4093ac --- /dev/null +++ b/src/sio3pack/exceptions/__init__.py @@ -0,0 +1,2 @@ +from sio3pack.exceptions.general import SIO3PackException +from sio3pack.exceptions.workflow import WorkflowCreationError, WorkflowParsingError, ParsingFailedOn diff --git a/src/sio3pack/exceptions/general.py b/src/sio3pack/exceptions/general.py new file mode 100644 index 0000000..84e99a7 --- /dev/null +++ b/src/sio3pack/exceptions/general.py @@ -0,0 +1,25 @@ +class SIO3PackException(Exception): + """A wrapper for all exceptions raised by SIO3Pack.""" + + def __init__(self, message, full_message=None): + """ + Initialize the SIO3PackException. + + :param message: A short description of the error. + :param full_message: A detailed description of the error, if available. + """ + super().__init__(message) + self.message = message + self._full_message = full_message + + def _generate_full_message(self): + """ + Generate a full message for the exception if not provided. 
+ """ + return None + + @property + def full_message(self): + if self._full_message is None: + return self._generate_full_message() + return self._full_message diff --git a/src/sio3pack/exceptions/workflow.py b/src/sio3pack/exceptions/workflow.py new file mode 100644 index 0000000..7516f42 --- /dev/null +++ b/src/sio3pack/exceptions/workflow.py @@ -0,0 +1,81 @@ +from enum import Enum + +from sio3pack.exceptions.general import SIO3PackException + + +class WorkflowCreationError(SIO3PackException): + """Raised when there is an error creating a workflow.""" + + +class ParsingFailedOn(Enum): + """Enum to represent the part of the workflow that failed to parse.""" + + WORKFLOW = "workflow" + TASK = "task" + CHANNEL = "channel" + FILESYSTEM = "filesystem" + MOUNT_NAMESPACE = "mount_namespace" + MOUNT_POINT = "mount_point" + RESOURCE_GROUP = "resource_group" + PROCESS = "process" + STREAM = "stream" + + +class WorkflowParsingError(SIO3PackException): + """Raised when there is an error parsing a workflow.""" + + def __init__(self, message, failed_on: ParsingFailedOn, extra_msg: str = None, data: dict = None): + """ + Initialize the WorkflowParsingError. + + :param message: A short description of the error. + :param failed_on: The part of the workflow that failed to parse. + """ + super().__init__(message) + self.message = message + self.failed_on = failed_on + self.extra_msg = extra_msg + self.data = data or {} + + def set_data(self, key: str, value: str): + """ + Set additional data for the exception. + + :param key: The key for the data. + :param value: The value for the data. + """ + self.data[key] = value + + def _generate_full_message(self): + """ + Generate a full message for the exception if not provided. + """ + def task_name(): + msg = f"task {self.data['task_index']}" + if "task_name" in self.data: + msg += f" ({self.data['task_name']})" + return msg + + msg = None + if self.failed_on == ParsingFailedOn.WORKFLOW: + msg = f"Workflow parsing failed while parsing top-level workflow definition." + elif self.failed_on == ParsingFailedOn.TASK: + msg = f"Workflow parsing failed while parsing {task_name()}." + elif self.failed_on == ParsingFailedOn.CHANNEL: + msg = f"Workflow parsing failed while parsing channel configuration {self.data['channel_index']} for {task_name()}." + elif self.failed_on == ParsingFailedOn.FILESYSTEM: + msg = f"Workflow parsing failed while parsing filesystem configuration {self.data['filesystem_index']} for {task_name()}." + elif self.failed_on == ParsingFailedOn.MOUNT_NAMESPACE: + msg = f"Workflow parsing failed while parsing mount namespace {self.data['mount_namespace_index']} for {task_name()}." + elif self.failed_on == ParsingFailedOn.MOUNT_POINT: + msg = f"Workflow parsing failed while parsing mount point {self.data['mountpoint_index']} for mount namespace {self.data['mount_namespace_index']} in {task_name()}." + elif self.failed_on == ParsingFailedOn.RESOURCE_GROUP: + msg = f"Workflow parsing failed while parsing resource group {self.data['resource_group_index']} for {task_name()}." + elif self.failed_on == ParsingFailedOn.PROCESS: + msg = f"Workflow parsing failed while parsing process {self.data['process_index']} for {task_name()}." + elif self.failed_on == ParsingFailedOn.STREAM: + msg = f"Workflow parsing failed while parsing stream {self.data['fd']} for process {self.data['process_index']} for {task_name()}." 
+ + if msg and self.extra_msg: + msg += " " + self.extra_msg + return msg diff --git a/src/sio3pack/workflow/execution/channels.py b/src/sio3pack/workflow/execution/channels.py index cebfcf5..2743f8c 100644 --- a/src/sio3pack/workflow/execution/channels.py +++ b/src/sio3pack/workflow/execution/channels.py @@ -1,3 +1,6 @@ +from sio3pack.exceptions import WorkflowParsingError, ParsingFailedOn + + class Channel: """ A configuration of a channel. A channel is a connection between two pipes. @@ -40,6 +43,21 @@ def from_json(cls, data: dict) -> "Channel": :param dict data: The dictionary to create the channel from. """ + for key in ["buffer_size", "source_pipe", "target_pipe"]: + if key not in data: + raise WorkflowParsingError( + f"Missing required key in channel configuration.", + ParsingFailedOn.CHANNEL, + f"Missing required key '{key}' in channel configuration.", + ) + + for key in ["buffer_size", "source_pipe", "target_pipe", "file_buffer_size", "limit"]: + if key in data and not isinstance(data[key], int): + raise WorkflowParsingError( + f"Invalid type for key '{key}' in channel configuration.", + ParsingFailedOn.CHANNEL, + f"Expected integer for '{key}', got {type(data[key]).__name__}.", + ) return cls( data["buffer_size"], diff --git a/src/sio3pack/workflow/execution/descriptors.py b/src/sio3pack/workflow/execution/descriptors.py index 116c4a6..fe6b193 100644 --- a/src/sio3pack/workflow/execution/descriptors.py +++ b/src/sio3pack/workflow/execution/descriptors.py @@ -1,5 +1,6 @@ from typing import ItemsView +from sio3pack.exceptions import WorkflowParsingError from sio3pack.workflow.execution.stream import Stream @@ -38,7 +39,11 @@ def from_json(self, data: dict): :param dict data: The JSON-serializable dictionary to load from. """ for fd, stream_data in data.items(): - stream = Stream.from_json(stream_data, self.objects_manager, self.filesystem_manager) + try: + stream = Stream.from_json(stream_data, self.objects_manager, self.filesystem_manager) + except WorkflowParsingError as e: + e.set_data("fd", fd) + raise e self.add(int(fd), stream) def to_json(self) -> dict: diff --git a/src/sio3pack/workflow/execution/filesystems.py b/src/sio3pack/workflow/execution/filesystems.py index 2a75b2e..62873b9 100644 --- a/src/sio3pack/workflow/execution/filesystems.py +++ b/src/sio3pack/workflow/execution/filesystems.py @@ -1,3 +1,4 @@ +from sio3pack.exceptions import WorkflowParsingError, ParsingFailedOn from sio3pack.workflow.object import Object @@ -8,6 +9,8 @@ class Filesystem: :param int id: The id of the filesystem in the task. """ + _required_keys = [] + def __init__(self, id: int = None): """ Represent a filesystem. @@ -32,7 +35,14 @@ def from_json(cls, data: dict, id: int, workflow: "Workflow"): :param int id: The id of the filesystem. :param Workflow workflow: The workflow the filesystem belongs to. """ - return NotImplementedError() + for key in cls._required_keys: + if key not in data: + raise WorkflowParsingError( + "Parsing filesystem failed.", + ParsingFailedOn.FILESYSTEM, + extra_msg=f"Missing required key '{key}' in filesystem definition.", + data={"filesystem_index": id}, + ) def to_json(self) -> dict: """ @@ -61,6 +71,8 @@ class ImageFilesystem(Filesystem): :param str path: The path to the image. If None, the path is "". """ + _required_keys = ["image", "path"] + def __init__(self, image: str, path: str = None, id: int = None): """ Represent an image filesystem. 
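A minimal usage sketch of the error type introduced above (illustrative, not part of the patch): it assumes sio3pack is importable and uses made-up index values, mirroring how FilesystemManager.from_json raises the error and Workflow.from_json annotates it while re-raising.

    from sio3pack.exceptions import ParsingFailedOn, WorkflowParsingError

    # Roughly what FilesystemManager.from_json raises for an entry without a
    # "type" key, plus the task context that Workflow.from_json attaches while
    # re-raising (the index values here are made up).
    err = WorkflowParsingError(
        "Parsing filesystem failed.",
        ParsingFailedOn.FILESYSTEM,
        extra_msg="Missing 'type' key in filesystem definition.",
        data={"filesystem_index": 0},
    )
    err.set_data("task_index", "1")
    err.set_data("task_name", "run_1")

    print(err.message)
    # Parsing filesystem failed.
    print(err.full_message)
    # Workflow parsing failed while parsing filesystem configuration 0 for
    # task 1 (run_1). Missing 'type' key in filesystem definition.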
@@ -84,6 +96,7 @@ def from_json(cls, data: dict, id: int, workflow: "Workflow") -> "ImageFilesyste :param id id: The id of the image filesystem. :param Workflow workflow: The workflow the image filesystem belongs to. """ + super().from_json(data, id, workflow) return cls(data["image"], data["path"], id) def to_json(self) -> dict: @@ -96,6 +109,8 @@ def to_json(self) -> dict: class EmptyFilesystem(Filesystem): + _required_keys = [] + def __init__(self, id: int = None): """ Represent an empty filesystem. Can be used as tmpfs. @@ -114,6 +129,7 @@ def from_json(cls, data: dict, id: int, workflow: "Workflow"): :param id: The id of the empty filesystem. :param workflow: The workflow the empty filesystem belongs to. """ + super().from_json(data, id, workflow) return cls(id) def to_json(self) -> dict: @@ -124,6 +140,8 @@ def to_json(self) -> dict: class ObjectFilesystem(Filesystem): + _required_keys = ["handle"] + def __init__(self, object: Object, id: int = None): """ Represent an object filesystem. @@ -144,6 +162,7 @@ def from_json(cls, data: dict, id: int, workflow: "Workflow"): :param id: The id of the object filesystem. :param workflow: The workflow the object filesystem belongs to. """ + super().from_json(data, id, workflow) return cls(workflow.objects_manager.get_or_create_object(data["handle"]), id) def to_json(self) -> dict: @@ -189,13 +208,28 @@ def from_json(self, data: list[dict], workflow: "Workflow"): :param list[dict] data: The list of dictionaries to create the filesystems from. :param Workflow workflow: The workflow the filesystems belong to. """ - for fs in data: + for i, fs in enumerate(data): + if "type" not in fs: + raise WorkflowParsingError( + "Parsing filesystem failed.", + ParsingFailedOn.FILESYSTEM, + extra_msg="Missing 'type' key in filesystem definition.", + data={"filesystem_index": i}, + ) + if fs["type"] == "image": self.filesystems.append(ImageFilesystem.from_json(fs, self.id, workflow)) elif fs["type"] == "empty": self.filesystems.append(EmptyFilesystem.from_json(fs, self.id, workflow)) elif fs["type"] == "object": self.filesystems.append(ObjectFilesystem.from_json(fs, self.id, workflow)) + else: + raise WorkflowParsingError( + "Parsing filesystem failed.", + ParsingFailedOn.FILESYSTEM, + extra_msg=f"Unknown filesystem type '{fs['type']}' in filesystem definition.", + data={"filesystem_index": i}, + ) self.id += 1 def to_json(self) -> list[dict]: @@ -234,3 +268,12 @@ def len(self) -> int: Get the number of filesystems. """ return len(self.filesystems) + + def has_by_id(self, id: int) -> bool: + """ + Check if a filesystem with the given id exists. + + :param id: The id of the filesystem to check. + :return: True if the filesystem exists, False otherwise. + """ + return 0 <= id < len(self.filesystems) and self.filesystems[id] is not None diff --git a/src/sio3pack/workflow/execution/mount_namespace.py b/src/sio3pack/workflow/execution/mount_namespace.py index 03895ea..fb18307 100644 --- a/src/sio3pack/workflow/execution/mount_namespace.py +++ b/src/sio3pack/workflow/execution/mount_namespace.py @@ -1,3 +1,4 @@ +from sio3pack.exceptions import WorkflowParsingError, ParsingFailedOn from sio3pack.workflow.execution.filesystems import Filesystem, FilesystemManager @@ -33,6 +34,27 @@ def from_json(cls, data: dict, filesystem_manager: FilesystemManager) -> "Mountp :param dict data: The dictionary to create the mountpoint from. :param FilesystemManager filesystem_manager: The filesystem manager to use. 
""" + for key in ["source", "target", "writable"]: + if key not in data: + raise WorkflowParsingError( + "Failed parsing mount point", + ParsingFailedOn.MOUNT_POINT, + f"Missing key '{key}' in mount point data.", + ) + for key, type in [("source", int), ("target", str), ("writable", bool), ("capacity", int)]: + if key in data and not isinstance(data[key], type): + raise WorkflowParsingError( + "Failed parsing mount point", + ParsingFailedOn.MOUNT_POINT, + f"Key '{key}' in mount point data is not of type {type.__name__}.", + ) + if not filesystem_manager.has_by_id(int(data["source"])): + raise WorkflowParsingError( + "Failed parsing mount point", + ParsingFailedOn.MOUNT_POINT, + f"Source filesystem with id {data['source']} not found.", + ) + return cls( filesystem_manager.get_by_id(int(data["source"])), data["target"], data["writable"], data.get("capacity") ) @@ -77,8 +99,34 @@ def from_json(cls, data: dict, id: int, filesystem_manager: FilesystemManager): :param id: The id of the mount namespace. :param filesystem_manager: The filesystem manager to use. """ + for key in ["mountpoints", "root"]: + if key not in data: + raise WorkflowParsingError( + "Failed parsing mount namespace", + ParsingFailedOn.MOUNT_NAMESPACE, + f"Missing key '{key}' in mount namespace data.", + data={"mount_namespace_index": str(id)}, + ) + for key, type in [("mountpoints", list), ("root", int)]: + if not isinstance(data[key], type): + raise WorkflowParsingError( + "Failed parsing mount namespace", + ParsingFailedOn.MOUNT_NAMESPACE, + f"Key '{key}' in mount namespace data is not of type {type.__name__}.", + data={"mount_namespace_index": str(id)}, + ) + + mountpoints = [] + for i, mountpoint in enumerate(data["mountpoints"]): + try: + mountpoints.append(Mountpoint.from_json(mountpoint, filesystem_manager)) + except WorkflowParsingError as e: + e.set_data("mount_namespace_index", str(id)) + e.set_data("mountpoint_index", str(i)) + raise e + return cls( - [Mountpoint.from_json(mountpoint, filesystem_manager) for mountpoint in data["mountpoints"]], + mountpoints, data["root"], id, ) @@ -117,8 +165,12 @@ def from_json(self, data: list[dict]): :param data: The list of dictionaries to create the mount namespace manager from. """ - for mount_namespace in data: - self.add(MountNamespace.from_json(mount_namespace, self.id, self.filesystem_manager)) + for i, mount_namespace in enumerate(data): + try: + self.add(MountNamespace.from_json(mount_namespace, self.id, self.filesystem_manager)) + except WorkflowParsingError as e: + e.set_data("mn_index", str(i)) + raise e self.id += 1 def add(self, mount_namespace: MountNamespace): diff --git a/src/sio3pack/workflow/execution/process.py b/src/sio3pack/workflow/execution/process.py index e9c8ed3..fc63fcc 100644 --- a/src/sio3pack/workflow/execution/process.py +++ b/src/sio3pack/workflow/execution/process.py @@ -1,3 +1,4 @@ +from sio3pack.exceptions import WorkflowParsingError, ParsingFailedOn from sio3pack.workflow.execution.descriptors import DescriptorManager from sio3pack.workflow.execution.mount_namespace import MountNamespace from sio3pack.workflow.execution.resource_group import ResourceGroup @@ -85,23 +86,69 @@ def from_json(cls, data: dict, workflow: "Workflow", task: "Task"): :param task: The task the process belongs to. 
""" + for key, type in [("arguments", list), ("environment", list), ("image", str), ("mount_namespace", int), + ("resource_group", int), ("pid_namespace", int), ("working_directory", str), ("descriptors", dict)]: + if key not in data: + raise WorkflowParsingError( + f"Failed parsing process.", + ParsingFailedOn.PROCESS, + f"Missing key '{key}' in process data.", + ) + if not isinstance(data[key], type): + raise WorkflowParsingError( + f"Failed parsing process.", + ParsingFailedOn.PROCESS, + f"Key '{key}' in process data is not of type {type.__name__}.", + ) + if "start_after" in data and not isinstance(data["start_after"], list): + raise WorkflowParsingError( + f"Failed parsing process.", + ParsingFailedOn.PROCESS, + "Key 'start_after' in process data is not of type list.", + ) + env = {} for var in data["environment"]: + if "=" not in var: + raise WorkflowParsingError( + f"Failed parsing process.", + ParsingFailedOn.PROCESS, + f"Environment variable '{var}' does not contain an '=' sign.", + ) key, value = var.split("=", 1) env[key] = value + + try: + mount_namespace = task.mountnamespace_manager.get_by_id(data["mount_namespace"]) + except IndexError: + raise WorkflowParsingError( + f"Failed parsing process.", + ParsingFailedOn.PROCESS, + f"Mount namespace with ID {data['mount_namespace']} not found.", + ) + try: + resource_group = task.resource_group_manager.get_by_id(data["resource_group"]) + except IndexError: + raise WorkflowParsingError( + f"Failed parsing process.", + ParsingFailedOn.PROCESS, + f"Resource group with ID {data['resource_group']} not found.", + ) + process = cls( workflow, task, data["arguments"], env, data["image"], - task.mountnamespace_manager.get_by_id(data["mount_namespace"]), - task.resource_group_manager.get_by_id(data["resource_group"]), + mount_namespace, + resource_group, data["pid_namespace"], data["working_directory"], data.get("start_after", []), ) process.descriptor_manager.from_json(data["descriptors"]) + return process def replace_templates(self, replacements: dict[str, str]): diff --git a/src/sio3pack/workflow/execution/resource_group.py b/src/sio3pack/workflow/execution/resource_group.py index c000580..b6aac99 100644 --- a/src/sio3pack/workflow/execution/resource_group.py +++ b/src/sio3pack/workflow/execution/resource_group.py @@ -1,3 +1,6 @@ +from sio3pack.exceptions.workflow import WorkflowParsingError, ParsingFailedOn + + class ResourceGroup: """ A resource group is a set of limits that can be applied to a task. @@ -75,6 +78,21 @@ def from_json(cls, data: dict, id: int): :param data: The dictionary to create the resource group from. :param id: The id of the resource group. """ + for key, type in [("cpu_usage_limit", float), ("instruction_limit", int), ("memory_limit", int), ("oom_terminate_all_tasks", bool), + ("pid_limit", int), ("swap_limit", int), ("time_limit", int)]: + if key not in data: + raise WorkflowParsingError( + "Parsing resource group failed.", + ParsingFailedOn.RESOURCE_GROUP, + f"Missing key '{key}' in resource group data.", + ) + if not isinstance(data[key], type): + raise WorkflowParsingError( + "Parsing resource group failed.", + ParsingFailedOn.RESOURCE_GROUP, + f"Key '{key}' in resource group data is not of type {type.__name__}.", + ) + return cls( data["cpu_usage_limit"], data["instruction_limit"], @@ -145,8 +163,12 @@ def from_json(self, data: list[dict]): :param data: The list of dictionaries to create the resource group manager from. 
""" - for resource_group in data: - self.add(ResourceGroup.from_json(resource_group, self.id)) + for i, resource_group in enumerate(data): + try: + self.add(ResourceGroup.from_json(resource_group, self.id)) + except WorkflowParsingError as e: + e.set_data("resource_group_index", str(i)) + raise e self.id += 1 def all(self) -> list[ResourceGroup]: diff --git a/src/sio3pack/workflow/execution/stream.py b/src/sio3pack/workflow/execution/stream.py index 1cd92d6..386f2aa 100644 --- a/src/sio3pack/workflow/execution/stream.py +++ b/src/sio3pack/workflow/execution/stream.py @@ -1,5 +1,6 @@ from enum import Enum +from sio3pack.exceptions import WorkflowParsingError, ParsingFailedOn from sio3pack.workflow.execution.filesystems import Filesystem, FilesystemManager from sio3pack.workflow.object import Object, ObjectsManager @@ -38,6 +39,8 @@ class Stream: :param StreamType type: The type of the stream. """ + _required_keys = ["type"] + def __init__(self, type: StreamType): """ Initialize the stream. @@ -46,6 +49,16 @@ def __init__(self, type: StreamType): """ self.type = type + @classmethod + def _check_required_keys(cls, data): + for key in cls._required_keys: + if key not in data: + raise WorkflowParsingError( + f"Failed parsing stream.", + ParsingFailedOn.STREAM, + f"Missing key '{key}' in stream data.", + ) + @classmethod def from_json(cls, data: dict, objects_manager: ObjectsManager, filesystem_manager: FilesystemManager) -> "Stream": """ @@ -55,8 +68,7 @@ def from_json(cls, data: dict, objects_manager: ObjectsManager, filesystem_manag :param ObjectsManager objects_manager: The objects manager. :param FilesystemManager filesystem_manager: The filesystem manager. """ - - type = StreamType(data.get("type")) + type = StreamType(data["type"]) if type == StreamType.FILE: return FileStream.from_json(filesystem_manager, data) elif type == StreamType.NULL: @@ -92,6 +104,8 @@ class FileStream(Stream): :param FileMode mode: The mode to open the file in. """ + _required_keys = ["type", "filesystem", "path", "mode"] + def __init__(self, filesystem: Filesystem, path: str, mode: FileMode): super().__init__(StreamType.FILE) self.filesystem = filesystem @@ -106,10 +120,19 @@ def from_json(cls, filesystem_manager: FilesystemManager, data: dict) -> "FileSt :param FilesystemManager filesystem_manager: The filesystem manager. :param dict data: The JSON-serializable dictionary to create the file stream from. """ + cls._check_required_keys(data) + try: + filesystem = filesystem_manager.get_by_id(data["filesystem"]) + except KeyError: + raise WorkflowParsingError( + "Failed parsing file stream", + ParsingFailedOn.STREAM, + f"Invalid filesystem ID {data['filesystem']} in file stream data.", + ) return cls( - filesystem_manager.get_by_id(data.get("filesystem")), - data.get("path"), - FileMode(data.get("mode")), + filesystem, + data["path"], + FileMode(data["mode"]), ) def to_json(self) -> dict: @@ -142,6 +165,7 @@ def from_json(cls, data: dict) -> "NullStream": :param dict data: The JSON-serializable dictionary to create the null stream from. """ + cls._check_required_keys(data) return cls() def to_json(self) -> dict: @@ -164,6 +188,8 @@ class ObjectStream(Stream): :param Object object: The object to use. 
""" + _required_keys = ["type", "handle"] + def __init__(self, type: StreamType, object: Object): if type not in (StreamType.OBJECT_READ, StreamType.OBJECT_WRITE): raise ValueError("Invalid stream type for ObjectStream") @@ -177,6 +203,7 @@ def from_json(cls, data: dict, objects_manager: ObjectsManager) -> "ObjectStream :param dict data: The JSON-serializable dictionary to create the object stream from. """ + cls._check_required_keys(data) cl = ObjectReadStream if StreamType(data["type"]) == StreamType.OBJECT_READ else ObjectWriteStream return cl( objects_manager.get_or_create_object(data["handle"]), @@ -242,6 +269,8 @@ class PipeStream(Stream): :param int pipe_index: The index of the pipe. """ + _required_keys = ["type", "pipe"] + def __init__(self, type: StreamType, pipe_index: int): """ Initialize the pipe stream. @@ -261,8 +290,9 @@ def from_json(cls, data: dict) -> "PipeStream": :param dict data: The JSON-serializable dictionary to create the pipe stream from. """ - cl = PipeReadStream if StreamType(data.get("type")) == StreamType.PIPE_READ else PipeWriteStream - return cl(data.get("pipe")) + cls._check_required_keys(data) + cl = PipeReadStream if StreamType(data["type"]) == StreamType.PIPE_READ else PipeWriteStream + return cl(data["pipe"]) def to_json(self) -> dict: """ diff --git a/src/sio3pack/workflow/tasks.py b/src/sio3pack/workflow/tasks.py index 9827839..2e9cbf2 100644 --- a/src/sio3pack/workflow/tasks.py +++ b/src/sio3pack/workflow/tasks.py @@ -1,5 +1,6 @@ import re +from sio3pack.exceptions import WorkflowParsingError, ParsingFailedOn from sio3pack.workflow.execution.channels import Channel from sio3pack.workflow.execution.filesystems import Filesystem, FilesystemManager from sio3pack.workflow.execution.mount_namespace import MountNamespace, MountNamespaceManager @@ -21,12 +22,26 @@ def from_json(cls, data: dict, workflow: "Workflow"): :param dict data: The dictionary to create the task from. :param Workflow workflow: The workflow the task belongs to. """ + if "type" not in data: + raise WorkflowParsingError( + "Parsing task failed.", + ParsingFailedOn.TASK, + "Missing key 'type'.", + { "type": "missing_key" }, + ) + + if data["type"] == "execution": return ExecutionTask.from_json(data, workflow) elif data["type"] == "script": return ScriptTask.from_json(data, workflow) else: - raise ValueError(f"Unknown task type: {data['type']}") + raise WorkflowParsingError( + "Parsing task failed.", + ParsingFailedOn.TASK, + f"Unknown task type '{data['type']}'.", + { "type": "wrong_type" }, + ) def to_json(self, reg_map: dict[str, int] = None) -> dict: """ @@ -115,14 +130,37 @@ def from_json(cls, data: dict, workflow: "Workflow"): :param Workflow workflow: The workflow the task belongs to. 
""" channels = [] - for channel in data.get("channels", []): - channels.append(Channel.from_json(channel)) + for i, channel in enumerate(data.get("channels", [])): + try: + channels.append(Channel.from_json(channel)) + except WorkflowParsingError as e: + e.set_data("channel_index", str(i)) + raise e + + for key in ["name", "exclusive", "pid_namespaces", "pipes", "output_register", "filesystems", "mount_namespaces", "resource_groups", "processes"]: + if key not in data: + raise WorkflowParsingError( + "Parsing task failed.", + ParsingFailedOn.TASK, + f"Missing key '{key}'.", + { "type": "missing_key" }, + ) + + for key in [("hard_time_limit", int), ("exclusive", bool), ("pid_namespaces", int), ("pipes", int)]: + if key[0] in data and not isinstance(data[key[0]], key[1]): + raise WorkflowParsingError( + "Parsing task failed.", + ParsingFailedOn.TASK, + f"Key '{key[0]}' must be of type {key[1].__name__}.", + { "type": "wrong_type" }, + ) + task = cls( data["name"], workflow, data["exclusive"], data.get("hard_time_limit"), - output_register=data.get("output_register"), + output_register=data["output_register"], pid_namespaces=data["pid_namespaces"], pipes=int(data["pipes"]), channels=channels, @@ -130,7 +168,13 @@ def from_json(cls, data: dict, workflow: "Workflow"): task.filesystem_manager.from_json(data["filesystems"], workflow) task.mountnamespace_manager.from_json(data["mount_namespaces"]) task.resource_group_manager.from_json(data["resource_groups"]) - task.processes = [Process.from_json(process, workflow, task) for process in data["processes"]] + task.processes = [] + for i, process in enumerate(data.get("processes")): + try: + task.processes.append(Process.from_json(process, workflow, task)) + except WorkflowParsingError as e: + e.set_data("process_index", str(i)) + raise e return task def to_json(self, reg_map: dict[str, int] = None) -> dict: @@ -267,6 +311,22 @@ def from_json(cls, data: dict, workflow: "Workflow"): :param data: The dictionary to create the task from. :param workflow: The workflow the task belongs to. """ + for key, type in [("name", str), ("reactive", bool), ("input_registers", list), ("output_registers", list), ("script", str)]: + if key not in data: + raise WorkflowParsingError( + "Parsing task failed.", + ParsingFailedOn.TASK, + f"Missing key '{key}'.", + { "type": "missing_key" }, + ) + if not isinstance(data[key], type): + raise WorkflowParsingError( + "Parsing task failed.", + ParsingFailedOn.TASK, + f"Key '{key}' must be of type {type.__name__}.", + { "type": "wrong_type" }, + ) + return cls( data["name"], workflow, diff --git a/src/sio3pack/workflow/workflow.py b/src/sio3pack/workflow/workflow.py index 84d9d09..8ca53b4 100644 --- a/src/sio3pack/workflow/workflow.py +++ b/src/sio3pack/workflow/workflow.py @@ -1,3 +1,4 @@ +from sio3pack.exceptions import WorkflowParsingError, ParsingFailedOn from sio3pack.files.file import File from sio3pack.workflow.object import Object, ObjectList, ObjectsManager from sio3pack.workflow.tasks import ExecutionTask, ScriptTask, Task @@ -22,9 +23,23 @@ def from_json(cls, data: dict): :param data: The dictionary to create the workflow from. """ + for key in ["name", "external_objects", "observable_objects", "observable_registers", "tasks"]: + if key not in data: + raise WorkflowParsingError( + "Parsing workflow failed.", + ParsingFailedOn.WORKFLOW, + f"Missing key '{key}'." 
+ ) + workflow = cls(data["name"], data["external_objects"], data["observable_objects"], data["observable_registers"]) - for task in data["tasks"]: - workflow.add_task(Task.from_json(task, workflow)) + for i, task in enumerate(data["tasks"]): + try: + workflow.add_task(Task.from_json(task, workflow)) + except WorkflowParsingError as e: + e.set_data("task_index", str(i)) + e.set_data("task_name", task.get("name", None)) + raise e + return workflow def __init__( From d4aaf281333a6fe01a612942b54fb1102e5982a4 Mon Sep 17 00:00:00 2001 From: MasloMaslane Date: Thu, 5 Jun 2025 13:16:49 +0200 Subject: [PATCH 02/10] Fix tests --- example_workflows/encdec_workflows.json | 8 +++--- .../interactive_io_workflows.json | 8 +++--- example_workflows/inwer.json | 12 ++++---- example_workflows/outgen.json | 12 ++++---- example_workflows/run.json | 20 ++++++------- example_workflows/string_regs.json | 28 +++++++++---------- example_workflows/test_run.json | 8 +++--- example_workflows/user_out.json | 8 +++--- .../custom_workflows/workflows.json | 4 +-- tests/test_packages/encdec/workflows.json | 8 +++--- .../test_packages/interactive/workflows.json | 8 +++--- 11 files changed, 62 insertions(+), 62 deletions(-) diff --git a/example_workflows/encdec_workflows.json b/example_workflows/encdec_workflows.json index af733fd..7e8c686 100644 --- a/example_workflows/encdec_workflows.json +++ b/example_workflows/encdec_workflows.json @@ -41,12 +41,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ @@ -152,12 +152,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ diff --git a/example_workflows/interactive_io_workflows.json b/example_workflows/interactive_io_workflows.json index b28845a..a4eb9b2 100644 --- a/example_workflows/interactive_io_workflows.json +++ b/example_workflows/interactive_io_workflows.json @@ -47,12 +47,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ @@ -143,12 +143,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ diff --git a/example_workflows/inwer.json b/example_workflows/inwer.json index e272807..4540ff1 100644 --- a/example_workflows/inwer.json +++ b/example_workflows/inwer.json @@ -38,12 +38,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ @@ -102,12 +102,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, 
"memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ @@ -162,12 +162,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ diff --git a/example_workflows/outgen.json b/example_workflows/outgen.json index 773dd19..b526dd1 100644 --- a/example_workflows/outgen.json +++ b/example_workflows/outgen.json @@ -41,12 +41,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ @@ -105,12 +105,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ @@ -168,12 +168,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ diff --git a/example_workflows/run.json b/example_workflows/run.json index f92d034..767b022 100644 --- a/example_workflows/run.json +++ b/example_workflows/run.json @@ -41,12 +41,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ @@ -105,12 +105,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ @@ -195,12 +195,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ @@ -285,12 +285,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ @@ -375,12 +375,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ diff --git a/example_workflows/string_regs.json b/example_workflows/string_regs.json index 2b6b878..2bc890d 100644 --- a/example_workflows/string_regs.json +++ b/example_workflows/string_regs.json @@ -44,12 +44,12 @@ 
"resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ @@ -107,8 +107,8 @@ "pipes": 0, "resource_groups": [ { - "cpu_usage_limit": 100, - "instruction_limit": 30000000000000.0, + "cpu_usage_limit": 100.0, + "instruction_limit": 30000000000000, "memory_limit": 104857600, "oom_terminate_all_tasks": false, "pid_limit": 2, @@ -198,12 +198,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ @@ -287,8 +287,8 @@ "pipes": 0, "resource_groups": [ { - "cpu_usage_limit": 100, - "instruction_limit": 30000000000000.0, + "cpu_usage_limit": 100.0, + "instruction_limit": 30000000000000, "memory_limit": 104857600, "oom_terminate_all_tasks": false, "pid_limit": 2, @@ -378,12 +378,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ @@ -467,8 +467,8 @@ "pipes": 0, "resource_groups": [ { - "cpu_usage_limit": 100, - "instruction_limit": 30000000000000.0, + "cpu_usage_limit": 100.0, + "instruction_limit": 30000000000000, "memory_limit": 104857600, "oom_terminate_all_tasks": false, "pid_limit": 2, @@ -558,12 +558,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ diff --git a/example_workflows/test_run.json b/example_workflows/test_run.json index 9c7d1f2..538a872 100644 --- a/example_workflows/test_run.json +++ b/example_workflows/test_run.json @@ -39,12 +39,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ @@ -103,12 +103,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ diff --git a/example_workflows/user_out.json b/example_workflows/user_out.json index 2585742..83bc815 100644 --- a/example_workflows/user_out.json +++ b/example_workflows/user_out.json @@ -39,12 +39,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ @@ -103,12 +103,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, 
"pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ diff --git a/tests/test_packages/custom_workflows/workflows.json b/tests/test_packages/custom_workflows/workflows.json index 2ef6e55..ce47e23 100644 --- a/tests/test_packages/custom_workflows/workflows.json +++ b/tests/test_packages/custom_workflows/workflows.json @@ -36,12 +36,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ diff --git a/tests/test_packages/encdec/workflows.json b/tests/test_packages/encdec/workflows.json index af733fd..7e8c686 100644 --- a/tests/test_packages/encdec/workflows.json +++ b/tests/test_packages/encdec/workflows.json @@ -41,12 +41,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ @@ -152,12 +152,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ diff --git a/tests/test_packages/interactive/workflows.json b/tests/test_packages/interactive/workflows.json index b28845a..a4eb9b2 100644 --- a/tests/test_packages/interactive/workflows.json +++ b/tests/test_packages/interactive/workflows.json @@ -47,12 +47,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ @@ -143,12 +143,12 @@ "resource_groups": [ { "cpu_usage_limit": 100.0, - "instruction_limit": 1000000000.0, + "instruction_limit": 1000000000, "memory_limit": 2147483648, "oom_terminate_all_tasks": false, "pid_limit": 2, "swap_limit": 0, - "time_limit": 1000000000.0 + "time_limit": 1000000000 } ], "processes": [ From 306d0abee4f32a19ce5f51fa2dfea561c0ba8344 Mon Sep 17 00:00:00 2001 From: MasloMaslane Date: Thu, 5 Jun 2025 13:18:33 +0200 Subject: [PATCH 03/10] Run formatters --- src/sio3pack/exceptions/__init__.py | 2 +- src/sio3pack/exceptions/workflow.py | 1 + src/sio3pack/workflow/execution/channels.py | 2 +- .../workflow/execution/filesystems.py | 2 +- .../workflow/execution/mount_namespace.py | 2 +- src/sio3pack/workflow/execution/process.py | 14 ++++++-- .../workflow/execution/resource_group.py | 13 +++++-- src/sio3pack/workflow/execution/stream.py | 2 +- src/sio3pack/workflow/tasks.py | 35 +++++++++++++------ src/sio3pack/workflow/workflow.py | 6 ++-- 10 files changed, 54 insertions(+), 25 deletions(-) diff --git a/src/sio3pack/exceptions/__init__.py b/src/sio3pack/exceptions/__init__.py index a4093ac..8ec950e 100644 --- a/src/sio3pack/exceptions/__init__.py +++ b/src/sio3pack/exceptions/__init__.py @@ -1,2 +1,2 @@ from sio3pack.exceptions.general import SIO3PackException -from sio3pack.exceptions.workflow import WorkflowCreationError, WorkflowParsingError, ParsingFailedOn +from sio3pack.exceptions.workflow import ParsingFailedOn, 
WorkflowCreationError, WorkflowParsingError diff --git a/src/sio3pack/exceptions/workflow.py b/src/sio3pack/exceptions/workflow.py index 7516f42..f4362fc 100644 --- a/src/sio3pack/exceptions/workflow.py +++ b/src/sio3pack/exceptions/workflow.py @@ -50,6 +50,7 @@ def _generate_full_message(self): """ Generate a full message for the exception if not provided. """ + def task_name(): msg = f"task {self.data['task_index']}" if "task_name" in self.data: diff --git a/src/sio3pack/workflow/execution/channels.py b/src/sio3pack/workflow/execution/channels.py index 2743f8c..ef7d579 100644 --- a/src/sio3pack/workflow/execution/channels.py +++ b/src/sio3pack/workflow/execution/channels.py @@ -1,4 +1,4 @@ -from sio3pack.exceptions import WorkflowParsingError, ParsingFailedOn +from sio3pack.exceptions import ParsingFailedOn, WorkflowParsingError class Channel: diff --git a/src/sio3pack/workflow/execution/filesystems.py b/src/sio3pack/workflow/execution/filesystems.py index 62873b9..796e8f4 100644 --- a/src/sio3pack/workflow/execution/filesystems.py +++ b/src/sio3pack/workflow/execution/filesystems.py @@ -1,4 +1,4 @@ -from sio3pack.exceptions import WorkflowParsingError, ParsingFailedOn +from sio3pack.exceptions import ParsingFailedOn, WorkflowParsingError from sio3pack.workflow.object import Object diff --git a/src/sio3pack/workflow/execution/mount_namespace.py b/src/sio3pack/workflow/execution/mount_namespace.py index fb18307..ec54d3b 100644 --- a/src/sio3pack/workflow/execution/mount_namespace.py +++ b/src/sio3pack/workflow/execution/mount_namespace.py @@ -1,4 +1,4 @@ -from sio3pack.exceptions import WorkflowParsingError, ParsingFailedOn +from sio3pack.exceptions import ParsingFailedOn, WorkflowParsingError from sio3pack.workflow.execution.filesystems import Filesystem, FilesystemManager diff --git a/src/sio3pack/workflow/execution/process.py b/src/sio3pack/workflow/execution/process.py index fc63fcc..1b0e20b 100644 --- a/src/sio3pack/workflow/execution/process.py +++ b/src/sio3pack/workflow/execution/process.py @@ -1,4 +1,4 @@ -from sio3pack.exceptions import WorkflowParsingError, ParsingFailedOn +from sio3pack.exceptions import ParsingFailedOn, WorkflowParsingError from sio3pack.workflow.execution.descriptors import DescriptorManager from sio3pack.workflow.execution.mount_namespace import MountNamespace from sio3pack.workflow.execution.resource_group import ResourceGroup @@ -86,8 +86,16 @@ def from_json(cls, data: dict, workflow: "Workflow", task: "Task"): :param task: The task the process belongs to. 
""" - for key, type in [("arguments", list), ("environment", list), ("image", str), ("mount_namespace", int), - ("resource_group", int), ("pid_namespace", int), ("working_directory", str), ("descriptors", dict)]: + for key, type in [ + ("arguments", list), + ("environment", list), + ("image", str), + ("mount_namespace", int), + ("resource_group", int), + ("pid_namespace", int), + ("working_directory", str), + ("descriptors", dict), + ]: if key not in data: raise WorkflowParsingError( f"Failed parsing process.", diff --git a/src/sio3pack/workflow/execution/resource_group.py b/src/sio3pack/workflow/execution/resource_group.py index b6aac99..016a070 100644 --- a/src/sio3pack/workflow/execution/resource_group.py +++ b/src/sio3pack/workflow/execution/resource_group.py @@ -1,4 +1,4 @@ -from sio3pack.exceptions.workflow import WorkflowParsingError, ParsingFailedOn +from sio3pack.exceptions.workflow import ParsingFailedOn, WorkflowParsingError class ResourceGroup: @@ -78,8 +78,15 @@ def from_json(cls, data: dict, id: int): :param data: The dictionary to create the resource group from. :param id: The id of the resource group. """ - for key, type in [("cpu_usage_limit", float), ("instruction_limit", int), ("memory_limit", int), ("oom_terminate_all_tasks", bool), - ("pid_limit", int), ("swap_limit", int), ("time_limit", int)]: + for key, type in [ + ("cpu_usage_limit", float), + ("instruction_limit", int), + ("memory_limit", int), + ("oom_terminate_all_tasks", bool), + ("pid_limit", int), + ("swap_limit", int), + ("time_limit", int), + ]: if key not in data: raise WorkflowParsingError( "Parsing resource group failed.", diff --git a/src/sio3pack/workflow/execution/stream.py b/src/sio3pack/workflow/execution/stream.py index 386f2aa..81d20a2 100644 --- a/src/sio3pack/workflow/execution/stream.py +++ b/src/sio3pack/workflow/execution/stream.py @@ -1,6 +1,6 @@ from enum import Enum -from sio3pack.exceptions import WorkflowParsingError, ParsingFailedOn +from sio3pack.exceptions import ParsingFailedOn, WorkflowParsingError from sio3pack.workflow.execution.filesystems import Filesystem, FilesystemManager from sio3pack.workflow.object import Object, ObjectsManager diff --git a/src/sio3pack/workflow/tasks.py b/src/sio3pack/workflow/tasks.py index 2e9cbf2..aee94e5 100644 --- a/src/sio3pack/workflow/tasks.py +++ b/src/sio3pack/workflow/tasks.py @@ -1,6 +1,6 @@ import re -from sio3pack.exceptions import WorkflowParsingError, ParsingFailedOn +from sio3pack.exceptions import ParsingFailedOn, WorkflowParsingError from sio3pack.workflow.execution.channels import Channel from sio3pack.workflow.execution.filesystems import Filesystem, FilesystemManager from sio3pack.workflow.execution.mount_namespace import MountNamespace, MountNamespaceManager @@ -27,10 +27,9 @@ def from_json(cls, data: dict, workflow: "Workflow"): "Parsing task failed.", ParsingFailedOn.TASK, "Missing key 'type'.", - { "type": "missing_key" }, + {"type": "missing_key"}, ) - if data["type"] == "execution": return ExecutionTask.from_json(data, workflow) elif data["type"] == "script": @@ -40,7 +39,7 @@ def from_json(cls, data: dict, workflow: "Workflow"): "Parsing task failed.", ParsingFailedOn.TASK, f"Unknown task type '{data['type']}'.", - { "type": "wrong_type" }, + {"type": "wrong_type"}, ) def to_json(self, reg_map: dict[str, int] = None) -> dict: @@ -137,13 +136,23 @@ def from_json(cls, data: dict, workflow: "Workflow"): e.set_data("channel_index", str(i)) raise e - for key in ["name", "exclusive", "pid_namespaces", "pipes", "output_register", 
"filesystems", "mount_namespaces", "resource_groups", "processes"]: + for key in [ + "name", + "exclusive", + "pid_namespaces", + "pipes", + "output_register", + "filesystems", + "mount_namespaces", + "resource_groups", + "processes", + ]: if key not in data: raise WorkflowParsingError( "Parsing task failed.", ParsingFailedOn.TASK, f"Missing key '{key}'.", - { "type": "missing_key" }, + {"type": "missing_key"}, ) for key in [("hard_time_limit", int), ("exclusive", bool), ("pid_namespaces", int), ("pipes", int)]: @@ -152,7 +161,7 @@ def from_json(cls, data: dict, workflow: "Workflow"): "Parsing task failed.", ParsingFailedOn.TASK, f"Key '{key[0]}' must be of type {key[1].__name__}.", - { "type": "wrong_type" }, + {"type": "wrong_type"}, ) task = cls( @@ -311,20 +320,26 @@ def from_json(cls, data: dict, workflow: "Workflow"): :param data: The dictionary to create the task from. :param workflow: The workflow the task belongs to. """ - for key, type in [("name", str), ("reactive", bool), ("input_registers", list), ("output_registers", list), ("script", str)]: + for key, type in [ + ("name", str), + ("reactive", bool), + ("input_registers", list), + ("output_registers", list), + ("script", str), + ]: if key not in data: raise WorkflowParsingError( "Parsing task failed.", ParsingFailedOn.TASK, f"Missing key '{key}'.", - { "type": "missing_key" }, + {"type": "missing_key"}, ) if not isinstance(data[key], type): raise WorkflowParsingError( "Parsing task failed.", ParsingFailedOn.TASK, f"Key '{key}' must be of type {type.__name__}.", - { "type": "wrong_type" }, + {"type": "wrong_type"}, ) return cls( diff --git a/src/sio3pack/workflow/workflow.py b/src/sio3pack/workflow/workflow.py index 8ca53b4..34e21ad 100644 --- a/src/sio3pack/workflow/workflow.py +++ b/src/sio3pack/workflow/workflow.py @@ -1,4 +1,4 @@ -from sio3pack.exceptions import WorkflowParsingError, ParsingFailedOn +from sio3pack.exceptions import ParsingFailedOn, WorkflowParsingError from sio3pack.files.file import File from sio3pack.workflow.object import Object, ObjectList, ObjectsManager from sio3pack.workflow.tasks import ExecutionTask, ScriptTask, Task @@ -26,9 +26,7 @@ def from_json(cls, data: dict): for key in ["name", "external_objects", "observable_objects", "observable_registers", "tasks"]: if key not in data: raise WorkflowParsingError( - "Parsing workflow failed.", - ParsingFailedOn.WORKFLOW, - f"Missing key '{key}'." + "Parsing workflow failed.", ParsingFailedOn.WORKFLOW, f"Missing key '{key}'." 
) workflow = cls(data["name"], data["external_objects"], data["observable_objects"], data["observable_registers"]) From e843114b3a9591ecb46310ca2a01ff6bbe32792f Mon Sep 17 00:00:00 2001 From: Mateusz Masiarz Date: Thu, 5 Jun 2025 16:01:14 +0200 Subject: [PATCH 04/10] Better error handling --- src/sio3pack/__init__.py | 8 +- src/sio3pack/django/common/handler.py | 2 +- src/sio3pack/exceptions/__init__.py | 1 + src/sio3pack/exceptions/packages.py | 36 ++++++++ src/sio3pack/exceptions/workflow.py | 4 +- src/sio3pack/packages/exceptions.py | 19 ---- .../packages/package/configuration.py | 4 + src/sio3pack/packages/package/handler.py | 8 +- src/sio3pack/packages/package/model.py | 38 +++----- src/sio3pack/packages/sinolpack/model.py | 89 +++++++++++++++++-- src/sio3pack/packages/sinolpack/workflows.py | 32 +++++-- tests/packages/sinolpack/test_sinolpack.py | 8 +- tests/packages/sinolpack/test_utils.py | 9 +- tests/packages/sinolpack/test_workflows.py | 1 + .../test_sio3pack/test_sinolpack.py | 4 +- 15 files changed, 189 insertions(+), 74 deletions(-) create mode 100644 src/sio3pack/exceptions/packages.py delete mode 100644 src/sio3pack/packages/exceptions.py diff --git a/src/sio3pack/__init__.py b/src/sio3pack/__init__.py index 9fa39ea..d1c2064 100644 --- a/src/sio3pack/__init__.py +++ b/src/sio3pack/__init__.py @@ -1,7 +1,7 @@ __version__ = "1.0.0.dev3" +from sio3pack.exceptions import ImproperlyConfigured from sio3pack.files import LocalFile -from sio3pack.packages.exceptions import * from sio3pack.packages.package import Package __all__ = ["from_file", "from_db"] @@ -39,4 +39,8 @@ def from_db(problem_id: int, configuration: SIO3PackConfig = None) -> Package: configuration.django_settings = settings return Package.from_db(problem_id, configuration) except ImportError: - raise ImproperlyConfigured("sio3pack is not installed with Django support.") + raise ImproperlyConfigured( + "sio3pack is not installed with Django support.", + "from_db function was used, but sio3pack isn't installed with Django support. " + "Read the documentation to learn more." 
+ ) diff --git a/src/sio3pack/django/common/handler.py b/src/sio3pack/django/common/handler.py index 27b91dd..be5e938 100644 --- a/src/sio3pack/django/common/handler.py +++ b/src/sio3pack/django/common/handler.py @@ -16,7 +16,7 @@ ) from sio3pack.files import LocalFile from sio3pack.files.remote_file import RemoteFile -from sio3pack.packages.exceptions import PackageAlreadyExists +from sio3pack.exceptions import PackageAlreadyExists from sio3pack.test import Test from sio3pack.workflow import Workflow diff --git a/src/sio3pack/exceptions/__init__.py b/src/sio3pack/exceptions/__init__.py index 8ec950e..e5b9a4e 100644 --- a/src/sio3pack/exceptions/__init__.py +++ b/src/sio3pack/exceptions/__init__.py @@ -1,2 +1,3 @@ from sio3pack.exceptions.general import SIO3PackException from sio3pack.exceptions.workflow import ParsingFailedOn, WorkflowCreationError, WorkflowParsingError +from sio3pack.exceptions.packages import UnknownPackageType, ImproperlyConfigured, PackageAlreadyExists, ProcessPackageError diff --git a/src/sio3pack/exceptions/packages.py b/src/sio3pack/exceptions/packages.py new file mode 100644 index 0000000..a98d286 --- /dev/null +++ b/src/sio3pack/exceptions/packages.py @@ -0,0 +1,36 @@ +from sio3pack.exceptions.general import SIO3PackException + + +class UnknownPackageType(SIO3PackException): + def __init__(self, arg: str | int) -> None: + if isinstance(arg, str): + self.path = arg + super().__init__( + f"Unknown package type for file {arg}.", + "Tried to load a package which is not a recognized package type. " + f"The package is located at: {arg}" + ) + else: + self.problem_id = arg + super().__init__( + f"Unknown package type for problem with id={arg}.", + "Tried to load a package from the database which does not exist or is not a recognized package type." + ) + + +class ImproperlyConfigured(SIO3PackException): + pass + + +class PackageAlreadyExists(SIO3PackException): + def __init__(self, problem_id: int) -> None: + self.problem_id = problem_id + super().__init__( + f"A package already exists for problem with id={problem_id}.", + "Tried to create a package for a problem which already has a package. " + "Please remove the existing package first or use a different problem ID." + ) + + +class ProcessPackageError(SIO3PackException): + pass diff --git a/src/sio3pack/exceptions/workflow.py b/src/sio3pack/exceptions/workflow.py index f4362fc..d837d9a 100644 --- a/src/sio3pack/exceptions/workflow.py +++ b/src/sio3pack/exceptions/workflow.py @@ -10,6 +10,7 @@ class WorkflowCreationError(SIO3PackException): class ParsingFailedOn(Enum): """Enum to represent the part of the workflow that failed to parse.""" + JSON = "json" WORKFLOW = "workflow" TASK = "task" CHANNEL = "channel" @@ -24,7 +25,7 @@ class ParsingFailedOn(Enum): class WorkflowParsingError(SIO3PackException): """Raised when there is an error parsing a workflow.""" - def __init__(self, message, failed_on: ParsingFailedOn, extra_msg: str = None, data: dict = None): + def __init__(self, message, failed_on: ParsingFailedOn, extra_msg: str = None, data: dict = None, full_message: str = None): """ Initialize the WorkflowParsingError. 
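A short sketch (outside the patch) of the package-level exceptions that replace packages/exceptions.py, assuming sio3pack is installed; the path and problem id below are illustrative values only.

    from sio3pack.exceptions import PackageAlreadyExists, UnknownPackageType

    # Each exception carries a short .message plus a longer prebuilt
    # .full_message, so callers can log the former and show the latter.
    exc = UnknownPackageType("/tmp/problem.zip")   # str argument -> path variant
    print(exc.path)          # /tmp/problem.zip
    print(exc.message)       # Unknown package type for file /tmp/problem.zip.
    print(exc.full_message)  # longer explanation that repeats the path

    dup = PackageAlreadyExists(42)
    print(dup.message)       # A package already exists for problem with id=42.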
@@ -35,6 +36,7 @@ def __init__(self, message, failed_on: ParsingFailedOn, extra_msg: str = None, d self.message = message self.failed_on = failed_on self.extra_msg = extra_msg + self._full_message = full_message self.data = data or {} def set_data(self, key: str, value: str): diff --git a/src/sio3pack/packages/exceptions.py b/src/sio3pack/packages/exceptions.py deleted file mode 100644 index 1f9e0e4..0000000 --- a/src/sio3pack/packages/exceptions.py +++ /dev/null @@ -1,19 +0,0 @@ -class UnknownPackageType(Exception): - def __init__(self, arg: str | int) -> None: - if isinstance(arg, str): - self.path = arg - super().__init__(f"Unknown package type for file {arg}.") - else: - self.problem_id = arg - super().__init__(f"Unknown package type for problem with id={arg}.") - - -class ImproperlyConfigured(Exception): - def __init__(self, message: str) -> None: - super().__init__(message) - - -class PackageAlreadyExists(Exception): - def __init__(self, problem_id: int) -> None: - self.problem_id = problem_id - super().__init__(f"A package already exists for problem with id={problem_id}.") diff --git a/src/sio3pack/packages/package/configuration.py b/src/sio3pack/packages/package/configuration.py index 52a2b09..d3089d3 100644 --- a/src/sio3pack/packages/package/configuration.py +++ b/src/sio3pack/packages/package/configuration.py @@ -43,6 +43,7 @@ def __init__( django_settings=None, compilers_config: dict[str, CompilerConfig] = None, extensions_config: dict[str, str] = None, + allow_unrecognized_files: bool = False, ): """ Initialize the configuration with Django settings. @@ -52,9 +53,12 @@ def __init__( and the values are CompilerConfig objects. :param extensions_config: Dictionary of language configurations. The keys are the file extensions, and the values are the corresponding languages. + :param allow_unrecognized_files: If True, allows unrecognized files in in/ and out/ directories. + This is useful when working with packages locally. """ self.django_settings = django_settings self.compilers_config = compilers_config if compilers_config else {} + self.allow_unrecognized_files = allow_unrecognized_files if extensions_config is None: self.extensions_config = { ".cpp": "cpp", diff --git a/src/sio3pack/packages/package/handler.py b/src/sio3pack/packages/package/handler.py index ff535c5..2cae3fa 100644 --- a/src/sio3pack/packages/package/handler.py +++ b/src/sio3pack/packages/package/handler.py @@ -1,6 +1,10 @@ -from sio3pack.packages.exceptions import ImproperlyConfigured +from sio3pack.exceptions import ImproperlyConfigured class NoDjangoHandler: def __call__(self, *args, **kwargs): - raise ImproperlyConfigured("sio3pack is not installed with Django support.") + raise ImproperlyConfigured( + "sio3pack is not installed with Django support.", + "from_db function was used, but sio3pack isn't installed with Django support. " + "Read the documentation to learn more." 
+ ) diff --git a/src/sio3pack/packages/package/model.py b/src/sio3pack/packages/package/model.py index 3e49b3a..a4ab81e 100644 --- a/src/sio3pack/packages/package/model.py +++ b/src/sio3pack/packages/package/model.py @@ -4,7 +4,7 @@ from sio3pack.exceptions import SIO3PackException from sio3pack.files import File, LocalFile -from sio3pack.packages.exceptions import ImproperlyConfigured, UnknownPackageType +from sio3pack.exceptions import ImproperlyConfigured, UnknownPackageType from sio3pack.packages.package.configuration import SIO3PackConfig from sio3pack.packages.package.handler import NoDjangoHandler from sio3pack.test import Test @@ -13,21 +13,6 @@ from sio3pack.workflow import WorkflowManager, WorkflowOperation -def wrap_exceptions(func): - """Decorator to catch exceptions and re-raise them as SIO3PackException.""" - - def decorator(*args, **kwargs): - return func(*args, **kwargs) - # try: - # return func(*args, **kwargs) - # except SIO3PackException: - # raise # Do not wrap SIO3PackExceptions again - # except Exception as e: - # raise SIO3PackException(f"SIO3Pack raised an exception in {func.__name__} function.", e) - - return decorator - - class Package(RegisteredSubclassesBase): """ Base class for all packages. @@ -56,7 +41,6 @@ def __init__(self): self.django = None @classmethod - @wrap_exceptions def identify(cls, file: LocalFile): """ Identify if the package is of this type. @@ -64,7 +48,6 @@ def identify(cls, file: LocalFile): raise NotImplementedError() @classmethod - @wrap_exceptions def from_file(cls, file: LocalFile, configuration=None): """ Create a package from a file. @@ -87,7 +70,6 @@ def _from_file(self, file: LocalFile, configuration=None): self.is_archive = False @classmethod - @wrap_exceptions def identify_db(cls, problem_id: int): """ Identify if the package is of this type. Should check if there @@ -96,7 +78,6 @@ def identify_db(cls, problem_id: int): raise NotImplementedError() @classmethod - @wrap_exceptions def from_db(cls, problem_id: int, configuration: SIO3PackConfig = None): """ Create a package from the database. If sio3pack isn't installed with Django @@ -147,7 +128,11 @@ def _setup_workflows_from_db(self): support, it should raise an ImproperlyConfigured exception. """ if not self.django_enabled: - raise ImproperlyConfigured("Django is not enabled.") + raise ImproperlyConfigured( + "Django is not enabled.", + "If you got this error by properly using SIO3Pack, report this. Otherwise, you should not " + "call private functions." 
+ ) cls = self._workflow_manager_class() self.workflow_manager = cls(self, self.django.workflows) @@ -167,19 +152,15 @@ def __getattr__(self, name: str) -> Any: def reload_config(self): pass - @wrap_exceptions def get_title(self, lang: str | None = None) -> str: raise NotImplementedError("This method should be implemented in subclasses.") - @wrap_exceptions def get_statement(self, lang: str | None = None) -> File | None: raise NotImplementedError("This method should be implemented in subclasses.") - pass def reload_tests(self): pass - @wrap_exceptions def get_test(self, test_id: str) -> Test: raise NotImplementedError("This method should be implemented in subclasses.") @@ -195,7 +176,6 @@ def has_verify(self) -> bool: """ return False - @wrap_exceptions def get_unpack_operation(self, return_func: callable = None) -> WorkflowOperation | None: return self.workflow_manager.get_unpack_operation(self.has_test_gen(), self.has_verify(), return_func) @@ -312,9 +292,11 @@ def get_file_language(self, file: File | str) -> str: if ext in self.configuration.extensions_config: return self.configuration.extensions_config[ext] else: - raise SIO3PackException(f"Unknown file extension '{ext}' for file '{file}'") + raise SIO3PackException( + f"Unknown file extension '{ext}' for file '{file}'", + "Tried to get the language of a file by its extension, but the extension is not recognized." + ) - @wrap_exceptions def save_to_db(self, problem_id: int): """ Save the package to the database. If sio3pack isn't installed with Django diff --git a/src/sio3pack/packages/sinolpack/model.py b/src/sio3pack/packages/sinolpack/model.py index f486329..693a869 100644 --- a/src/sio3pack/packages/sinolpack/model.py +++ b/src/sio3pack/packages/sinolpack/model.py @@ -6,8 +6,8 @@ import yaml +from sio3pack.exceptions import WorkflowParsingError, ParsingFailedOn, ProcessPackageError, ImproperlyConfigured from sio3pack.files import File, LocalFile -from sio3pack.packages.exceptions import ImproperlyConfigured from sio3pack.packages.package import Package from sio3pack.packages.package.configuration import SIO3PackConfig from sio3pack.packages.sinolpack import constants @@ -122,7 +122,12 @@ def _from_file(self, file: LocalFile, configuration: SIO3PackConfig = None): workflows = json.load(f) self.workflow_manager = SinolpackWorkflowManager(self, workflows) except json.JSONDecodeError as e: - raise ValueError(f"Invalid JSON in workflows.json: {e}") + raise WorkflowParsingError( + f"Invalid JSON in workflows.json: {e}", + ParsingFailedOn.JSON, + full_message="Invalid JSON in workflows.json file. " + "Please check the file for syntax errors.", + ) else: self.workflow_manager = self._default_workflow_manager() @@ -134,7 +139,11 @@ def _from_db(self, problem_id: int, configuration: SIO3PackConfig = None): # TODO: Workflows probably should be fetched only if they are needed, since this can be slow super()._setup_workflows_from_db() if not self.django_enabled: - raise ImproperlyConfigured("sio3pack is not installed with Django support.") + raise ImproperlyConfigured( + "sio3pack is not installed with Django support.", + "from_db function was used, but sio3pack isn't installed with Django support. " + "Read the documentation to learn more." 
+ ) def _workflow_manager_class(self) -> Type[WorkflowManager]: return SinolpackWorkflowManager @@ -192,6 +201,8 @@ def _process_config_yml(self): try: config = self.get_in_root("config.yml") self.config = yaml.safe_load(config.read()) + + # Support for local packages self.short_name = self.config.get("sinol_task_id", self.short_name) except FileNotFoundError: self.config = {} @@ -334,7 +345,14 @@ def _process_prog_files(self): lf = LocalFile(os.path.join(self.get_prog_dir(), file)) self.additional_files.append(lf) except FileNotFoundError: - pass + where = "extra_compilation_files" if file in self.config.get("extra_compilation_files", []) else "extra_execution_files" + raise ProcessPackageError( + f"Extra file '{file}' from {where} not found.", + f"The extra file '{file}' specified in the config.yml file under {where} does not exist in the " + f"prog/ directory of the problem package. " + f"Please check the package structure and ensure that the file is present or remove it from the config." + ) + extensions = self.get_submittable_extensions() + ["sh"] self.special_files: dict[str, File | None] = {} for file in self.special_file_types(): @@ -359,7 +377,12 @@ def _process_extra_files(self): lf = LocalFile(os.path.join(self.rootdir, file)) self.extra_files[file] = lf except FileNotFoundError: - pass + raise ProcessPackageError( + f"Extra file '{file}' not found.", + f"The extra file '{file}' specified in the config.yml file does not exist in the package. " + f"Path to this file should be relative to the root directory of the package. " + f"Please check the package structure and ensure that the file is present or remove it from the config." + ) def get_extra_file(self, package_path: str) -> File | None: """ @@ -479,6 +502,12 @@ def _process_existing_tests(self): test_id = match.group(1) group = match.group(2) test_ids.add((test_id, group, test_name)) + elif not self.configuration.allow_unrecognized_files: + raise ProcessPackageError( + f"Unrecognized test in {ext} directory: {file}", + f"All files in the {ext} directory should match the pattern: " + f"{self._get_test_regex()}." + ) # TODO: Sort this properly test_ids = sorted(test_ids) self.tests = [] @@ -566,7 +595,49 @@ def _unpack_return_data(self, data: dict): Adds data received from the unpack operation to the package. """ # TODO: implement. The unpack will probably return tests, so we need to process them. - pass + + # After parsing new tests, verify them + self._verify_tests() + self._verify_limits() + + def _verify_tests(self): + """ + Verifies the tests in the package. This function should be called after unpacking + new tests to ensure they are valid and conform to the expected structure. + """ + for test in self.tests: + if not test.in_file: + raise ProcessPackageError( + f"Input test is missing for test {test.test_id}.", + "All tests must have input and output files. The input file is missing for test " + f"{test.test_id}. Please check the package structure and ingen." + ) + if not test.out_file: + raise ProcessPackageError( + f"Output test is missing for test {test.test_id}.", + "All tests must have input and output files. The output file is missing for test " + f"{test.test_id}. Please check the package structure and outgen." + ) + + def _verify_limits(self): + """ + Verifies that sum of time limits for all tests does not exceed + the maximum allowed time limit for the problem. 
+ """ + limit = self._get_from_django_settings("MAX_TEST_TIME_LIMIT_PER_PROBLEM") + if limit is None: + return + tl_sum = 0 + for test in self.tests: + tl_sum += self.get_time_limit_for_test(test, "cpp") # Assuming C++ as the default language + if tl_sum > limit: + tl_sum /= 1000 # Convert to seconds + limit /= 1000 # Convert to seconds + raise ProcessPackageError( + "Sum of time limits for all tests exceeds the maximum allowed limit.", + f"The sum of time limits for all tests ({tl_sum} seconds) exceeds the maximum allowed limit ({limit} seconds). " + f"Please adjust the time limits in the config.yml file or reduce the number of tests." + ) def save_to_db(self, problem_id: int): """ @@ -575,7 +646,11 @@ def save_to_db(self, problem_id: int): """ self._setup_django_handler(problem_id) if not self.django_enabled: - raise ImproperlyConfigured("sio3pack is not installed with Django support.") + raise ImproperlyConfigured( + "sio3pack is not installed with Django support.", + "save_to_db function was used, but sio3pack isn't installed with Django support. " + "Read the documentation to learn more." + ) self.django.save_to_db() def _get_compiler_flags(self, lang: str) -> list[str]: diff --git a/src/sio3pack/packages/sinolpack/workflows.py b/src/sio3pack/packages/sinolpack/workflows.py index 9fa769a..e687423 100644 --- a/src/sio3pack/packages/sinolpack/workflows.py +++ b/src/sio3pack/packages/sinolpack/workflows.py @@ -67,7 +67,10 @@ def _get_ingen_workflow(self) -> Workflow: ) ingen_path = self.package.get_ingen_path() if not ingen_path: - raise WorkflowCreationError("Creating ingen workflow when no ingen present") + raise WorkflowCreationError( + "Creating ingen workflow when no ingen present", + "Creating workflow for running ingen isn't possible, because ingen is not present in the package.", + ) ingen = workflow.objects_manager.get_or_create_object(ingen_path) workflow.add_external_object(ingen) @@ -216,17 +219,28 @@ def _add_extra_files_to_replace(self, workflow: Workflow, to_replace: dict[str, for file in extra_files: extra_file = self.package.get_extra_file(file) if extra_file is None: - raise WorkflowCreationError(f"Extra file {file} not found in package.") + raise WorkflowCreationError( + f"Extra file {file} not found in package.", + f"Extra file '{file}' was used in the workflow, but it was not found in the package. " + f"Extra files have to be specified in the config and their path should be relative to the package root.", + ) to_replace[f""] = extra_file.path executable_extra = workflow.find_by_regex_in_objects(r"^$", 1) for file in executable_extra: extra_file = self.package.get_extra_file(file) if extra_file is None: - raise WorkflowCreationError(f"Extra file {file} not found in package.") + raise WorkflowCreationError( + f"Extra file {file} not found in package.", + f"Extra file '{file}' was used in the workflow, but it was not found in the package. " + f"Extra files have to be specified in the config and their path should be relative to the package root.", + ) extra_file = self.package.get_executable_path(extra_file) if extra_file is None: - raise WorkflowCreationError(f"Extra file {file} is not executable.") + raise WorkflowCreationError( + f"Extra file {file} is not executable.", + f"Extra file '{file}' was used in the workflow, but it is not executable." 
+ ) to_replace[f""] = extra_file return to_replace @@ -309,7 +323,10 @@ def _get_generate_tests_workflows(self, data: dict) -> tuple[Workflow, bool]: # Compile outgen outgen_path = self.package.get_outgen_path() if not outgen_path: - raise WorkflowCreationError("Creating outgen workflow when no model solution present") + raise WorkflowCreationError( + "Creating outgen workflow when no model solution present", + "Creating workflow for running outgen isn't possible, because outgen is not present in the package.", + ) outgen_obj = workflow.objects_manager.get_or_create_object(outgen_path) workflow.add_external_object(outgen_obj) compile_wf, outgen_exe_path = self.get_compile_file_workflow(outgen_path) @@ -450,7 +467,10 @@ def _get_verify_workflows(self, data: dict) -> tuple[Workflow, bool]: # Compile inwer inwer_path = self.package.get_inwer_path() if not inwer_path: - raise WorkflowCreationError("Creating inwer workflow when no inwer present") + raise WorkflowCreationError( + "Creating inwer workflow when no inwer present", + "Creating workflow for running inwer isn't possible, because inwer is not present in the package.", + ) inwer_obj = workflow.objects_manager.get_or_create_object(inwer_path) workflow.add_external_object(inwer_obj) compile_wf, inwer_exe_path = self.get_compile_file_workflow(inwer_path) diff --git a/tests/packages/sinolpack/test_sinolpack.py b/tests/packages/sinolpack/test_sinolpack.py index d5fc86a..4cabd6a 100644 --- a/tests/packages/sinolpack/test_sinolpack.py +++ b/tests/packages/sinolpack/test_sinolpack.py @@ -1,6 +1,8 @@ import pytest import sio3pack +from sio3pack import SIO3PackConfig +from sio3pack.exceptions import ImproperlyConfigured from tests.fixtures import Compression, PackageInfo, get_archived_package, get_package from tests.packages.sinolpack.utils import common_checks @@ -8,7 +10,7 @@ @pytest.mark.parametrize("get_archived_package", [("simple", c) for c in Compression], indirect=True) def test_from_file(get_archived_package): package_info: PackageInfo = get_archived_package() - package = sio3pack.from_file(package_info.path) + package = sio3pack.from_file(package_info.path, SIO3PackConfig(allow_unrecognized_files=True)) common_checks(package_info, package) if package_info.is_archive(): assert package.is_archive @@ -20,9 +22,9 @@ def test_from_file(get_archived_package): @pytest.mark.parametrize("get_package", ["simple"], indirect=True) def test_no_django(get_package): package_info: PackageInfo = get_package() - with pytest.raises(sio3pack.ImproperlyConfigured): + with pytest.raises(ImproperlyConfigured): sio3pack.from_db(1) package = sio3pack.from_file(package_info.path) - with pytest.raises(sio3pack.ImproperlyConfigured): + with pytest.raises(ImproperlyConfigured): package.save_to_db(1) diff --git a/tests/packages/sinolpack/test_utils.py b/tests/packages/sinolpack/test_utils.py index a6c91e5..cac2e7f 100644 --- a/tests/packages/sinolpack/test_utils.py +++ b/tests/packages/sinolpack/test_utils.py @@ -1,6 +1,7 @@ import pytest import sio3pack +from sio3pack import SIO3PackConfig from sio3pack.packages.sinolpack import Sinolpack, constants from sio3pack.test import Test from tests.fixtures import PackageInfo, get_package @@ -9,7 +10,7 @@ @pytest.mark.parametrize("get_package", ["simple"], indirect=True) def test_get_test_id(get_package): package_info: PackageInfo = get_package() - package: Sinolpack = sio3pack.from_file(package_info.path) + package: Sinolpack = sio3pack.from_file(package_info.path, SIO3PackConfig(allow_unrecognized_files=True)) assert 
package.get_test_id_from_filename("abc1a.in") == "1a" assert package.get_test_id_from_filename("abc0.in") == "0" @@ -26,7 +27,7 @@ def test_get_test_id(get_package): @pytest.mark.parametrize("get_package", ["simple"], indirect=True) def test_get_group(get_package): package_info: PackageInfo = get_package() - package: Sinolpack = sio3pack.from_file(package_info.path) + package: Sinolpack = sio3pack.from_file(package_info.path, SIO3PackConfig(allow_unrecognized_files=True)) assert package.get_group_from_filename("abc1a.in") == "1" assert package.get_group_from_filename("abc0.in") == "0" @@ -43,7 +44,7 @@ def test_get_group(get_package): @pytest.mark.parametrize("get_package", ["simple"], indirect=True) def test_get_corresponding_out(get_package): package_info: PackageInfo = get_package() - package: Sinolpack = sio3pack.from_file(package_info.path) + package: Sinolpack = sio3pack.from_file(package_info.path, SIO3PackConfig(allow_unrecognized_files=True)) assert package.get_corresponding_out_filename("abc1a.in") == "abc1a.out" assert package.get_corresponding_out_filename("abc0.in") == "abc0.out" @@ -56,7 +57,7 @@ def test_get_corresponding_out(get_package): @pytest.mark.parametrize("get_package", ["simple"], indirect=True) def test_get_limits(get_package): package_info: PackageInfo = get_package() - package: Sinolpack = sio3pack.from_file(package_info.path) + package: Sinolpack = sio3pack.from_file(package_info.path, SIO3PackConfig(allow_unrecognized_files=True)) package.config = { "time_limit": 1000, diff --git a/tests/packages/sinolpack/test_workflows.py b/tests/packages/sinolpack/test_workflows.py index 956d775..1c60a93 100644 --- a/tests/packages/sinolpack/test_workflows.py +++ b/tests/packages/sinolpack/test_workflows.py @@ -26,6 +26,7 @@ def _get_run_types() -> list[str]: def _get_package(package_info: PackageInfo, type: str, config: SIO3PackConfig = None): config = config or SIO3PackConfig.detect() + config.allow_unrecognized_files = True if type == "file": return sio3pack.from_file(package_info.path, config) elif type == "db": diff --git a/tests/test_django/test_sio3pack/test_sinolpack.py b/tests/test_django/test_sio3pack/test_sinolpack.py index cf6ab1d..b7be39e 100644 --- a/tests/test_django/test_sio3pack/test_sinolpack.py +++ b/tests/test_django/test_sio3pack/test_sinolpack.py @@ -16,6 +16,8 @@ def _save_and_test_simple(package_info: PackageInfo, config: SIO3PackConfig = None) -> tuple[Sinolpack, SIO3Package]: assert package_info.type == "sinolpack" + config = config or SIO3PackConfig(allow_unrecognized_files=True) + config.allow_unrecognized_files = True package = sio3pack.from_file(package_info.path, config) assert isinstance(package, Sinolpack) package.save_to_db(1) @@ -33,7 +35,7 @@ def test_simple(get_archived_package): assert package.get_title() == db_package.full_name - with pytest.raises(sio3pack.PackageAlreadyExists): + with pytest.raises(sio3pack.exceptions.packages.PackageAlreadyExists): package.save_to_db(1) From 8d38be18f0cc5cea1a0cbb1b75c055144db1e202 Mon Sep 17 00:00:00 2001 From: Mateusz Masiarz Date: Thu, 5 Jun 2025 16:02:16 +0200 Subject: [PATCH 05/10] Run formatters --- src/sio3pack/__init__.py | 2 +- src/sio3pack/django/common/handler.py | 3 +- src/sio3pack/exceptions/__init__.py | 7 +++- src/sio3pack/exceptions/packages.py | 7 ++-- src/sio3pack/exceptions/workflow.py | 4 ++- src/sio3pack/packages/package/handler.py | 2 +- src/sio3pack/packages/package/model.py | 7 ++-- src/sio3pack/packages/sinolpack/model.py | 36 +++++++++++--------- 
src/sio3pack/packages/sinolpack/workflows.py | 2 +- 9 files changed, 38 insertions(+), 32 deletions(-) diff --git a/src/sio3pack/__init__.py b/src/sio3pack/__init__.py index d1c2064..8504354 100644 --- a/src/sio3pack/__init__.py +++ b/src/sio3pack/__init__.py @@ -42,5 +42,5 @@ def from_db(problem_id: int, configuration: SIO3PackConfig = None) -> Package: raise ImproperlyConfigured( "sio3pack is not installed with Django support.", "from_db function was used, but sio3pack isn't installed with Django support. " - "Read the documentation to learn more." + "Read the documentation to learn more.", ) diff --git a/src/sio3pack/django/common/handler.py b/src/sio3pack/django/common/handler.py index be5e938..023e940 100644 --- a/src/sio3pack/django/common/handler.py +++ b/src/sio3pack/django/common/handler.py @@ -4,7 +4,6 @@ from django.core.files import File from django.db import transaction -import sio3pack from sio3pack.django.common.models import ( SIO3Package, SIO3PackMainModelSolution, @@ -14,9 +13,9 @@ SIO3PackTest, SIO3PackWorkflow, ) +from sio3pack.exceptions import PackageAlreadyExists from sio3pack.files import LocalFile from sio3pack.files.remote_file import RemoteFile -from sio3pack.exceptions import PackageAlreadyExists from sio3pack.test import Test from sio3pack.workflow import Workflow diff --git a/src/sio3pack/exceptions/__init__.py b/src/sio3pack/exceptions/__init__.py index e5b9a4e..659940a 100644 --- a/src/sio3pack/exceptions/__init__.py +++ b/src/sio3pack/exceptions/__init__.py @@ -1,3 +1,8 @@ from sio3pack.exceptions.general import SIO3PackException +from sio3pack.exceptions.packages import ( + ImproperlyConfigured, + PackageAlreadyExists, + ProcessPackageError, + UnknownPackageType, +) from sio3pack.exceptions.workflow import ParsingFailedOn, WorkflowCreationError, WorkflowParsingError -from sio3pack.exceptions.packages import UnknownPackageType, ImproperlyConfigured, PackageAlreadyExists, ProcessPackageError diff --git a/src/sio3pack/exceptions/packages.py b/src/sio3pack/exceptions/packages.py index a98d286..79ce952 100644 --- a/src/sio3pack/exceptions/packages.py +++ b/src/sio3pack/exceptions/packages.py @@ -7,14 +7,13 @@ def __init__(self, arg: str | int) -> None: self.path = arg super().__init__( f"Unknown package type for file {arg}.", - "Tried to load a package which is not a recognized package type. " - f"The package is located at: {arg}" + "Tried to load a package which is not a recognized package type. " f"The package is located at: {arg}", ) else: self.problem_id = arg super().__init__( f"Unknown package type for problem with id={arg}.", - "Tried to load a package from the database which does not exist or is not a recognized package type." + "Tried to load a package from the database which does not exist or is not a recognized package type.", ) @@ -28,7 +27,7 @@ def __init__(self, problem_id: int) -> None: super().__init__( f"A package already exists for problem with id={problem_id}.", "Tried to create a package for a problem which already has a package. " - "Please remove the existing package first or use a different problem ID." 
+ "Please remove the existing package first or use a different problem ID.", ) diff --git a/src/sio3pack/exceptions/workflow.py b/src/sio3pack/exceptions/workflow.py index d837d9a..ca39d5f 100644 --- a/src/sio3pack/exceptions/workflow.py +++ b/src/sio3pack/exceptions/workflow.py @@ -25,7 +25,9 @@ class ParsingFailedOn(Enum): class WorkflowParsingError(SIO3PackException): """Raised when there is an error parsing a workflow.""" - def __init__(self, message, failed_on: ParsingFailedOn, extra_msg: str = None, data: dict = None, full_message: str = None): + def __init__( + self, message, failed_on: ParsingFailedOn, extra_msg: str = None, data: dict = None, full_message: str = None + ): """ Initialize the WorkflowParsingError. diff --git a/src/sio3pack/packages/package/handler.py b/src/sio3pack/packages/package/handler.py index 2cae3fa..20f4e5d 100644 --- a/src/sio3pack/packages/package/handler.py +++ b/src/sio3pack/packages/package/handler.py @@ -6,5 +6,5 @@ def __call__(self, *args, **kwargs): raise ImproperlyConfigured( "sio3pack is not installed with Django support.", "from_db function was used, but sio3pack isn't installed with Django support. " - "Read the documentation to learn more." + "Read the documentation to learn more.", ) diff --git a/src/sio3pack/packages/package/model.py b/src/sio3pack/packages/package/model.py index a4ab81e..c93a069 100644 --- a/src/sio3pack/packages/package/model.py +++ b/src/sio3pack/packages/package/model.py @@ -2,9 +2,8 @@ import os from typing import Any, Type -from sio3pack.exceptions import SIO3PackException +from sio3pack.exceptions import ImproperlyConfigured, SIO3PackException, UnknownPackageType from sio3pack.files import File, LocalFile -from sio3pack.exceptions import ImproperlyConfigured, UnknownPackageType from sio3pack.packages.package.configuration import SIO3PackConfig from sio3pack.packages.package.handler import NoDjangoHandler from sio3pack.test import Test @@ -131,7 +130,7 @@ def _setup_workflows_from_db(self): raise ImproperlyConfigured( "Django is not enabled.", "If you got this error by properly using SIO3Pack, report this. Otherwise, you should not " - "call private functions." + "call private functions.", ) cls = self._workflow_manager_class() self.workflow_manager = cls(self, self.django.workflows) @@ -294,7 +293,7 @@ def get_file_language(self, file: File | str) -> str: else: raise SIO3PackException( f"Unknown file extension '{ext}' for file '{file}'", - "Tried to get the language of a file by its extension, but the extension is not recognized." 
+ "Tried to get the language of a file by its extension, but the extension is not recognized.", ) def save_to_db(self, problem_id: int): diff --git a/src/sio3pack/packages/sinolpack/model.py b/src/sio3pack/packages/sinolpack/model.py index 693a869..87bbb8e 100644 --- a/src/sio3pack/packages/sinolpack/model.py +++ b/src/sio3pack/packages/sinolpack/model.py @@ -6,7 +6,7 @@ import yaml -from sio3pack.exceptions import WorkflowParsingError, ParsingFailedOn, ProcessPackageError, ImproperlyConfigured +from sio3pack.exceptions import ImproperlyConfigured, ParsingFailedOn, ProcessPackageError, WorkflowParsingError from sio3pack.files import File, LocalFile from sio3pack.packages.package import Package from sio3pack.packages.package.configuration import SIO3PackConfig @@ -125,8 +125,7 @@ def _from_file(self, file: LocalFile, configuration: SIO3PackConfig = None): raise WorkflowParsingError( f"Invalid JSON in workflows.json: {e}", ParsingFailedOn.JSON, - full_message="Invalid JSON in workflows.json file. " - "Please check the file for syntax errors.", + full_message="Invalid JSON in workflows.json file. " "Please check the file for syntax errors.", ) else: self.workflow_manager = self._default_workflow_manager() @@ -142,7 +141,7 @@ def _from_db(self, problem_id: int, configuration: SIO3PackConfig = None): raise ImproperlyConfigured( "sio3pack is not installed with Django support.", "from_db function was used, but sio3pack isn't installed with Django support. " - "Read the documentation to learn more." + "Read the documentation to learn more.", ) def _workflow_manager_class(self) -> Type[WorkflowManager]: @@ -345,12 +344,16 @@ def _process_prog_files(self): lf = LocalFile(os.path.join(self.get_prog_dir(), file)) self.additional_files.append(lf) except FileNotFoundError: - where = "extra_compilation_files" if file in self.config.get("extra_compilation_files", []) else "extra_execution_files" + where = ( + "extra_compilation_files" + if file in self.config.get("extra_compilation_files", []) + else "extra_execution_files" + ) raise ProcessPackageError( f"Extra file '{file}' from {where} not found.", f"The extra file '{file}' specified in the config.yml file under {where} does not exist in the " f"prog/ directory of the problem package. " - f"Please check the package structure and ensure that the file is present or remove it from the config." + f"Please check the package structure and ensure that the file is present or remove it from the config.", ) extensions = self.get_submittable_extensions() + ["sh"] @@ -381,7 +384,7 @@ def _process_extra_files(self): f"Extra file '{file}' not found.", f"The extra file '{file}' specified in the config.yml file does not exist in the package. " f"Path to this file should be relative to the root directory of the package. " - f"Please check the package structure and ensure that the file is present or remove it from the config." + f"Please check the package structure and ensure that the file is present or remove it from the config.", ) def get_extra_file(self, package_path: str) -> File | None: @@ -505,8 +508,7 @@ def _process_existing_tests(self): elif not self.configuration.allow_unrecognized_files: raise ProcessPackageError( f"Unrecognized test in {ext} directory: {file}", - f"All files in the {ext} directory should match the pattern: " - f"{self._get_test_regex()}." 
+ f"All files in the {ext} directory should match the pattern: " f"{self._get_test_regex()}.", ) # TODO: Sort this properly test_ids = sorted(test_ids) @@ -610,13 +612,13 @@ def _verify_tests(self): raise ProcessPackageError( f"Input test is missing for test {test.test_id}.", "All tests must have input and output files. The input file is missing for test " - f"{test.test_id}. Please check the package structure and ingen." + f"{test.test_id}. Please check the package structure and ingen.", ) if not test.out_file: raise ProcessPackageError( f"Output test is missing for test {test.test_id}.", "All tests must have input and output files. The output file is missing for test " - f"{test.test_id}. Please check the package structure and outgen." + f"{test.test_id}. Please check the package structure and outgen.", ) def _verify_limits(self): @@ -636,7 +638,7 @@ def _verify_limits(self): raise ProcessPackageError( "Sum of time limits for all tests exceeds the maximum allowed limit.", f"The sum of time limits for all tests ({tl_sum} seconds) exceeds the maximum allowed limit ({limit} seconds). " - f"Please adjust the time limits in the config.yml file or reduce the number of tests." + f"Please adjust the time limits in the config.yml file or reduce the number of tests.", ) def save_to_db(self, problem_id: int): @@ -646,11 +648,11 @@ def save_to_db(self, problem_id: int): """ self._setup_django_handler(problem_id) if not self.django_enabled: - raise ImproperlyConfigured( - "sio3pack is not installed with Django support.", - "save_to_db function was used, but sio3pack isn't installed with Django support. " - "Read the documentation to learn more." - ) + raise ImproperlyConfigured( + "sio3pack is not installed with Django support.", + "save_to_db function was used, but sio3pack isn't installed with Django support. " + "Read the documentation to learn more.", + ) self.django.save_to_db() def _get_compiler_flags(self, lang: str) -> list[str]: diff --git a/src/sio3pack/packages/sinolpack/workflows.py b/src/sio3pack/packages/sinolpack/workflows.py index e687423..70f7024 100644 --- a/src/sio3pack/packages/sinolpack/workflows.py +++ b/src/sio3pack/packages/sinolpack/workflows.py @@ -239,7 +239,7 @@ def _add_extra_files_to_replace(self, workflow: Workflow, to_replace: dict[str, if extra_file is None: raise WorkflowCreationError( f"Extra file {file} is not executable.", - f"Extra file '{file}' was used in the workflow, but it is not executable." + f"Extra file '{file}' was used in the workflow, but it is not executable.", ) to_replace[f""] = extra_file return to_replace From 719af4bc7cb6b80c363d8b7d15773a6c0d7c592d Mon Sep 17 00:00:00 2001 From: Mateusz Masiarz Date: Thu, 5 Jun 2025 16:11:52 +0200 Subject: [PATCH 06/10] Docstring for documentation --- src/sio3pack/exceptions/general.py | 13 ++++++++---- src/sio3pack/exceptions/packages.py | 32 +++++++++++++++++++++++++++++ src/sio3pack/exceptions/workflow.py | 27 ++++++++++++++++++------ 3 files changed, 62 insertions(+), 10 deletions(-) diff --git a/src/sio3pack/exceptions/general.py b/src/sio3pack/exceptions/general.py index 84e99a7..5494e03 100644 --- a/src/sio3pack/exceptions/general.py +++ b/src/sio3pack/exceptions/general.py @@ -1,12 +1,17 @@ class SIO3PackException(Exception): - """A wrapper for all exceptions raised by SIO3Pack.""" + """ + A base class for all custom exceptions raised by SIO3Pack. - def __init__(self, message, full_message=None): + :param str message: A short description of the error. 
+ :param str full_message: A detailed description of the error, if available. + """ + + def __init__(self, message: str, full_message: str = None): """ Initialize the SIO3PackException. - :param message: A short description of the error. - :param full_message: A detailed description of the error, if available. + :param str message: A short description of the error. + :param str full_message: A detailed description of the error, if available. """ super().__init__(message) self.message = message diff --git a/src/sio3pack/exceptions/packages.py b/src/sio3pack/exceptions/packages.py index 79ce952..b8a14f2 100644 --- a/src/sio3pack/exceptions/packages.py +++ b/src/sio3pack/exceptions/packages.py @@ -2,7 +2,19 @@ class UnknownPackageType(SIO3PackException): + """ + Exception raised when trying to load a package of an unknown type. + This can happen when the package file is not recognized or when the package + is not a valid package type in the database. + + :param str | int arg: The path to the package file or the problem ID. + """ def __init__(self, arg: str | int) -> None: + """ + Initialize the UnknownPackageType exception. + + :param str | int arg: The path to the package file or the problem ID. + """ if isinstance(arg, str): self.path = arg super().__init__( @@ -18,11 +30,27 @@ def __init__(self, arg: str | int) -> None: class ImproperlyConfigured(SIO3PackException): + """ + Exception raised when the package is improperly configured, i.e., using Django features + without Django being installed. + """ pass class PackageAlreadyExists(SIO3PackException): + """ + Exception raised when trying to create a package for a problem that already has a package. + + :param int problem_id: The ID of the problem for which the package already exists. + """ + def __init__(self, problem_id: int) -> None: + """ + Initialize the PackageAlreadyExists exception. + + :param int problem_id: The ID of the problem for which the package already exists. + """ + self.problem_id = problem_id super().__init__( f"A package already exists for problem with id={problem_id}.", @@ -32,4 +60,8 @@ def __init__(self, problem_id: int) -> None: class ProcessPackageError(SIO3PackException): + """ + Exception raised when there is an error processing a package. + """ + pass diff --git a/src/sio3pack/exceptions/workflow.py b/src/sio3pack/exceptions/workflow.py index ca39d5f..a33aa60 100644 --- a/src/sio3pack/exceptions/workflow.py +++ b/src/sio3pack/exceptions/workflow.py @@ -4,11 +4,15 @@ class WorkflowCreationError(SIO3PackException): - """Raised when there is an error creating a workflow.""" + """ + Raised when there is an error creating a workflow. + """ class ParsingFailedOn(Enum): - """Enum to represent the part of the workflow that failed to parse.""" + """ + Enum to represent the part of the workflow that failed to parse. + """ JSON = "json" WORKFLOW = "workflow" @@ -23,16 +27,27 @@ class ParsingFailedOn(Enum): class WorkflowParsingError(SIO3PackException): - """Raised when there is an error parsing a workflow.""" + """ + Raised when there is an error parsing a workflow configuration. + + :param str message: A short description of the error. + :param ParsingFailedOn failed_on: The part of the workflow that failed to parse. + :param str extra_msg: Additional message to append to the error message. + :param dict data: Additional data related to the error. + :param str full_message: A full message describing the error, if available. 
+ """ def __init__( - self, message, failed_on: ParsingFailedOn, extra_msg: str = None, data: dict = None, full_message: str = None + self, message: str, failed_on: ParsingFailedOn, extra_msg: str = None, data: dict = None, full_message: str = None ): """ Initialize the WorkflowParsingError. - :param message: A short description of the error. - :param failed_on: The part of the workflow that failed to parse. + :param str message: A short description of the error. + :param ParsingFailedOn failed_on: The part of the workflow that failed to parse. + :param str extra_msg: Additional message to append to the error message. + :param dict data: Additional data related to the error. + :param str full_message: A full message describing the error, if available. """ super().__init__(message) self.message = message From 1d43e6e232d5bce3bcd85cf9f80cecd1244a5aaf Mon Sep 17 00:00:00 2001 From: Mateusz Masiarz Date: Thu, 5 Jun 2025 16:12:32 +0200 Subject: [PATCH 07/10] Run formatters --- src/sio3pack/exceptions/packages.py | 2 ++ src/sio3pack/exceptions/workflow.py | 7 ++++++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/src/sio3pack/exceptions/packages.py b/src/sio3pack/exceptions/packages.py index b8a14f2..3751c05 100644 --- a/src/sio3pack/exceptions/packages.py +++ b/src/sio3pack/exceptions/packages.py @@ -9,6 +9,7 @@ class UnknownPackageType(SIO3PackException): :param str | int arg: The path to the package file or the problem ID. """ + def __init__(self, arg: str | int) -> None: """ Initialize the UnknownPackageType exception. @@ -34,6 +35,7 @@ class ImproperlyConfigured(SIO3PackException): Exception raised when the package is improperly configured, i.e., using Django features without Django being installed. """ + pass diff --git a/src/sio3pack/exceptions/workflow.py b/src/sio3pack/exceptions/workflow.py index a33aa60..b2f92a6 100644 --- a/src/sio3pack/exceptions/workflow.py +++ b/src/sio3pack/exceptions/workflow.py @@ -38,7 +38,12 @@ class WorkflowParsingError(SIO3PackException): """ def __init__( - self, message: str, failed_on: ParsingFailedOn, extra_msg: str = None, data: dict = None, full_message: str = None + self, + message: str, + failed_on: ParsingFailedOn, + extra_msg: str = None, + data: dict = None, + full_message: str = None, ): """ Initialize the WorkflowParsingError. 
From a83dfffc8a39ee1b1653b7726868fbca74704867 Mon Sep 17 00:00:00 2001 From: Mateusz Masiarz Date: Thu, 5 Jun 2025 16:15:32 +0200 Subject: [PATCH 08/10] Fix test --- tests/packages/sinolpack/test_sinolpack.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/packages/sinolpack/test_sinolpack.py b/tests/packages/sinolpack/test_sinolpack.py index 4cabd6a..62bfe3f 100644 --- a/tests/packages/sinolpack/test_sinolpack.py +++ b/tests/packages/sinolpack/test_sinolpack.py @@ -25,6 +25,6 @@ def test_no_django(get_package): with pytest.raises(ImproperlyConfigured): sio3pack.from_db(1) - package = sio3pack.from_file(package_info.path) + package = sio3pack.from_file(package_info.path, SIO3PackConfig(allow_unrecognized_files=True)) with pytest.raises(ImproperlyConfigured): package.save_to_db(1) From 9e4944a5bc3709bb322952e3227f1fd74b11b2ed Mon Sep 17 00:00:00 2001 From: Mateusz Masiarz Date: Fri, 6 Jun 2025 15:01:30 +0200 Subject: [PATCH 09/10] Bump version --- src/sio3pack/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sio3pack/__init__.py b/src/sio3pack/__init__.py index 8504354..a611072 100644 --- a/src/sio3pack/__init__.py +++ b/src/sio3pack/__init__.py @@ -1,4 +1,4 @@ -__version__ = "1.0.0.dev3" +__version__ = "1.0.0.dev4" from sio3pack.exceptions import ImproperlyConfigured from sio3pack.files import LocalFile From 6edbd395188823f0846164febd45575a95fa38c6 Mon Sep 17 00:00:00 2001 From: Mateusz Masiarz Date: Fri, 6 Jun 2025 15:20:31 +0200 Subject: [PATCH 10/10] Cleanup --- src/sio3pack/exceptions/packages.py | 3 ++- src/sio3pack/files/local_file.py | 4 ++-- src/sio3pack/packages/sinolpack/model.py | 2 +- src/sio3pack/utils/archive.py | 2 +- src/sio3pack/visualizer/__init__.py | 2 +- 5 files changed, 7 insertions(+), 6 deletions(-) diff --git a/src/sio3pack/exceptions/packages.py b/src/sio3pack/exceptions/packages.py index 3751c05..b4ef54a 100644 --- a/src/sio3pack/exceptions/packages.py +++ b/src/sio3pack/exceptions/packages.py @@ -20,7 +20,8 @@ def __init__(self, arg: str | int) -> None: self.path = arg super().__init__( f"Unknown package type for file {arg}.", - "Tried to load a package which is not a recognized package type. " f"The package is located at: {arg}", + "Tried to load a package which is not a recognized package type. Read the documentation " + "to learn how to create a package and which are supported.", ) else: self.problem_id = arg diff --git a/src/sio3pack/files/local_file.py b/src/sio3pack/files/local_file.py index b80ac81..caa0dbf 100644 --- a/src/sio3pack/files/local_file.py +++ b/src/sio3pack/files/local_file.py @@ -26,7 +26,7 @@ def get_file_matching_extension(cls, dir: str, filename: str, extensions: list[s path = os.path.join(dir, filename + "." + ext) if os.path.exists(path): return cls(path) - raise FileNotFoundError + raise FileNotFoundError("No file found with the given filename and extensions in the directory.") def __init__(self, path: str, exists=True): """ @@ -37,7 +37,7 @@ def __init__(self, path: str, exists=True): :raises FileNotFoundError: If the file doesn't exist. 
""" if not os.path.exists(path) and exists: - raise FileNotFoundError + raise FileNotFoundError(f"File {path} does not exist.") super().__init__(path) self.filename = os.path.basename(path) diff --git a/src/sio3pack/packages/sinolpack/model.py b/src/sio3pack/packages/sinolpack/model.py index 87bbb8e..23f3885 100644 --- a/src/sio3pack/packages/sinolpack/model.py +++ b/src/sio3pack/packages/sinolpack/model.py @@ -125,7 +125,7 @@ def _from_file(self, file: LocalFile, configuration: SIO3PackConfig = None): raise WorkflowParsingError( f"Invalid JSON in workflows.json: {e}", ParsingFailedOn.JSON, - full_message="Invalid JSON in workflows.json file. " "Please check the file for syntax errors.", + full_message="Invalid JSON in workflows.json file. Please check the file for syntax errors.", ) else: self.workflow_manager = self._default_workflow_manager() diff --git a/src/sio3pack/utils/archive.py b/src/sio3pack/utils/archive.py index 6a6b8f5..9a532c1 100644 --- a/src/sio3pack/utils/archive.py +++ b/src/sio3pack/utils/archive.py @@ -174,7 +174,7 @@ def check_files(self, to_path=None): extract_path = os.path.normpath(os.path.realpath(extract_path)) if not extract_path.startswith(target_path): raise UnsafeArchive( - "Archive member destination is outside the target" " directory. member: %s" % filename + "Archive member destination is outside the target directory. member: %s" % filename ) diff --git a/src/sio3pack/visualizer/__init__.py b/src/sio3pack/visualizer/__init__.py index 22b4551..485a3ad 100644 --- a/src/sio3pack/visualizer/__init__.py +++ b/src/sio3pack/visualizer/__init__.py @@ -7,7 +7,7 @@ import dash_cytoscape as cyto from dash import Input, Output, State, dcc, html except ImportError: - raise ImportError("Please install the 'dash' and 'dash-cytoscape' packages to use the visualizer.") + raise ImportError("Please install sio3pack with `pip install sio3pack[vis]` to use the visualizer.") import json import os