diff --git a/.gitignore b/.gitignore index 04cf23b71f..741a67ac53 100644 --- a/.gitignore +++ b/.gitignore @@ -127,3 +127,10 @@ package-lock.json # Dev .local_dev/ + +# AI +.cursor/ +CLAUDE.md +.agent-os/ +.cursorrules +.claude/ diff --git a/ocp_resources/pipeline.py b/ocp_resources/pipeline.py index 2df699f1dd..dc8d6039c5 100644 --- a/ocp_resources/pipeline.py +++ b/ocp_resources/pipeline.py @@ -1,45 +1,91 @@ -# API reference: https://tekton.dev/docs/pipelines/pipelines/ +# Generated using https://github.com/RedHatQE/openshift-python-wrapper/blob/main/scripts/resource/README.md -from ocp_resources.resource import MissingRequiredArgumentError, NamespacedResource + +from typing import Any +from ocp_resources.resource import NamespacedResource class Pipeline(NamespacedResource): - api_group = NamespacedResource.ApiGroup.TEKTON_DEV + """ + Pipeline describes a list of Tasks to execute. It expresses how outputs + of tasks feed into inputs of subsequent tasks. + """ + + api_group: str = NamespacedResource.ApiGroup.TEKTON_DEV def __init__( self, - tasks=None, - params=None, - final_parallel_tasks=None, - **kwargs, - ): - """ + description: str | None = None, + display_name: str | None = None, + finally_: list[Any] | None = None, + params: list[Any] | None = None, + results: list[Any] | None = None, + tasks: list[Any] | None = None, + workspaces: list[Any] | None = None, + **kwargs: Any, + ) -> None: + r""" Args: - tasks (str, optional): actions to perform in pipeline - params (dict, optional): params to support pipelines. - params can be set/changed based on tasks. - example: 'spec': {'params': [{'name': 'sourceTemplateName','type': 'string','default':'openshift'}, - {'name': 'sourceTemplateNamespace', 'type':'string', 'description': 'Namespace pf template'}]} - final_parallel_tasks (list, optional): a list of one or more to be executed in parallel after all other - tasks have completed in parallel. - spec section can't be empty. It requires at least one optional field. 
+ description (str): Description is a user-facing description of the pipeline that may be + used to populate a UI. + + display_name (str): DisplayName is a user-facing name of the pipeline that may be used to + populate a UI. + + finally_ (list[Any]): Finally declares the list of Tasks that execute just before leaving + the Pipeline i.e. either after all Tasks are finished executing + successfully or after a failure which would result in ending the + Pipeline + + Note: Parameter renamed from 'finally' to avoid Python keyword conflict. + params (list[Any]): Params declares a list of input parameters that must be supplied when + this Pipeline is run. + + results (list[Any]): Results are values that this pipeline can output once run + + tasks (list[Any]): Tasks declares the graph of Tasks that execute when this Pipeline is + run. + + workspaces (list[Any]): Workspaces declares a set of named workspaces that are expected to be + provided by a PipelineRun. + """ super().__init__(**kwargs) - # TODO: Add a check for tasks when bug https://issues.redhat.com/browse/SRVKP-3019 is resolved. 
- self.tasks = tasks + + self.description = description + self.display_name = display_name + self.finally_ = finally_ self.params = params - self.final_parallel_tasks = final_parallel_tasks + self.results = results + self.tasks = tasks + self.workspaces = workspaces def to_dict(self) -> None: super().to_dict() - if not self.kind_dict and not self.yaml_file: - if not (self.tasks or self.params or self.final_parallel_tasks): - raise MissingRequiredArgumentError(argument="'tasks' or 'params' or 'final_parallel_tasks'") + if not self.kind_dict and not self.yaml_file: self.res["spec"] = {} - if self.params: - self.res["spec"]["params"] = self.params - if self.tasks: - self.res["spec"]["tasks"] = self.tasks - if self.final_parallel_tasks: - self.res["spec"]["finally"] = self.final_parallel_tasks + _spec = self.res["spec"] + + if self.description is not None: + _spec["description"] = self.description + + if self.display_name is not None: + _spec["displayName"] = self.display_name + + if self.finally_ is not None: + _spec["finally"] = self.finally_ + + if self.params is not None: + _spec["params"] = self.params + + if self.results is not None: + _spec["results"] = self.results + + if self.tasks is not None: + _spec["tasks"] = self.tasks + + if self.workspaces is not None: + _spec["workspaces"] = self.workspaces + + # End of generated code diff --git a/ocp_resources/pipeline_run.py b/ocp_resources/pipeline_run.py new file mode 100644 index 0000000000..9ea3dfad5a --- /dev/null +++ b/ocp_resources/pipeline_run.py @@ -0,0 +1,89 @@ +# Generated using https://github.com/RedHatQE/openshift-python-wrapper/blob/main/scripts/resource/README.md + + +from typing import Any +from ocp_resources.resource import NamespacedResource + + +class PipelineRun(NamespacedResource): + """ + PipelineRun represents a single execution of a Pipeline. 
PipelineRuns are how + the graph of Tasks declared in a Pipeline are executed; they specify inputs + to Pipelines such as parameter values and capture operational aspects of the + Tasks execution such as service account and tolerations. Creating a + PipelineRun creates TaskRuns for Tasks in the referenced Pipeline. + """ + + api_group: str = NamespacedResource.ApiGroup.TEKTON_DEV + + def __init__( + self, + params: list[Any] | None = None, + pipeline_ref: dict[str, Any] | None = None, + pipeline_spec: Any | None = None, + task_run_specs: list[Any] | None = None, + task_run_template: dict[str, Any] | None = None, + timeouts: dict[str, Any] | None = None, + workspaces: list[Any] | None = None, + **kwargs: Any, + ) -> None: + r""" + Args: + params (list[Any]): Params is a list of parameter names and values. + + pipeline_ref (dict[str, Any]): PipelineRef can be used to refer to a specific instance of a Pipeline. + + pipeline_spec (Any): Specifying PipelineSpec can be disabled by setting `disable-inline- + spec` feature flag. See Pipeline.spec (API version: tekton.dev/v1) + + task_run_specs (list[Any]): TaskRunSpecs holds a set of runtime specs + + task_run_template (dict[str, Any]): TaskRunTemplate represent template of taskrun + + timeouts (dict[str, Any]): Time after which the Pipeline times out. Currently three keys are + accepted in the map pipeline, tasks and finally with + Timeouts.pipeline >= Timeouts.tasks + Timeouts.finally + + workspaces (list[Any]): Workspaces holds a set of workspace bindings that must match names + with those declared in the pipeline. 
+ + """ + super().__init__(**kwargs) + + self.params = params + self.pipeline_ref = pipeline_ref + self.pipeline_spec = pipeline_spec + self.task_run_specs = task_run_specs + self.task_run_template = task_run_template + self.timeouts = timeouts + self.workspaces = workspaces + + def to_dict(self) -> None: + super().to_dict() + + if not self.kind_dict and not self.yaml_file: + self.res["spec"] = {} + _spec = self.res["spec"] + + if self.params is not None: + _spec["params"] = self.params + + if self.pipeline_ref is not None: + _spec["pipelineRef"] = self.pipeline_ref + + if self.pipeline_spec is not None: + _spec["pipelineSpec"] = self.pipeline_spec + + if self.task_run_specs is not None: + _spec["taskRunSpecs"] = self.task_run_specs + + if self.task_run_template is not None: + _spec["taskRunTemplate"] = self.task_run_template + + if self.timeouts is not None: + _spec["timeouts"] = self.timeouts + + if self.workspaces is not None: + _spec["workspaces"] = self.workspaces + + # End of generated code diff --git a/ocp_resources/pipelineruns.py b/ocp_resources/pipelineruns.py deleted file mode 100644 index 9ab695c918..0000000000 --- a/ocp_resources/pipelineruns.py +++ /dev/null @@ -1,43 +0,0 @@ -# API reference: https://tekton.dev/docs/pipelines/pipelineruns/ - -from ocp_resources.resource import MissingRequiredArgumentError, NamespacedResource - - -class PipelineRun(NamespacedResource): - api_group = NamespacedResource.ApiGroup.TEKTON_DEV - - def __init__( - self, - pipelineref=None, - params=None, - service_account_name=None, - **kwargs, - ): - """ - Args: - pipelineref (str): Mandatory: Base pipeline to run pipelineruns - params (dict): Optional params to add during triggering a run. - params can be set/changed based on pipelineref. 
- example : params={"param_name1":"param_value1", "param_name2":"param_value2"} - service_account_name (str): Optional to provide service account - """ - super().__init__(**kwargs) - self.pipelineref = pipelineref - self.params = params - self.service_account_name = service_account_name - - def to_dict(self) -> None: - super().to_dict() - if not self.kind_dict and not self.yaml_file: - if not self.pipelineref: - raise MissingRequiredArgumentError(argument="pipelineref") - self.res["spec"] = {} - self.res["spec"]["pipelineref"] = {"name": self.pipelineref} - - if self.params: - self.res["spec"]["params"] = [ - {"name": params_name, "value": params_value} for params_name, params_value in self.params.items() - ] - - if self.service_account_name: - self.res["spec"]["serviceAccountName"] = self.service_account_name diff --git a/ocp_resources/project_request.py b/ocp_resources/project_request.py index 2f84b9f6c6..ca33bda830 100644 --- a/ocp_resources/project_request.py +++ b/ocp_resources/project_request.py @@ -2,8 +2,8 @@ from typing import Any, Optional -from ocp_resources.project_project_openshift_io import Project from ocp_resources.resource import Resource +from ocp_resources.project_project_openshift_io import Project class ProjectRequest(Resource): @@ -45,5 +45,18 @@ def to_dict(self) -> None: # End of generated code + def deploy(self, wait: bool = False) -> Project: + super().deploy(wait=wait) + + project = Project( + name=self.name, + client=self.client, + teardown=self.teardown, + delete_timeout=self.delete_timeout, + ) + project.wait_for_status(status=project.Status.ACTIVE) + + return project + def clean_up(self, wait: bool = True, timeout: Optional[int] = None) -> bool: return Project(name=self.name, client=self.client).clean_up(wait=wait, timeout=timeout) diff --git a/ocp_resources/resource.py b/ocp_resources/resource.py index 85264bc015..1169e00fa5 100644 --- a/ocp_resources/resource.py +++ b/ocp_resources/resource.py @@ -978,7 +978,12 @@ def 
wait_for_status( self.logger.error(f"Status of {self.kind} {self.name} is {current_status}") raise - def create(self, wait: bool = False) -> ResourceInstance | None: + def create( + self, + wait: bool = False, + exceptions_dict: dict[type[Exception], list[str]] = DEFAULT_CLUSTER_RETRY_EXCEPTIONS + | PROTOCOL_ERROR_EXCEPTION_DICT, + ) -> ResourceInstance | None: """ Create resource. @@ -986,7 +991,8 @@ def create(self, wait: bool = False) -> ResourceInstance | None: wait (bool) : True to wait for resource status. + exceptions_dict (dict[type[Exception], list[str]]): Exception types (mapped to retry-eligible message substrings) retried while creating the resource. Returns: - bool: True if create succeeded, False otherwise. + ResourceInstance | None: Created resource instance or None if create failed. """ self.to_dict() @@ -994,10 +1000,12 @@ self.logger.info(f"Create {self.kind} {self.name}") self.logger.info(f"Posting {hashed_res}") self.logger.debug(f"\n{yaml.dump(hashed_res)}") - resource_kwargs = {"body": self.res, "namespace": self.namespace} + resource_kwargs: dict[str, Any] = {"body": self.res, "namespace": self.namespace} if self.dry_run: resource_kwargs["dry_run"] = "All" - resource_ = self.api.create(**resource_kwargs) + resource_ = Resource.retry_cluster_exceptions( + func=self.api.create, exceptions_dict=exceptions_dict, **resource_kwargs + ) with contextlib.suppress(ForbiddenError, AttributeError, NotFoundError): # some resources do not support get() (no instance) or the client do not have permissions self.initial_resource_version = self.instance.metadata.resourceVersion diff --git a/ocp_resources/task.py b/ocp_resources/task.py index ae73b0b633..65bb3dee5b 100644 --- a/ocp_resources/task.py +++ b/ocp_resources/task.py @@ -1,76 +1,107 @@ -from typing import Any, Dict, List, Optional -from ocp_resources.resource import NamespacedResource, MissingRequiredArgumentError +# Generated using https://github.com/RedHatQE/openshift-python-wrapper/blob/main/scripts/resource/README.md + + +from typing import Any +from ocp_resources.resource import 
NamespacedResource class Task(NamespacedResource): """ - A collection of Steps for continuous integration flow, executed as a Pod on a Kubernetes cluster. - API Reference: https://tekton.dev/docs/pipelines/tasks/#configuring-a-task + Task represents a collection of sequential steps that are run as part of a + Pipeline using a set of inputs and producing a set of outputs. Tasks execute + when TaskRuns are created that provide the input parameters and resources and + output resources the Task requires. """ api_group: str = NamespacedResource.ApiGroup.TEKTON_DEV def __init__( self, - steps: Optional[List[Dict[str, Any]]] = None, - description: Optional[str] = None, - params: Optional[List[Dict[str, str]]] = None, - workspaces: Optional[List[Dict[str, Any]]] = None, - results: Optional[List[Dict[str, Any]]] = None, - volumes: Optional[List[Dict[str, Dict[str, Any]]]] = None, - step_template: Optional[Dict[str, Any]] = None, - sidecars: Optional[List[Dict[str, Any]]] = None, + description: str | None = None, + display_name: str | None = None, + params: list[Any] | None = None, + results: list[Any] | None = None, + sidecars: list[Any] | None = None, + step_template: dict[str, Any] | None = None, + steps: list[Any] | None = None, + volumes: Any | None = None, + workspaces: list[Any] | None = None, **kwargs: Any, - ): - """ - Create and manage Task which specifies a sequence of steps to be executed. - + ) -> None: + r""" Args: - steps (List[Dict[str, Any]]): Specifies one or more container images to run in the Task. - description (Optional[str]): An informative description of the Task. - params (Optional[List[Dict[str, str]]]): Specifies execution parameters for the Task. - workspaces (Optional[List[Dict[str, Any]]]): Specifies paths to volumes required by the Task. - results (Optional[List[Dict[str, Any]]]): Specifies the names under which Tasks write execution results. 
- volumes (Optional[List[Dict[str, Dict[str, Any]]]]): Specifies one or more volumes that will be available to the Steps in the Task. - step_template (Optional[Dict[str, Any]]): Specifies a Container step definition to use as the basis for all Steps in the Task. - sidecars (Optional[List[Dict[str, Any]]]): Specifies Sidecar containers to run alongside the Steps in the Task. + description (str): Description is a user-facing description of the task that may be used + to populate a UI. + + display_name (str): DisplayName is a user-facing name of the task that may be used to + populate a UI. + + params (list[Any]): Params is a list of input parameters required to run the task. Params + must be supplied as inputs in TaskRuns unless they declare a + default value. + + results (list[Any]): Results are values that this Task can output + + sidecars (list[Any]): Sidecars are run alongside the Task's step containers. They begin + before the steps start and end after the steps complete. + + step_template (dict[str, Any]): StepTemplate can be used as the basis for all step containers within + the Task, so that the steps inherit settings on the base + container. + + steps (list[Any]): Steps are the steps of the build; each step is run sequentially with + the source mounted into /workspace. + + volumes (Any): Volumes is a collection of volumes that are available to mount into + the steps of the build. See Pod.spec.volumes (API version: v1) + + workspaces (list[Any]): Workspaces are the volumes that this Task requires. 
+ """ super().__init__(**kwargs) - self.steps = steps + self.description = description + self.display_name = display_name self.params = params - self.workspaces = workspaces self.results = results - self.volumes = volumes - self.step_template = step_template self.sidecars = sidecars + self.step_template = step_template + self.steps = steps + self.volumes = volumes + self.workspaces = workspaces def to_dict(self) -> None: super().to_dict() + if not self.kind_dict and not self.yaml_file: - if not self.steps: - raise MissingRequiredArgumentError(argument="steps") self.res["spec"] = {} _spec = self.res["spec"] - _spec = {"steps": self.steps} - if self.description: + if self.description is not None: _spec["description"] = self.description - if self.params: - _spec["params"] = self.params + if self.display_name is not None: + _spec["displayName"] = self.display_name - if self.workspaces: - _spec["workspaces"] = self.workspaces + if self.params is not None: + _spec["params"] = self.params - if self.results: + if self.results is not None: _spec["results"] = self.results - if self.volumes: - _spec["volumes"] = self.volumes + if self.sidecars is not None: + _spec["sidecars"] = self.sidecars - if self.step_template: + if self.step_template is not None: _spec["stepTemplate"] = self.step_template - if self.sidecars: - _spec["sidecars"] = self.sidecars + if self.steps is not None: + _spec["steps"] = self.steps + + if self.volumes is not None: + _spec["volumes"] = self.volumes + + if self.workspaces is not None: + _spec["workspaces"] = self.workspaces + + # End of generated code diff --git a/ocp_resources/task_run.py b/ocp_resources/task_run.py index 514c82c995..bca85ba42a 100644 --- a/ocp_resources/task_run.py +++ b/ocp_resources/task_run.py @@ -1,64 +1,135 @@ -# API reference: https://tekton.dev/docs/pipelines/taskruns/ +# Generated using https://github.com/RedHatQE/openshift-python-wrapper/blob/main/scripts/resource/README.md -from ocp_resources.resource import 
MissingRequiredArgumentError, NamespacedResource + +from typing import Any +from ocp_resources.resource import NamespacedResource class TaskRun(NamespacedResource): - api_group = NamespacedResource.ApiGroup.TEKTON_DEV + """ + TaskRun represents a single execution of a Task. TaskRuns are how the steps + specified in a Task are executed; they specify the parameters and resources + used to run the steps in a Task. + """ + + api_group: str = NamespacedResource.ApiGroup.TEKTON_DEV def __init__( self, - task_ref=None, - task_spec=None, - params=None, - service_account_name=None, - taskrun_timeout=None, - **kwargs, - ): - """ - Create and manage TaskRun which allows you to instantiate and execute a Task on cluster - + compute_resources: dict[str, Any] | None = None, + debug: dict[str, Any] | None = None, + params: list[Any] | None = None, + pod_template: dict[str, Any] | None = None, + retries: int | None = None, + service_account_name: str | None = None, + sidecar_specs: list[Any] | None = None, + status_message: str | None = None, + step_specs: list[Any] | None = None, + task_ref: dict[str, Any] | None = None, + task_spec: Any | None = None, + timeout: str | None = None, + workspaces: list[Any] | None = None, + **kwargs: Any, + ) -> None: + r""" Args: - task_ref (str): Base task to run taskrun. Mandatory if task_spec is not provided. - task_spec (str): Base task to run taskrun. Mandatory if task_ref is not provided. - params (dict, optional): Params to add during triggering a run. - params can be set/changed based on task_ref. 
- example : params={"param_name1":"param_value1", "param_name2":"param_value2"} - service_account_name (str, optional): Provide service account - taskrun_timeout (str, optional): Specifies the taskrun_timeout before the taskrun fails + compute_resources (dict[str, Any]): Compute resources to use for this TaskRun + + debug (dict[str, Any]): TaskRunDebug defines the breakpoint config for a particular TaskRun + + params (list[Any]): Params is a list of Param + + pod_template (dict[str, Any]): PodTemplate holds pod specific configuration + + retries (int): Retries represents how many times this TaskRun should be retried in + the event of task failure. + + service_account_name (str): No field description from API + + sidecar_specs (list[Any]): Specs to apply to Sidecars in this TaskRun. If a field is specified in + both a Sidecar and a SidecarSpec, the value from the SidecarSpec + will be used. This field is only supported when the alpha feature + gate is enabled. + + status_message (str): Status message for cancellation. + + step_specs (list[Any]): Specs to apply to Steps in this TaskRun. If a field is specified in + both a Step and a StepSpec, the value from the StepSpec will be + used. This field is only supported when the alpha feature gate is + enabled. + + task_ref (dict[str, Any]): no more than one of the TaskRef and TaskSpec may be specified. + + task_spec (Any): Specifying TaskSpec can be disabled by setting `disable-inline-spec` + feature flag. See Task.spec (API version: tekton.dev/v1) + + timeout (str): Time after which one retry attempt times out. Defaults to 1 hour. + Refer Go's ParseDuration documentation for expected format: + https://golang.org/pkg/time/#ParseDuration + + workspaces (list[Any]): Workspaces is a list of WorkspaceBindings from volumes to workspaces. 
+ """ - super().__init__( - **kwargs, - ) - self.task_ref = task_ref - self.task_spec = task_spec + super().__init__(**kwargs) + + self.compute_resources = compute_resources + self.debug = debug self.params = params + self.pod_template = pod_template + self.retries = retries self.service_account_name = service_account_name - self.taskrun_timeout = taskrun_timeout + self.sidecar_specs = sidecar_specs + self.status_message = status_message + self.step_specs = step_specs + self.task_ref = task_ref + self.task_spec = task_spec + self.timeout = timeout + self.workspaces = workspaces def to_dict(self) -> None: super().to_dict() + if not self.kind_dict and not self.yaml_file: - if not (self.task_ref or self.task_spec): - raise MissingRequiredArgumentError(argument="'task_ref' or 'task_spec'") + self.res["spec"] = {} + _spec = self.res["spec"] - if self.task_ref and self.task_spec: - raise ValueError("Validation failed: expected exactly one either task_ref or task_spec, got both") + if self.compute_resources is not None: + _spec["computeResources"] = self.compute_resources - self.res["spec"] = {} - if self.task_ref: - self.res["spec"]["taskRef"] = {"name": self.task_ref} + if self.debug is not None: + _spec["debug"] = self.debug + + if self.params is not None: + _spec["params"] = self.params + + if self.pod_template is not None: + _spec["podTemplate"] = self.pod_template + + if self.retries is not None: + _spec["retries"] = self.retries + + if self.service_account_name is not None: + _spec["serviceAccountName"] = self.service_account_name + + if self.sidecar_specs is not None: + _spec["sidecarSpecs"] = self.sidecar_specs + + if self.status_message is not None: + _spec["statusMessage"] = self.status_message + + if self.step_specs is not None: + _spec["stepSpecs"] = self.step_specs + + if self.task_ref is not None: + _spec["taskRef"] = self.task_ref - if self.task_spec: - self.res["spec"]["taskSpec"] = {"name": self.task_spec} + if self.task_spec is not None: + 
_spec["taskSpec"] = self.task_spec - if self.params: - self.res["spec"]["params"] = [ - {"name": params_name, "value": params_value} for params_name, params_value in self.params.items() - ] + if self.timeout is not None: + _spec["timeout"] = self.timeout - if self.taskrun_timeout: - self.res["spec"]["taskrun_timeout"] = self.taskrun_timeout + if self.workspaces is not None: + _spec["workspaces"] = self.workspaces - if self.service_account_name: - self.res["spec"]["serviceAccountName"] = self.service_account_name + # End of generated code diff --git a/pyproject.toml b/pyproject.toml index ad58f1714c..0087a6ab4b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [tool.pytest.ini_options] markers = [ "incremental: Mark tests as incremental", - "kubevirt: Mark tests as kubevirt tests", + "kubevirt: Mark tests as kubevirt tests" ] addopts = [ "--pdbcls=IPython.terminal.debugger:TerminalPdb", @@ -13,11 +13,15 @@ addopts = [ "--cov=ocp_resources.utils.constants", "--cov=ocp_resources.utils.utils", "--cov=ocp_resources.exceptions", - "--cov=class_generator", + "--cov=class_generator" ] [tool.coverage.run] -omit = ["tests/*", "class_generator/tests/*", "class_generator/scripts/tests/*"] +omit = [ + "tests/*", + "class_generator/tests/*", + "class_generator/scripts/tests/*" +] [tool.coverage.report] fail_under = 60 @@ -32,8 +36,8 @@ line-length = 120 fix = true output-format = "grouped" -[tool.ruff.format] -exclude = [".git", ".venv", ".mypy_cache", ".tox", "__pycache__"] + [tool.ruff.format] + exclude = [ ".git", ".venv", ".mypy_cache", ".tox", "__pycache__" ] [tool.mypy] no_implicit_optional = true @@ -41,22 +45,22 @@ show_error_codes = true warn_unused_ignores = true [tool.hatch.build.targets.wheel] -packages = ["ocp_resources", "class_generator", "fake_kubernetes_client"] +packages = [ "ocp_resources", "class_generator", "fake_kubernetes_client" ] [tool.uv] -dev-dependencies = ["ipdb>=0.13.13", "ipython>=8.12.3"] +dev-dependencies = [ "ipdb>=0.13.13", 
"ipython>=8.12.3" ] [project] requires-python = ">=3.9" name = "openshift-python-wrapper" -version = "11.0.67" +version = "4.19.4" description = "Wrapper around https://github.com/kubernetes-client/python" readme = "README.md" license = "Apache-2.0" -keywords = ["Openshift", "Kubevirt", "Openshift Virtualization"] +keywords = [ "Openshift", "Kubevirt", "Openshift Virtualization" ] classifiers = [ "Programming Language :: Python :: 3", - "Operating System :: OS Independent", + "Operating System :: OS Independent" ] dependencies = [ "cloup>=3.0.5", @@ -72,40 +76,37 @@ dependencies = [ "rich>=13.9.2", "ruff>=0.6.9", "timeout-sampler>=0.0.46", - "xmltodict>=0.13.0", + "xmltodict>=0.13.0" ] -[[project.authors]] -name = "Meni Yakove" -email = "myakove@gmail.com" + [[project.authors]] + name = "Meni Yakove" + email = "myakove@gmail.com" -[[project.authors]] -name = "Ruth Netser" -email = "rnetser@gmail.com" + [[project.authors]] + name = "Ruth Netser" + email = "rnetser@gmail.com" -[[project.maintainers]] -name = "Meni Yakove" -email = "myakove@gmail.com" + [[project.maintainers]] + name = "Meni Yakove" + email = "myakove@gmail.com" -[[project.maintainers]] -name = "Ruth Netser" -email = "rnetser@gmail.com" + [[project.maintainers]] + name = "Ruth Netser" + email = "rnetser@gmail.com" -[project.urls] -homepage = "https://github.com/RedHatQE/openshift-python-wrapper" -documentation = "https://openshift-python-wrapper.readthedocs.io/en/latest/" -Download = "https://pypi.org/project/openshift-python-wrapper/" -"Bug Tracker" = "https://github.com/RedHatQE/openshift-python-wrapper/issues" + [project.urls] + homepage = "https://github.com/RedHatQE/openshift-python-wrapper" + documentation = "https://openshift-python-wrapper.readthedocs.io/en/latest/" + Download = "https://pypi.org/project/openshift-python-wrapper/" + "Bug Tracker" = "https://github.com/RedHatQE/openshift-python-wrapper/issues" -[project.scripts] -class-generator = "class_generator.class_generator:main" + 
[project.scripts] + class-generator = "class_generator.class_generator:main" [build-system] -requires = ["hatchling"] +requires = [ "hatchling" ] build-backend = "hatchling.build" [dependency-groups] -tests = [ - "pytest>=8.3.5", - "pytest-cov>=6.1.1", -] +tests = [ "pytest>=8.3.5", "pytest-cov>=6.1.1" ] diff --git a/uv.lock b/uv.lock index 6b963b1d5e..b5a3bfbca4 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 2 +revision = 3 requires-python = ">=3.9" resolution-markers = [ "python_full_version >= '3.11'", @@ -780,7 +780,7 @@ wheels = [ [[package]] name = "openshift-python-wrapper" -version = "11.0.67" +version = "4.19.4" source = { editable = "." } dependencies = [ { name = "cloup" },