diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 9b07ada11..147f8ff4b 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -153,18 +153,18 @@ To run these tests: When developing tests it can be a little frustrating to wait for GitLab to spin up every run. To prevent the containers from being cleaned up afterwards, pass -``--keep-containers`` to pytest, i.e.: +``--keep-containers-running`` to pytest, i.e.: .. code-block:: bash - tox -e api_func_v4 -- --keep-containers + tox -e api_func_v4 -- --keep-containers-running If you then wish to test against a clean slate, you may perform a manual clean up of the containers by running: .. code-block:: bash - docker-compose -f tests/functional/fixtures/docker-compose.yml -p pytest-python-gitlab down -v + docker-compose -f gitlab/testing/docker/docker-compose.yml -p pytest-python-gitlab down -v By default, the tests run against the latest version of the ``gitlab/gitlab-ce`` image. You can override both the image and tag by providing either the diff --git a/gitlab/testing/__init__.py b/gitlab/testing/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/functional/fixtures/.env b/gitlab/testing/docker/.env similarity index 100% rename from tests/functional/fixtures/.env rename to gitlab/testing/docker/.env diff --git a/gitlab/testing/docker/__init__.py b/gitlab/testing/docker/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/functional/fixtures/create_license.rb b/gitlab/testing/docker/create_license.rb similarity index 100% rename from tests/functional/fixtures/create_license.rb rename to gitlab/testing/docker/create_license.rb diff --git a/tests/functional/fixtures/docker-compose.yml b/gitlab/testing/docker/docker-compose.yml similarity index 87% rename from tests/functional/fixtures/docker-compose.yml rename to gitlab/testing/docker/docker-compose.yml index f36f3d2fd..ff3123b28 100644 --- a/tests/functional/fixtures/docker-compose.yml +++ b/gitlab/testing/docker/docker-compose.yml @@ -6,7 +6,7 @@ networks: services: gitlab: - image: '${GITLAB_IMAGE}:${GITLAB_TAG}' + image: '${GITLAB_IMAGE:-gitlab/gitlab-ee}:${GITLAB_TAG:-latest}' container_name: 'gitlab-test' hostname: 'gitlab.test' privileged: true # Just in case https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/1350 @@ -36,7 +36,7 @@ services: - -c - ruby /create_license.rb && /assets/wrapper volumes: - - ${PWD}/tests/functional/fixtures/create_license.rb:/create_license.rb + - ./create_license.rb:/create_license.rb ports: - '8080:80' - '2222:22' @@ -44,7 +44,7 @@ services: - gitlab-network gitlab-runner: - image: '${GITLAB_RUNNER_IMAGE}:${GITLAB_RUNNER_TAG}' + image: '${GITLAB_RUNNER_IMAGE:-gitlab/gitlab-runner}:${GITLAB_RUNNER_TAG:-latest}' container_name: 'gitlab-runner-test' depends_on: - gitlab diff --git a/tests/functional/fixtures/docker.py b/gitlab/testing/docker/docker.py similarity index 57% rename from tests/functional/fixtures/docker.py rename to gitlab/testing/docker/docker.py index 26bc440b5..6260abc7a 100644 --- a/tests/functional/fixtures/docker.py +++ b/gitlab/testing/docker/docker.py @@ -4,6 +4,7 @@ """ import pytest +import pytest_docker @pytest.fixture(scope="session") @@ -12,15 +13,27 @@ def docker_compose_project_name(): return "pytest-python-gitlab" +pytest_docker.docker_compose_project_name = docker_compose_project_name + + @pytest.fixture(scope="session") -def docker_compose_file(fixture_dir): - return fixture_dir / "docker-compose.yml" +def docker_compose_file(docker_assets_dir): + return docker_assets_dir 
/ "docker-compose.yml" + + +pytest_docker.docker_compose_file = docker_compose_file @pytest.fixture(scope="session") def docker_cleanup(request): """Conditionally keep containers around by overriding the cleanup command.""" - if request.config.getoption("--keep-containers"): + if request.config.getoption("--keep-containers-running"): # Print version and exit. return "-v" - return "down -v" + if request.config.getoption("--keep-containers"): + # Stop the containers. + return "stop" + return "down" + + +pytest_docker.docker_cleanup = docker_cleanup diff --git a/gitlab/testing/docker/install_docker.sh b/gitlab/testing/docker/install_docker.sh new file mode 100644 index 000000000..648e4cdfa --- /dev/null +++ b/gitlab/testing/docker/install_docker.sh @@ -0,0 +1,11 @@ +apt-get update +apt-get install -y apt-transport-https ca-certificates curl gnupg2 software-properties-common +curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add - +apt-get update +echo \ + "deb [arch=$(dpkg --print-architecture)] https://download.docker.com/linux/ubuntu \ + $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \ + tee /etc/apt/sources.list.d/docker.list > /dev/null +apt-get update +apt-get install -y docker-ce docker-compose +usermod -aG docker gitlab-runner \ No newline at end of file diff --git a/tests/functional/fixtures/set_token.rb b/gitlab/testing/docker/set_token.rb similarity index 100% rename from tests/functional/fixtures/set_token.rb rename to gitlab/testing/docker/set_token.rb diff --git a/gitlab/testing/fixtures/__init__.py b/gitlab/testing/fixtures/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/gitlab/testing/fixtures/gitlab.py b/gitlab/testing/fixtures/gitlab.py new file mode 100644 index 000000000..9f00eeb62 --- /dev/null +++ b/gitlab/testing/fixtures/gitlab.py @@ -0,0 +1,665 @@ +import contextlib +import dataclasses +import datetime +import logging +import tempfile +import time +import uuid +from pathlib import Path +from subprocess import check_output +from typing import TYPE_CHECKING + +import pytest +import requests + +import gitlab +import gitlab.base +from gitlab.testing.docker import * # noqa + +from . 
import helpers + + +@dataclasses.dataclass +class GitlabVersion: + major: int + minor: int + patch: str + revision: str + + def __post_init__(self): + self.major, self.minor = int(self.major), int(self.minor) + + +@pytest.fixture(scope="session") +def gitlab_version(gl) -> GitlabVersion: + version, revision = gl.version() + major, minor, patch = version.split(".") + return GitlabVersion(major=major, minor=minor, patch=patch, revision=revision) + + +@pytest.fixture(scope="session") +def docker_assets_dir() -> Path: + import gitlab.testing + + return Path(gitlab.testing.__file__).parent / "docker" + + +@pytest.fixture(scope="session") +def gitlab_service_name() -> str: + """The "service" name is the one defined in the `docker-compose.yml` file""" + return "gitlab" + + +@pytest.fixture(scope="session") +def gitlab_container_name() -> str: + """The "container" name is the one defined in the `docker-compose.yml` file + for the "gitlab" service""" + return "gitlab-test" + + +@pytest.fixture(scope="session") +def gitlab_docker_port(docker_services, gitlab_service_name: str) -> int: + port: int = docker_services.port_for(gitlab_service_name, container_port=80) + return port + + +@pytest.fixture(scope="session") +def gitlab_url(docker_ip: str, gitlab_docker_port: int) -> str: + return f"http://{docker_ip}:{gitlab_docker_port}" + + +def reset_gitlab(gl: gitlab.Gitlab) -> None: # noqa C901 + """Delete resources (such as projects, groups, users) that shouldn't + exist.""" + if helpers.get_gitlab_plan(gl): + logging.info("GitLab EE detected") + # NOTE(jlvillal, timknight): By default in GitLab EE it will wait 7 days before + # deleting a group or project. + # In GL 16.0 we need to call delete with `permanently_remove=True` for projects and sub groups + # (handled in helpers.py safe_delete) + settings = gl.settings.get() + modified_settings = False + if settings.deletion_adjourned_period != 1: + logging.info("Setting `deletion_adjourned_period` to 1 Day") + settings.deletion_adjourned_period = 1 + modified_settings = True + if modified_settings: + settings.save() + + for project in gl.projects.list(): + for deploy_token in project.deploytokens.list(): + logging.info( + f"Deleting deploy token: {deploy_token.username!r} in " + f"project: {project.path_with_namespace!r}" + ) + helpers.safe_delete(deploy_token) + logging.info(f"Deleting project: {project.path_with_namespace!r}") + helpers.safe_delete(project) + + for group in gl.groups.list(): + # skip deletion of a descendant group to prevent scenarios where parent group + # gets deleted leaving a dangling descendant whose deletion will throw 404s. 
+ if group.parent_id: + logging.info( + f"Skipping deletion of {group.full_path} as it is a descendant " + f"group and will be removed when the parent group is deleted" + ) + continue + + for deploy_token in group.deploytokens.list(): + logging.info( + f"Deleting deploy token: {deploy_token.username!r} in " + f"group: {group.path_with_namespace!r}" + ) + helpers.safe_delete(deploy_token) + logging.info(f"Deleting group: {group.full_path!r}") + helpers.safe_delete(group) + for topic in gl.topics.list(): + logging.info(f"Deleting topic: {topic.name!r}") + helpers.safe_delete(topic) + for variable in gl.variables.list(): + logging.info(f"Deleting variable: {variable.key!r}") + helpers.safe_delete(variable) + for user in gl.users.list(): + if user.username not in ["root", "ghost"]: + logging.info(f"Deleting user: {user.username!r}") + helpers.safe_delete(user) + + logging.info("Waiting 2 seconds so stuff can be deleted...") + time.sleep(2) + + +def set_token(container: str, docker_assets_dir: Path, gitlab_url: str) -> str: + logging.info("Trying to load saved token") + gitlab_token_path = Path(".gitlab_token") + if gitlab_token_path.exists(): + logging.info("Found saved token. Checking if we can login") + saved_token = gitlab_token_path.read_text() + instance = gitlab.Gitlab(gitlab_url, private_token=saved_token) + with contextlib.suppress(gitlab.GitlabAuthenticationError): + instance.auth() + if instance.user is not None: + logging.info("That seems to have worked") + return saved_token + + logging.info("Creating API token.") + set_token_rb = docker_assets_dir / "set_token.rb" + + with Path(set_token_rb).open(encoding="utf-8") as f: + set_token_command = f.read().strip() + + rails_command = [ + "docker", + "exec", + container, + "gitlab-rails", + "runner", + set_token_command, + ] + output = check_output(rails_command).decode().strip() + logging.info("Finished creating API token.") + + gitlab_token_path.write_text(output) + + return output + + +def pytest_report_collectionfinish(config, startdir, items): + return [ + "", + "Starting GitLab container.", + "Waiting for GitLab to reconfigure.", + "This will take a few minutes.", + ] + + +def pytest_addoption(parser): + parser.addoption( + "--keep-containers-running", + action="store_true", + help="Keep containers running after testing", + ) + parser.addoption( + "--keep-containers", + action="store_true", + help="Only stop containers after testing", + ) + + +@pytest.fixture(scope="session") +def temp_dir() -> Path: + return Path(tempfile.gettempdir()) + + +@pytest.fixture(scope="session") +def check_is_alive(): + """ + Return a healthcheck function fixture for the GitLab container spinup. + """ + + def _check(*, container: str, start_time: float, gitlab_url: str) -> bool: + setup_time = time.perf_counter() - start_time + minutes, seconds = int(setup_time / 60), int(setup_time % 60) + logging.info( + f"Checking if GitLab container is up. " + f"Have been checking for {minutes} minute(s), {seconds} seconds ..." + ) + logs = ["docker", "logs", container] + if "gitlab Reconfigured!" 
not in check_output(logs).decode(): + return False + logging.debug("GitLab has finished reconfiguring.") + for check in ("health", "readiness", "liveness"): + url = f"{gitlab_url}/-/{check}" + logging.debug(f"Checking {check!r} endpoint at: {url}") + try: + result = requests.get(url, timeout=1.0) + except (requests.exceptions.Timeout, requests.exceptions.ConnectionError): + logging.info(f"{check!r} check timed out") + return False + if result.status_code != 200: # noqa PLR2004 + logging.info(f"{check!r} check did not return 200: {result!r}") + return False + logging.debug(f"{check!r} check passed: {result!r}") + return True + + return _check + + +@pytest.fixture(scope="session") +def gitlab_token( + check_is_alive, + gitlab_container_name: str, + gitlab_url: str, + docker_services, + docker_assets_dir: Path, +) -> str: + start_time = time.perf_counter() + logging.info("Waiting for GitLab container to become ready.") + docker_services.wait_until_responsive( + timeout=900, + pause=10, + check=lambda: check_is_alive( + container=gitlab_container_name, + start_time=start_time, + gitlab_url=gitlab_url, + ), + ) + setup_time = time.perf_counter() - start_time + minutes, seconds = int(setup_time / 60), int(setup_time % 60) + logging.info( + f"GitLab container is now ready after {minutes} minute(s), {seconds} seconds" + ) + + return set_token( + gitlab_container_name, + docker_assets_dir=docker_assets_dir, + gitlab_url=gitlab_url, + ) + + +@pytest.fixture(scope="session") +def gitlab_config(gitlab_url: str, gitlab_token: str, temp_dir: Path): + config_file = temp_dir / "python-gitlab.cfg" + + config = f"""[global] +default = local +timeout = 60 + +[local] +url = {gitlab_url} +private_token = {gitlab_token} +api_version = 4""" + + with Path(config_file).open("w", encoding="utf-8") as f: + f.write(config) + + return config_file + + +@pytest.fixture(scope="session") +def gl(gitlab_url: str, gitlab_token: str) -> gitlab.Gitlab: + """Helper instance to make fixtures and asserts directly via the API.""" + + logging.info("Instantiating python-gitlab gitlab.Gitlab instance") + instance = gitlab.Gitlab( + gitlab_url, + private_token=gitlab_token, + keep_base_url=True, + retry_transient_errors=True, + timeout=120, + ) + instance.auth() + + logging.info("Reset GitLab") + reset_gitlab(instance) + + return instance + + +@pytest.fixture(scope="session") +def gitlab_runner(gl: gitlab.Gitlab): + container = "gitlab-runner-test" + runner_description = "python-gitlab-runner" + if TYPE_CHECKING: + assert gl.user is not None + + runner = gl.user.runners.create( + {"runner_type": "instance_type", "run_untagged": True} + ) + url = "http://gitlab" + + docker_exec = ["docker", "exec", container, "gitlab-runner"] + register = [ + "register", + "--non-interactive", + "--token", + runner.token, + "--description", + runner_description, + "--url", + url, + "--clone-url", + url, + "--executor", + "shell", + ] + + yield check_output(docker_exec + register).decode() + + gl.runners.delete(token=runner.token) + + +@pytest.fixture(scope="module") +def group(gl): + """Group fixture for group API resource tests.""" + _id = uuid.uuid4().hex + data = {"name": f"test-group-{_id}", "path": f"group-{_id}"} + group = gl.groups.create(data) + + yield group + + helpers.safe_delete(group) + + +@pytest.fixture(scope="module") +def project(gl): + """Project fixture for project API resource tests.""" + _id = uuid.uuid4().hex + name = f"test-project-{_id}" + + project = gl.projects.create(name=name) + + yield project + + 
helpers.safe_delete(project) + + +@pytest.fixture(scope="function") +def make_merge_request(project): + """Fixture factory used to create a merge_request. + + It will create a branch, add a commit to the branch, and then create a + merge request against project.default_branch. The MR will be returned. + + When finished any created merge requests and branches will be deleted. + + NOTE: No attempt is made to restore project.default_branch to its previous + state. So if the merge request is merged then its content will be in the + project.default_branch branch. + """ + + to_delete = [] + + def _make_merge_request(*, source_branch: str, create_pipeline: bool = False): + # Wait for processes to be done before we start... + # NOTE(jlvillal): Sometimes the CI would give a "500 Internal Server + # Error". Hoping that waiting until all other processes are done will + # help with that. + # Pause to let GL catch up (happens on hosted too, sometimes takes a while for server to be ready to merge) + time.sleep(30) + + project.refresh() # Gets us the current default branch + logging.info(f"Creating branch {source_branch}") + mr_branch = project.branches.create( + {"branch": source_branch, "ref": project.default_branch} + ) + # NOTE(jlvillal): Must create a commit in the new branch before we can + # create an MR that will work. + project.files.create( + { + "file_path": f"README.{source_branch}", + "branch": source_branch, + "content": "Initial content", + "commit_message": "New commit in new branch", + } + ) + + if create_pipeline: + project.files.create( + { + "file_path": ".gitlab-ci.yml", + "branch": source_branch, + "content": """ +test: + rules: + - if: '$CI_PIPELINE_SOURCE == "merge_request_event"' + script: + - sleep 24h # We don't expect this to finish +""", + "commit_message": "Add a simple pipeline", + } + ) + mr = project.mergerequests.create( + { + "source_branch": source_branch, + "target_branch": project.default_branch, + "title": "Should remove source branch", + "remove_source_branch": True, + } + ) + + # Pause to let GL catch up (happens on hosted too, sometimes takes a while for server to be ready to merge) + time.sleep(5) + + mr_iid = mr.iid + for _ in range(60): + mr = project.mergerequests.get(mr_iid) + if ( + mr.detailed_merge_status == "checking" + or mr.detailed_merge_status == "unchecked" + ): # noqa PLR1714 + time.sleep(0.5) + else: + break + + assert mr.detailed_merge_status != "checking" + assert mr.detailed_merge_status != "unchecked" + + to_delete.extend([mr, mr_branch]) + return mr + + yield _make_merge_request + + for object in to_delete: + helpers.safe_delete(object) + + +@pytest.fixture(scope="function") +def merge_request(make_merge_request, project): + _id = uuid.uuid4().hex + return make_merge_request(source_branch=f"branch-{_id}") + + +@pytest.fixture(scope="function") +def merge_request_with_pipeline(make_merge_request, project): + _id = uuid.uuid4().hex + return make_merge_request(source_branch=f"branch-{_id}", create_pipeline=True) + + +@pytest.fixture(scope="module") +def project_file(project): + """File fixture for tests requiring a project with files and branches.""" + project_file = project.files.create( + { + "file_path": "README", + "branch": "main", + "content": "Initial content", + "commit_message": "Initial commit", + } + ) + + return project_file + + +@pytest.fixture(scope="function") +def release(project, project_file): + _id = uuid.uuid4().hex + name = f"we_have_a_slash/test-release-{_id}" + + project.refresh() # Gets us the current default branch + 
release = project.releases.create( + { + "name": name, + "tag_name": _id, + "description": "description", + "ref": project.default_branch, + } + ) + + return release + + +@pytest.fixture(scope="function") +def service(project): + """This is just a convenience fixture to make test cases slightly prettier. Project + services are not idempotent. A service cannot be retrieved until it is enabled. + After it is enabled the first time, it can never be fully deleted, only disabled.""" + service = project.services.update("asana", {"api_key": "api_key"}) + + yield service + + try: + project.services.delete("asana") + except gitlab.exceptions.GitlabDeleteError as e: + print(f"Service already disabled: {e}") + + +@pytest.fixture(scope="module") +def user(gl): + """User fixture for user API resource tests.""" + _id = uuid.uuid4().hex + email = f"user{_id}@email.com" + username = f"user{_id}" + name = f"User {_id}" + password = "E4596f8be406Bc3a14a4ccdb1df80587" + + user = gl.users.create(email=email, username=username, name=name, password=password) + + yield user + + helpers.safe_delete(user) + + +@pytest.fixture(scope="module") +def issue(project): + """Issue fixture for issue API resource tests.""" + _id = uuid.uuid4().hex + data = {"title": f"Issue {_id}", "description": f"Issue {_id} description"} + + return project.issues.create(data) + + +@pytest.fixture(scope="module") +def milestone(project): + _id = uuid.uuid4().hex + data = {"title": f"milestone{_id}"} + + return project.milestones.create(data) + + +@pytest.fixture(scope="module") +def label(project): + """Label fixture for project label API resource tests.""" + _id = uuid.uuid4().hex + data = { + "name": f"prjlabel{_id}", + "description": f"prjlabel1 {_id} description", + "color": "#112233", + } + + return project.labels.create(data) + + +@pytest.fixture(scope="module") +def group_label(group): + """Label fixture for group label API resource tests.""" + _id = uuid.uuid4().hex + data = { + "name": f"grplabel{_id}", + "description": f"grplabel1 {_id} description", + "color": "#112233", + } + + return group.labels.create(data) + + +@pytest.fixture(scope="module") +def epic(group): + """Fixture for group epic API resource tests.""" + _id = uuid.uuid4().hex + return group.epics.create({"title": f"epic-{_id}", "description": f"Epic {_id}"}) + + +@pytest.fixture(scope="module") +def variable(project): + """Variable fixture for project variable API resource tests.""" + _id = uuid.uuid4().hex + data = {"key": f"var{_id}", "value": f"Variable {_id}"} + + return project.variables.create(data) + + +@pytest.fixture(scope="module") +def deploy_token(project): + """Deploy token fixture for project deploy token API resource tests.""" + _id = uuid.uuid4().hex + data = { + "name": f"token-{_id}", + "username": "root", + "expires_at": datetime.date.today().isoformat(), + "scopes": "read_registry", + } + + return project.deploytokens.create(data) + + +@pytest.fixture(scope="module") +def group_deploy_token(group): + """Deploy token fixture for group deploy token API resource tests.""" + _id = uuid.uuid4().hex + data = { + "name": f"group-token-{_id}", + "username": "root", + "expires_at": datetime.date.today().isoformat(), + "scopes": "read_registry", + } + + return group.deploytokens.create(data) + + +@pytest.fixture(scope="session") +def GPG_KEY(): # noqa + return """-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQENBFn5mzYBCADH6SDVPAp1zh/hxmTi0QplkOfExBACpuY6OhzNdIg+8/528b3g +Y5YFR6T/HLv/PmeHskUj21end1C0PNG2T9dTx+2Vlh9ISsSG1kyF9T5fvMR3bE0x 
+Dl6S489CXZrjPTS9SHk1kF+7dwjUxLJyxF9hPiSihFefDFu3NeOtG/u8vbC1mewQ +ZyAYue+mqtqcCIFFoBz7wHKMWjIVSJSyTkXExu4OzpVvy3l2EikbvavI3qNz84b+ +Mgkv/kiBlNoCy3CVuPk99RYKZ3lX1vVtqQ0OgNGQvb4DjcpyjmbKyibuZwhDjIOh +au6d1OyEbayTntd+dQ4j9EMSnEvm/0MJ4eXPABEBAAG0G0dpdGxhYlRlc3QxIDxm +YWtlQGZha2UudGxkPokBNwQTAQgAIQUCWfmbNgIbAwULCQgHAgYVCAkKCwIEFgID +AQIeAQIXgAAKCRBgxELHf8f3hF3yB/wNJlWPKY65UsB4Lo0hs1OxdxCDqXogSi0u +6crDEIiyOte62pNZKzWy8TJcGZvznRTZ7t8hXgKFLz3PRMcl+vAiRC6quIDUj+2V +eYfwaItd1lUfzvdCaC7Venf4TQ74f5vvNg/zoGwE6eRoSbjlLv9nqsxeA0rUBUQL +LYikWhVMP3TrlfgfduYvh6mfgh57BDLJ9kJVpyfxxx9YLKZbaas9sPa6LgBtR555 +JziUxHmbEv8XCsUU8uoFeP1pImbNBplqE3wzJwzOMSmmch7iZzrAwfN7N2j3Wj0H +B5kQddJ9dmB4BbU0IXGhWczvdpxboI2wdY8a1JypxOdePoph/43iuQENBFn5mzYB +CADnTPY0Zf3d9zLjBNgIb3yDl94uOcKCq0twNmyjMhHzGqw+UMe9BScy34GL94Al +xFRQoaL+7P8hGsnsNku29A/VDZivcI+uxTx4WQ7OLcn7V0bnHV4d76iky2ufbUt/ +GofthjDs1SonePO2N09sS4V4uK0d5N4BfCzzXgvg8etCLxNmC9BGt7AaKUUzKBO4 +2QvNNaC2C/8XEnOgNWYvR36ylAXAmo0sGFXUsBCTiq1fugS9pwtaS2JmaVpZZ3YT +pMZlS0+SjC5BZYFqSmKCsA58oBRzCxQz57nR4h5VEflgD+Hy0HdW0UHETwz83E6/ +U0LL6YyvhwFr6KPq5GxinSvfABEBAAGJAR8EGAEIAAkFAln5mzYCGwwACgkQYMRC +x3/H94SJgwgAlKQb10/xcL/epdDkR7vbiei7huGLBpRDb/L5fM8B5W77Qi8Xmuqj +cCu1j99ZCA5hs/vwVn8j8iLSBGMC5gxcuaar/wtmiaEvT9fO/h6q4opG7NcuiJ8H +wRj8ccJmRssNqDD913PLz7T40Ts62blhrEAlJozGVG/q7T3RAZcskOUHKeHfc2RI +YzGsC/I9d7k6uxAv1L9Nm5F2HaAQDzhkdd16nKkGaPGR35cT1JLInkfl5cdm7ldN +nxs4TLO3kZjUTgWKdhpgRNF5hwaz51ZjpebaRf/ZqRuNyX4lIRolDxzOn/+O1o8L +qG2ZdhHHmSK2LaQLFiSprUkikStNU9BqSQ== +=5OGa +-----END PGP PUBLIC KEY BLOCK-----""" + + +@pytest.fixture(scope="session") +def SSH_KEY(): # noqa + return ( + "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDZAjAX8vTiHD7Yi3/EzuVaDChtih" + "79HyJZ6H9dEqxFfmGA1YnncE0xujQ64TCebhkYJKzmTJCImSVkOu9C4hZgsw6eE76n" + "+Cg3VwEeDUFy+GXlEJWlHaEyc3HWioxgOALbUp3rOezNh+d8BDwwqvENGoePEBsz5l" + "a6WP5lTi/HJIjAl6Hu+zHgdj1XVExeH+S52EwpZf/ylTJub0Bl5gHwf/siVE48mLMI" + "sqrukXTZ6Zg+8EHAIvIQwJ1dKcXe8P5IoLT7VKrbkgAnolS0I8J+uH7KtErZJb5oZh" + "S4OEwsNpaXMAr+6/wWSpircV2/e7sFLlhlKBC4Iq1MpqlZ7G3p foo@bar" + ) + + +@pytest.fixture(scope="session") +def DEPLOY_KEY(): # noqa + return ( + "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDFdRyjJQh+1niBpXqE2I8dzjG" + "MXFHlRjX9yk/UfOn075IdaockdU58sw2Ai1XIWFpZpfJkW7z+P47ZNSqm1gzeXI" + "rtKa9ZUp8A7SZe8vH4XVn7kh7bwWCUirqtn8El9XdqfkzOs/+FuViriUWoJVpA6" + "WZsDNaqINFKIA5fj/q8XQw+BcS92L09QJg9oVUuH0VVwNYbU2M2IRmSpybgC/gu" + "uWTrnCDMmLItksATifLvRZwgdI8dr+q6tbxbZknNcgEPrI2jT0hYN9ZcjNeWuyv" + "rke9IepE7SPBT41C+YtUX4dfDZDmczM1cE0YL/krdUCfuZHMa4ZS2YyNd6slufc" + "vn bar@foo" + ) diff --git a/gitlab/testing/fixtures/helpers.py b/gitlab/testing/fixtures/helpers.py new file mode 100644 index 000000000..8d33de02e --- /dev/null +++ b/gitlab/testing/fixtures/helpers.py @@ -0,0 +1,71 @@ +import logging +import time +from typing import Optional, TYPE_CHECKING + +import pytest + +import gitlab +import gitlab.base +import gitlab.exceptions + +SLEEP_INTERVAL = 0.5 +TIMEOUT = 60 # seconds before timeout will occur +MAX_ITERATIONS = int(TIMEOUT / SLEEP_INTERVAL) + + +def get_gitlab_plan(gl: gitlab.Gitlab) -> Optional[str]: + """Determine the license available on the GitLab instance""" + try: + license = gl.get_license() + except gitlab.exceptions.GitlabLicenseError: + # Without a license we assume only Free features are available + return None + + if TYPE_CHECKING: + assert isinstance(license["plan"], str) + return license.get("plan", None) + + +def safe_delete(object: gitlab.base.RESTObject) -> None: + """Ensure the object specified can not be retrieved. 
If object still exists after + timeout period, fail the test""" + manager = object.manager + for index in range(MAX_ITERATIONS): + try: + object = manager.get(object.get_id()) # type: ignore[attr-defined] + except gitlab.exceptions.GitlabGetError: + return + + if index: + logging.info(f"Attempt {index + 1} to delete {object!r}.") + try: + if isinstance(object, gitlab.v4.objects.User): + # You can't use this option if the selected user is the sole owner of any groups + # Use `hard_delete=True` or a 'Ghost User' may be created. + # https://docs.gitlab.com/ee/api/users.html#user-deletion + object.delete(hard_delete=True) + if index > 1: + # If User is the sole owner of any group it won't be deleted, + # which combined with parents group never immediately deleting in GL 16 + # we shouldn't cause test to fail if it still exists + return + elif isinstance(object, gitlab.v4.objects.Project): + # Immediately delete rather than waiting for at least 1day + # https://docs.gitlab.com/ee/api/projects.html#delete-project + object.delete(permanently_remove=True) + pass + else: + # We only attempt to delete parent groups to prevent dangling sub-groups + # However parent groups can only be deleted on a delay in Gl 16 + # https://docs.gitlab.com/ee/api/groups.html#remove-group + object.delete() + except gitlab.exceptions.GitlabDeleteError: + logging.info(f"{object!r} already deleted or scheduled for deletion.") + if isinstance(object, gitlab.v4.objects.Group): + # Parent groups can never be immediately deleted in GL 16, + # so don't cause test to fail if it still exists + return + pass + + time.sleep(SLEEP_INTERVAL) + pytest.fail(f"{object!r} was not deleted") diff --git a/gitlab/testing/fixtures/meta.py b/gitlab/testing/fixtures/meta.py new file mode 100644 index 000000000..004b00fee --- /dev/null +++ b/gitlab/testing/fixtures/meta.py @@ -0,0 +1,32 @@ +import pytest + +import gitlab + + +@pytest.fixture(autouse=True) +def mock_clean_config(monkeypatch: pytest.MonkeyPatch) -> None: + """Ensures user-defined environment variables do not interfere with tests.""" + monkeypatch.delenv("PYTHON_GITLAB_CFG", raising=False) + monkeypatch.delenv("GITLAB_PRIVATE_TOKEN", raising=False) + monkeypatch.delenv("GITLAB_URL", raising=False) + monkeypatch.delenv("CI_JOB_TOKEN", raising=False) + monkeypatch.delenv("CI_SERVER_URL", raising=False) + + +@pytest.fixture(autouse=True) +def default_files(monkeypatch: pytest.MonkeyPatch) -> None: + """Ensures user configuration files do not interfere with tests.""" + monkeypatch.setattr(gitlab.config, "_DEFAULT_FILES", []) + + +@pytest.fixture +def valid_gitlab_ci_yml() -> str: + return """--- +:test_job: + :script: echo 1 +""" + + +@pytest.fixture +def invalid_gitlab_ci_yml() -> str: + return "invalid" diff --git a/gitlab/testing/plugin.py b/gitlab/testing/plugin.py new file mode 100644 index 000000000..59a64f182 --- /dev/null +++ b/gitlab/testing/plugin.py @@ -0,0 +1,3 @@ +from .docker.docker import * # noqa +from .fixtures.gitlab import * # noqa +from .fixtures.meta import * # noqa diff --git a/pyproject.toml b/pyproject.toml index 7b8510b94..26c418937 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,6 +23,7 @@ dependencies = [ classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: Console", + "Framework :: Pytest", "Intended Audience :: System Administrators", "License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)", "Natural Language :: English", @@ -44,10 +45,14 @@ dynamic = ["version"] autocompletion = 
["argcomplete>=1.10.0,<3"] yaml = ["PyYaml>=6.0.1"] graphql = ["gql[httpx]>=3.5.0,<4"] +testing = ["attrs", "pytest", "pytest-docker"] [project.scripts] gitlab = "gitlab.cli:main" +[project.entry-points.pytest11] +gitlab = "gitlab.testing.plugin" + [project.urls] Homepage = "https://github.com/python-gitlab/python-gitlab" Changelog = "https://github.com/python-gitlab/python-gitlab/blob/main/CHANGELOG.md" @@ -78,6 +83,7 @@ skip_magic_trailing_comma = true module = [ "docs.*", "docs.ext.*", + "gitlab.testing.*", "tests.unit.*", ] ignore_errors = true @@ -108,6 +114,9 @@ exclude_commit_patterns = [ mode = "update" insertion_flag = "All versions below are listed in reverse chronological order." +[tool.pylint.main] +ignore-paths = ["gitlab/testing"] + [tool.pylint.messages_control] max-line-length = 88 jobs = 0 # Use auto-detected number of multiple processes to speed up Pylint. diff --git a/requirements-docker.txt b/requirements-docker.txt index 123a4438a..3062f9bcf 100644 --- a/requirements-docker.txt +++ b/requirements-docker.txt @@ -1,3 +1,2 @@ -r requirements.txt -r requirements-test.txt -pytest-docker==3.2.5 diff --git a/requirements-test.txt b/requirements-test.txt index e8843be94..d89860ff2 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -6,6 +6,7 @@ pytest-console-scripts==1.4.1 pytest-cov==7.0.0 pytest-github-actions-annotate-failures==0.3.0 pytest==9.0.2 +pytest-docker==3.2.5 PyYaml==6.0.3 responses==0.25.8 respx==0.22.0 diff --git a/tests/conftest.py b/tests/conftest.py index de15d0a6c..ff5236362 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,38 +3,7 @@ import _pytest.config import pytest -import gitlab - @pytest.fixture(scope="session") def test_dir(pytestconfig: _pytest.config.Config) -> pathlib.Path: return pytestconfig.rootdir / "tests" # type: ignore - - -@pytest.fixture(autouse=True) -def mock_clean_config(monkeypatch: pytest.MonkeyPatch) -> None: - """Ensures user-defined environment variables do not interfere with tests.""" - monkeypatch.delenv("PYTHON_GITLAB_CFG", raising=False) - monkeypatch.delenv("GITLAB_PRIVATE_TOKEN", raising=False) - monkeypatch.delenv("GITLAB_URL", raising=False) - monkeypatch.delenv("CI_JOB_TOKEN", raising=False) - monkeypatch.delenv("CI_SERVER_URL", raising=False) - - -@pytest.fixture(autouse=True) -def default_files(monkeypatch: pytest.MonkeyPatch) -> None: - """Ensures user configuration files do not interfere with tests.""" - monkeypatch.setattr(gitlab.config, "_DEFAULT_FILES", []) - - -@pytest.fixture -def valid_gitlab_ci_yml() -> str: - return """--- -:test_job: - :script: echo 1 -""" - - -@pytest.fixture -def invalid_gitlab_ci_yml() -> str: - return "invalid" diff --git a/tests/functional/conftest.py b/tests/functional/conftest.py index f4f2f6df3..4aa202b05 100644 --- a/tests/functional/conftest.py +++ b/tests/functional/conftest.py @@ -1,42 +1,9 @@ -from __future__ import annotations - -import dataclasses -import datetime -import logging import pathlib -import tempfile -import time -import uuid -from subprocess import check_output -from typing import Sequence, TYPE_CHECKING import pytest -import requests import gitlab -import gitlab.base -from tests.functional import helpers -from tests.functional.fixtures.docker import * # noqa - -SLEEP_TIME = 10 - - -@dataclasses.dataclass -class GitlabVersion: - major: int - minor: int - patch: str - revision: str - - def __post_init__(self): - self.major, self.minor = int(self.major), int(self.minor) - - -@pytest.fixture(scope="session") -def gitlab_version(gl) -> 
GitlabVersion: - version, revision = gl.version() - major, minor, patch = version.split(".") - return GitlabVersion(major=major, minor=minor, patch=patch, revision=revision) +from gitlab.testing.fixtures.helpers import get_gitlab_plan @pytest.fixture(scope="session") @@ -44,233 +11,9 @@ def fixture_dir(test_dir: pathlib.Path) -> pathlib.Path: return test_dir / "functional" / "fixtures" -@pytest.fixture(scope="session") -def gitlab_service_name() -> str: - """The "service" name is the one defined in the `docker-compose.yml` file""" - return "gitlab" - - -@pytest.fixture(scope="session") -def gitlab_container_name() -> str: - """The "container" name is the one defined in the `docker-compose.yml` file - for the "gitlab" service""" - return "gitlab-test" - - -@pytest.fixture(scope="session") -def gitlab_docker_port(docker_services, gitlab_service_name: str) -> int: - port: int = docker_services.port_for(gitlab_service_name, container_port=80) - return port - - -@pytest.fixture(scope="session") -def gitlab_url(docker_ip: str, gitlab_docker_port: int) -> str: - return f"http://{docker_ip}:{gitlab_docker_port}" - - -def reset_gitlab(gl: gitlab.Gitlab) -> None: - """Delete resources (such as projects, groups, users) that shouldn't - exist.""" - if helpers.get_gitlab_plan(gl): - logging.info("GitLab EE detected") - # NOTE(jlvillal, timknight): By default in GitLab EE it will wait 7 days before - # deleting a group or project. - # In GL 16.0 we need to call delete with `permanently_remove=True` for projects and sub groups - # (handled in helpers.py safe_delete) - settings = gl.settings.get() - modified_settings = False - if settings.deletion_adjourned_period != 1: - logging.info("Setting `deletion_adjourned_period` to 1 Day") - settings.deletion_adjourned_period = 1 - modified_settings = True - if modified_settings: - settings.save() - - for project in gl.projects.list(): - for project_deploy_token in project.deploytokens.list(): - logging.info( - f"Deleting deploy token: {project_deploy_token.username!r} in " - f"project: {project.path_with_namespace!r}" - ) - helpers.safe_delete(project_deploy_token) - logging.info(f"Deleting project: {project.path_with_namespace!r}") - helpers.safe_delete(project) - - for group in gl.groups.list(): - # skip deletion of a descendant group to prevent scenarios where parent group - # gets deleted leaving a dangling descendant whose deletion will throw 404s. 
- if group.parent_id: - logging.info( - f"Skipping deletion of {group.full_path} as it is a descendant " - f"group and will be removed when the parent group is deleted" - ) - continue - - for group_deploy_token in group.deploytokens.list(): - logging.info( - f"Deleting deploy token: {group_deploy_token.username!r} in " - f"group: {group.path_with_namespace!r}" - ) - helpers.safe_delete(group_deploy_token) - logging.info(f"Deleting group: {group.full_path!r}") - helpers.safe_delete(group) - for topic in gl.topics.list(): - logging.info(f"Deleting topic: {topic.name!r}") - helpers.safe_delete(topic) - for variable in gl.variables.list(): - logging.info(f"Deleting variable: {variable.key!r}") - helpers.safe_delete(variable) - for user in gl.users.list(): - if user.username not in ["root", "ghost"]: - logging.info(f"Deleting user: {user.username!r}") - helpers.safe_delete(user) - - -def set_token(container: str, fixture_dir: pathlib.Path) -> str: - logging.info("Creating API token.") - set_token_rb = fixture_dir / "set_token.rb" - - with open(set_token_rb, encoding="utf-8") as f: - set_token_command = f.read().strip() - - rails_command = [ - "docker", - "exec", - container, - "gitlab-rails", - "runner", - set_token_command, - ] - output = check_output(rails_command).decode().strip() - logging.info("Finished creating API token.") - - return output - - -def pytest_report_collectionfinish( - config: pytest.Config, start_path: pathlib.Path, items: Sequence[pytest.Item] -): - return [ - "", - "Starting GitLab container.", - "Waiting for GitLab to reconfigure.", - "This will take a few minutes.", - ] - - -def pytest_addoption(parser): - parser.addoption( - "--keep-containers", - action="store_true", - help="Keep containers running after testing", - ) - - -@pytest.fixture(scope="session") -def temp_dir() -> pathlib.Path: - return pathlib.Path(tempfile.gettempdir()) - - -@pytest.fixture(scope="session") -def check_is_alive(): - """ - Return a healthcheck function fixture for the GitLab container spinup. - """ - - def _check(*, container: str, start_time: float, gitlab_url: str) -> bool: - setup_time = time.perf_counter() - start_time - minutes, seconds = int(setup_time / 60), int(setup_time % 60) - logging.info( - f"Checking if GitLab container is up. " - f"Have been checking for {minutes} minute(s), {seconds} seconds ..." - ) - logs = ["docker", "logs", container] - if "gitlab Reconfigured!" 
not in check_output(logs).decode(): - return False - logging.debug("GitLab has finished reconfiguring.") - for check in ("health", "readiness", "liveness"): - url = f"{gitlab_url}/-/{check}" - logging.debug(f"Checking {check!r} endpoint at: {url}") - try: - result = requests.get(url, timeout=1.0) - except requests.exceptions.Timeout: - logging.info(f"{check!r} check timed out") - return False - if result.status_code != 200: - logging.info(f"{check!r} check did not return 200: {result!r}") - return False - logging.debug(f"{check!r} check passed: {result!r}") - logging.debug(f"Sleeping for {SLEEP_TIME}") - time.sleep(SLEEP_TIME) - return True - - return _check - - -@pytest.fixture(scope="session") -def gitlab_token( - check_is_alive, - gitlab_container_name: str, - gitlab_url: str, - docker_services, - fixture_dir: pathlib.Path, -) -> str: - start_time = time.perf_counter() - logging.info("Waiting for GitLab container to become ready.") - docker_services.wait_until_responsive( - timeout=300, - pause=10, - check=lambda: check_is_alive( - container=gitlab_container_name, - start_time=start_time, - gitlab_url=gitlab_url, - ), - ) - setup_time = time.perf_counter() - start_time - minutes, seconds = int(setup_time / 60), int(setup_time % 60) - logging.info( - f"GitLab container is now ready after {minutes} minute(s), {seconds} seconds" - ) - - return set_token(gitlab_container_name, fixture_dir=fixture_dir) - - -@pytest.fixture(scope="session") -def gitlab_config(gitlab_url: str, gitlab_token: str, temp_dir: pathlib.Path): - config_file = temp_dir / "python-gitlab.cfg" - - config = f"""[global] -default = local -timeout = 60 - -[local] -url = {gitlab_url} -private_token = {gitlab_token} -api_version = 4""" - - with open(config_file, "w", encoding="utf-8") as f: - f.write(config) - - return config_file - - -@pytest.fixture(scope="session") -def gl(gitlab_url: str, gitlab_token: str) -> gitlab.Gitlab: - """Helper instance to make fixtures and asserts directly via the API.""" - - logging.info("Instantiating python-gitlab gitlab.Gitlab instance") - instance = gitlab.Gitlab(gitlab_url, private_token=gitlab_token) - instance.auth() - - logging.info("Reset GitLab") - reset_gitlab(instance) - - return instance - - @pytest.fixture(scope="session") def gitlab_plan(gl: gitlab.Gitlab) -> str | None: - return helpers.get_gitlab_plan(gl) + return get_gitlab_plan(gl) @pytest.fixture(autouse=True) @@ -278,7 +21,7 @@ def gitlab_premium(gitlab_plan, request) -> None: if gitlab_plan in ("premium", "ultimate"): return - if request.node.get_closest_marker("gitlab_ultimate"): + if request.node.get_closest_marker("gitlab_premium"): pytest.skip("Test requires GitLab Premium plan") @@ -289,372 +32,3 @@ def gitlab_ultimate(gitlab_plan, request) -> None: if request.node.get_closest_marker("gitlab_ultimate"): pytest.skip("Test requires GitLab Ultimate plan") - - -@pytest.fixture(scope="session") -def gitlab_runner(gl: gitlab.Gitlab): - container = "gitlab-runner-test" - runner_description = "python-gitlab-runner" - if TYPE_CHECKING: - assert gl.user is not None - - runner = gl.user.runners.create( - {"runner_type": "instance_type", "run_untagged": True} - ) - url = "http://gitlab" - - docker_exec = ["docker", "exec", container, "gitlab-runner"] - register = [ - "register", - "--non-interactive", - "--token", - runner.token, - "--description", - runner_description, - "--url", - url, - "--clone-url", - url, - "--executor", - "shell", - ] - - yield check_output(docker_exec + register).decode() - - 
gl.runners.delete(token=runner.token) - - -@pytest.fixture(scope="module") -def group(gl): - """Group fixture for group API resource tests.""" - _id = uuid.uuid4().hex - data = {"name": f"test-group-{_id}", "path": f"group-{_id}"} - group = gl.groups.create(data) - - yield group - - helpers.safe_delete(group) - - -@pytest.fixture(scope="module") -def project(gl): - """Project fixture for project API resource tests.""" - _id = uuid.uuid4().hex - name = f"test-project-{_id}" - - project = gl.projects.create(name=name) - - yield project - - helpers.safe_delete(project) - - -@pytest.fixture(scope="function") -def make_merge_request(project): - """Fixture factory used to create a merge_request. - - It will create a branch, add a commit to the branch, and then create a - merge request against project.default_branch. The MR will be returned. - - When finished any created merge requests and branches will be deleted. - - NOTE: No attempt is made to restore project.default_branch to its previous - state. So if the merge request is merged then its content will be in the - project.default_branch branch. - """ - - to_delete = [] - - def _make_merge_request(*, source_branch: str, create_pipeline: bool = False): - # Wait for processes to be done before we start... - # NOTE(jlvillal): Sometimes the CI would give a "500 Internal Server - # Error". Hoping that waiting until all other processes are done will - # help with that. - # Pause to let GL catch up (happens on hosted too, sometimes takes a while for server to be ready to merge) - time.sleep(30) - - project.refresh() # Gets us the current default branch - logging.info(f"Creating branch {source_branch}") - mr_branch = project.branches.create( - {"branch": source_branch, "ref": project.default_branch} - ) - # NOTE(jlvillal): Must create a commit in the new branch before we can - # create an MR that will work. 
- project.files.create( - { - "file_path": f"README.{source_branch}", - "branch": source_branch, - "content": "Initial content", - "commit_message": "New commit in new branch", - } - ) - - if create_pipeline: - project.files.create( - { - "file_path": ".gitlab-ci.yml", - "branch": source_branch, - "content": """ -test: - rules: - - if: '$CI_PIPELINE_SOURCE == "merge_request_event"' - script: - - sleep 24h # We don't expect this to finish -""", - "commit_message": "Add a simple pipeline", - } - ) - mr = project.mergerequests.create( - { - "source_branch": source_branch, - "target_branch": project.default_branch, - "title": "Should remove source branch", - "remove_source_branch": True, - } - ) - - # Pause to let GL catch up (happens on hosted too, sometimes takes a while for server to be ready to merge) - time.sleep(5) - - mr_iid = mr.iid - for _ in range(60): - mr = project.mergerequests.get(mr_iid) - if ( - mr.detailed_merge_status == "checking" - or mr.detailed_merge_status == "unchecked" - ): - time.sleep(0.5) - else: - break - - assert mr.detailed_merge_status != "checking" - assert mr.detailed_merge_status != "unchecked" - - to_delete.extend([mr, mr_branch]) - return mr - - yield _make_merge_request - - for object in to_delete: - helpers.safe_delete(object) - - -@pytest.fixture(scope="function") -def merge_request(make_merge_request, project): - _id = uuid.uuid4().hex - return make_merge_request(source_branch=f"branch-{_id}") - - -@pytest.fixture(scope="function") -def merge_request_with_pipeline(make_merge_request, project): - _id = uuid.uuid4().hex - return make_merge_request(source_branch=f"branch-{_id}", create_pipeline=True) - - -@pytest.fixture(scope="module") -def project_file(project): - """File fixture for tests requiring a project with files and branches.""" - project_file = project.files.create( - { - "file_path": "README", - "branch": "main", - "content": "Initial content", - "commit_message": "Initial commit", - } - ) - - return project_file - - -@pytest.fixture(scope="function") -def release(project, project_file): - _id = uuid.uuid4().hex - name = f"we_have_a_slash/test-release-{_id}" - - project.refresh() # Gets us the current default branch - release = project.releases.create( - { - "name": name, - "tag_name": _id, - "description": "description", - "ref": project.default_branch, - } - ) - - return release - - -@pytest.fixture(scope="function") -def service(project): - """This is just a convenience fixture to make test cases slightly prettier. Project - services are not idempotent. A service cannot be retrieved until it is enabled. 
- After it is enabled the first time, it can never be fully deleted, only disabled.""" - service = project.services.update("asana", {"api_key": "api_key"}) - - yield service - - try: - project.services.delete("asana") - except gitlab.exceptions.GitlabDeleteError as e: - print(f"Service already disabled: {e}") - - -@pytest.fixture(scope="module") -def user(gl): - """User fixture for user API resource tests.""" - _id = uuid.uuid4().hex - email = f"user{_id}@email.com" - username = f"user{_id}" - name = f"User {_id}" - password = "E4596f8be406Bc3a14a4ccdb1df80587" - - user = gl.users.create(email=email, username=username, name=name, password=password) - - yield user - - helpers.safe_delete(user) - - -@pytest.fixture(scope="module") -def issue(project): - """Issue fixture for issue API resource tests.""" - _id = uuid.uuid4().hex - data = {"title": f"Issue {_id}", "description": f"Issue {_id} description"} - - return project.issues.create(data) - - -@pytest.fixture(scope="module") -def milestone(project): - _id = uuid.uuid4().hex - data = {"title": f"milestone{_id}"} - - return project.milestones.create(data) - - -@pytest.fixture(scope="module") -def label(project): - """Label fixture for project label API resource tests.""" - _id = uuid.uuid4().hex - data = { - "name": f"prjlabel{_id}", - "description": f"prjlabel1 {_id} description", - "color": "#112233", - } - - return project.labels.create(data) - - -@pytest.fixture(scope="module") -def group_label(group): - """Label fixture for group label API resource tests.""" - _id = uuid.uuid4().hex - data = { - "name": f"grplabel{_id}", - "description": f"grplabel1 {_id} description", - "color": "#112233", - } - - return group.labels.create(data) - - -@pytest.fixture(scope="module") -def epic(group): - """Fixture for group epic API resource tests.""" - _id = uuid.uuid4().hex - return group.epics.create({"title": f"epic-{_id}", "description": f"Epic {_id}"}) - - -@pytest.fixture(scope="module") -def variable(project): - """Variable fixture for project variable API resource tests.""" - _id = uuid.uuid4().hex - data = {"key": f"var{_id}", "value": f"Variable {_id}"} - - return project.variables.create(data) - - -@pytest.fixture(scope="module") -def deploy_token(project): - """Deploy token fixture for project deploy token API resource tests.""" - _id = uuid.uuid4().hex - data = { - "name": f"token-{_id}", - "username": "root", - "expires_at": datetime.date.today().isoformat(), - "scopes": "read_registry", - } - - return project.deploytokens.create(data) - - -@pytest.fixture(scope="module") -def group_deploy_token(group): - """Deploy token fixture for group deploy token API resource tests.""" - _id = uuid.uuid4().hex - data = { - "name": f"group-token-{_id}", - "username": "root", - "expires_at": datetime.date.today().isoformat(), - "scopes": "read_registry", - } - - return group.deploytokens.create(data) - - -@pytest.fixture(scope="session") -def GPG_KEY(): - return """-----BEGIN PGP PUBLIC KEY BLOCK----- - -mQENBFn5mzYBCADH6SDVPAp1zh/hxmTi0QplkOfExBACpuY6OhzNdIg+8/528b3g -Y5YFR6T/HLv/PmeHskUj21end1C0PNG2T9dTx+2Vlh9ISsSG1kyF9T5fvMR3bE0x -Dl6S489CXZrjPTS9SHk1kF+7dwjUxLJyxF9hPiSihFefDFu3NeOtG/u8vbC1mewQ -ZyAYue+mqtqcCIFFoBz7wHKMWjIVSJSyTkXExu4OzpVvy3l2EikbvavI3qNz84b+ -Mgkv/kiBlNoCy3CVuPk99RYKZ3lX1vVtqQ0OgNGQvb4DjcpyjmbKyibuZwhDjIOh -au6d1OyEbayTntd+dQ4j9EMSnEvm/0MJ4eXPABEBAAG0G0dpdGxhYlRlc3QxIDxm -YWtlQGZha2UudGxkPokBNwQTAQgAIQUCWfmbNgIbAwULCQgHAgYVCAkKCwIEFgID -AQIeAQIXgAAKCRBgxELHf8f3hF3yB/wNJlWPKY65UsB4Lo0hs1OxdxCDqXogSi0u 
-6crDEIiyOte62pNZKzWy8TJcGZvznRTZ7t8hXgKFLz3PRMcl+vAiRC6quIDUj+2V -eYfwaItd1lUfzvdCaC7Venf4TQ74f5vvNg/zoGwE6eRoSbjlLv9nqsxeA0rUBUQL -LYikWhVMP3TrlfgfduYvh6mfgh57BDLJ9kJVpyfxxx9YLKZbaas9sPa6LgBtR555 -JziUxHmbEv8XCsUU8uoFeP1pImbNBplqE3wzJwzOMSmmch7iZzrAwfN7N2j3Wj0H -B5kQddJ9dmB4BbU0IXGhWczvdpxboI2wdY8a1JypxOdePoph/43iuQENBFn5mzYB -CADnTPY0Zf3d9zLjBNgIb3yDl94uOcKCq0twNmyjMhHzGqw+UMe9BScy34GL94Al -xFRQoaL+7P8hGsnsNku29A/VDZivcI+uxTx4WQ7OLcn7V0bnHV4d76iky2ufbUt/ -GofthjDs1SonePO2N09sS4V4uK0d5N4BfCzzXgvg8etCLxNmC9BGt7AaKUUzKBO4 -2QvNNaC2C/8XEnOgNWYvR36ylAXAmo0sGFXUsBCTiq1fugS9pwtaS2JmaVpZZ3YT -pMZlS0+SjC5BZYFqSmKCsA58oBRzCxQz57nR4h5VEflgD+Hy0HdW0UHETwz83E6/ -U0LL6YyvhwFr6KPq5GxinSvfABEBAAGJAR8EGAEIAAkFAln5mzYCGwwACgkQYMRC -x3/H94SJgwgAlKQb10/xcL/epdDkR7vbiei7huGLBpRDb/L5fM8B5W77Qi8Xmuqj -cCu1j99ZCA5hs/vwVn8j8iLSBGMC5gxcuaar/wtmiaEvT9fO/h6q4opG7NcuiJ8H -wRj8ccJmRssNqDD913PLz7T40Ts62blhrEAlJozGVG/q7T3RAZcskOUHKeHfc2RI -YzGsC/I9d7k6uxAv1L9Nm5F2HaAQDzhkdd16nKkGaPGR35cT1JLInkfl5cdm7ldN -nxs4TLO3kZjUTgWKdhpgRNF5hwaz51ZjpebaRf/ZqRuNyX4lIRolDxzOn/+O1o8L -qG2ZdhHHmSK2LaQLFiSprUkikStNU9BqSQ== -=5OGa ------END PGP PUBLIC KEY BLOCK-----""" - - -@pytest.fixture(scope="session") -def SSH_KEY(): - return ( - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDZAjAX8vTiHD7Yi3/EzuVaDChtih" - "79HyJZ6H9dEqxFfmGA1YnncE0xujQ64TCebhkYJKzmTJCImSVkOu9C4hZgsw6eE76n" - "+Cg3VwEeDUFy+GXlEJWlHaEyc3HWioxgOALbUp3rOezNh+d8BDwwqvENGoePEBsz5l" - "a6WP5lTi/HJIjAl6Hu+zHgdj1XVExeH+S52EwpZf/ylTJub0Bl5gHwf/siVE48mLMI" - "sqrukXTZ6Zg+8EHAIvIQwJ1dKcXe8P5IoLT7VKrbkgAnolS0I8J+uH7KtErZJb5oZh" - "S4OEwsNpaXMAr+6/wWSpircV2/e7sFLlhlKBC4Iq1MpqlZ7G3p foo@bar" - ) - - -@pytest.fixture(scope="session") -def DEPLOY_KEY(): - return ( - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDFdRyjJQh+1niBpXqE2I8dzjG" - "MXFHlRjX9yk/UfOn075IdaockdU58sw2Ai1XIWFpZpfJkW7z+P47ZNSqm1gzeXI" - "rtKa9ZUp8A7SZe8vH4XVn7kh7bwWCUirqtn8El9XdqfkzOs/+FuViriUWoJVpA6" - "WZsDNaqINFKIA5fj/q8XQw+BcS92L09QJg9oVUuH0VVwNYbU2M2IRmSpybgC/gu" - "uWTrnCDMmLItksATifLvRZwgdI8dr+q6tbxbZknNcgEPrI2jT0hYN9ZcjNeWuyv" - "rke9IepE7SPBT41C+YtUX4dfDZDmczM1cE0YL/krdUCfuZHMa4ZS2YyNd6slufc" - "vn bar@foo" - ) diff --git a/tests/functional/helpers.py b/tests/functional/helpers.py index 090673bf7..7507d39e5 100644 --- a/tests/functional/helpers.py +++ b/tests/functional/helpers.py @@ -1,73 +1,8 @@ -from __future__ import annotations +"""Helper utilities for functional tests. -import logging -import time -from typing import TYPE_CHECKING +Re-exported from gitlab.testing for backward compatibility. +""" -import pytest +from gitlab.testing.fixtures.helpers import get_gitlab_plan, safe_delete -import gitlab -import gitlab.base -import gitlab.exceptions - -SLEEP_INTERVAL = 0.5 -TIMEOUT = 60 # seconds before timeout will occur -MAX_ITERATIONS = int(TIMEOUT / SLEEP_INTERVAL) - - -def get_gitlab_plan(gl: gitlab.Gitlab) -> str | None: - """Determine the license available on the GitLab instance""" - try: - license = gl.get_license() - except gitlab.exceptions.GitlabLicenseError: - # Without a license we assume only Free features are available - return None - - if TYPE_CHECKING: - assert isinstance(license["plan"], str) - return license["plan"] - - -def safe_delete(object: gitlab.base.RESTObject) -> None: - """Ensure the object specified can not be retrieved. 
If object still exists after - timeout period, fail the test""" - manager = object.manager - for index in range(MAX_ITERATIONS): - try: - object = manager.get(object.get_id()) # type: ignore[attr-defined] - except gitlab.exceptions.GitlabGetError: - return - - if index: - logging.info(f"Attempt {index + 1} to delete {object!r}.") - try: - if isinstance(object, gitlab.v4.objects.User): - # You can't use this option if the selected user is the sole owner of any groups - # Use `hard_delete=True` or a 'Ghost User' may be created. - # https://docs.gitlab.com/ee/api/users.html#user-deletion - object.delete(hard_delete=True) - if index > 1: - # If User is the sole owner of any group it won't be deleted, - # which combined with parents group never immediately deleting in GL 16 - # we shouldn't cause test to fail if it still exists - return - elif isinstance(object, gitlab.v4.objects.Project): - # Immediately delete rather than waiting for at least 1day - # https://docs.gitlab.com/ee/api/projects.html#delete-project - object.delete(permanently_remove=True) - pass - else: - # We only attempt to delete parent groups to prevent dangling sub-groups - # However parent groups can only be deleted on a delay in Gl 16 - # https://docs.gitlab.com/ee/api/groups.html#remove-group - object.delete() - except gitlab.exceptions.GitlabDeleteError: - logging.info(f"{object!r} already deleted or scheduled for deletion.") - if isinstance(object, gitlab.v4.objects.Group): - # Parent groups can never be immediately deleted in GL 16, - # so don't cause test to fail if it still exists - return - pass - - time.sleep(SLEEP_INTERVAL) - pytest.fail(f"{object!r} was not deleted") +__all__ = ["get_gitlab_plan", "safe_delete"] diff --git a/tests/unit/test_pytest_plugin.py b/tests/unit/test_pytest_plugin.py new file mode 100644 index 000000000..b193229db --- /dev/null +++ b/tests/unit/test_pytest_plugin.py @@ -0,0 +1,6 @@ +"""Tests for the gitlab pytest plugin.""" + + +def test_gitlab_plugin_is_registered(request): + """'gitlab' is a registered pytest plugin.""" + assert request.config.pluginmanager.hasplugin("gitlab") diff --git a/tox.ini b/tox.ini index 0ba295692..49c585b8f 100644 --- a/tox.ini +++ b/tox.ini @@ -89,7 +89,7 @@ commands = commands = {posargs} [flake8] -exclude = .git,.venv,.tox,dist,doc,*egg,build, +extend-exclude = .venv,.direnv,dist,doc,build, max-line-length = 88 # We ignore the following because we use black to handle code-formatting # E203: Whitespace before ':'
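
With the ``testing`` extra and the ``pytest11`` entry point added above, downstream projects should be able to consume these fixtures without copying any conftest code. Below is a minimal sketch of what that might look like, assuming ``pip install python-gitlab[testing]`` pulls in ``pytest-docker`` and that the entry point registers the ``gl`` and ``project`` fixtures defined in ``gitlab/testing/fixtures/gitlab.py``; the test module name is hypothetical.

.. code-block:: python

    # Hypothetical downstream test module, e.g. tests/test_gitlab_integration.py.
    # Assumes the plugin was installed via the new extra:
    #   pip install python-gitlab[testing]
    # and that tests are run against the Docker-backed GitLab instance, e.g.:
    #   pytest --keep-containers-running tests/test_gitlab_integration.py
    import gitlab


    def test_plugin_fixtures_are_injected(gl: gitlab.Gitlab, project) -> None:
        """The session-scoped 'gl' client and module-scoped 'project' fixture
        should be available without any local conftest.py wiring."""
        # 'gl' is already authenticated against the container started by
        # pytest-docker, so plain API calls work directly.
        assert gl.user is not None
        fetched = gl.projects.get(project.id)
        assert fetched.name == project.name

Re-running such a test with ``--keep-containers-running`` should reuse the already-provisioned container (and the cached ``.gitlab_token``), which is the workflow the CONTRIBUTING changes at the top of this diff describe.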