diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index a8cbc559..8b9b81ba 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -33,6 +33,9 @@ jobs: - name: Lint with flake8 run: poetry run flake8 + - name: Lint with mypy + run: poetry run mypy . + - name: Test with pytest run: poetry run py.test --cov=./ --cov-append --cov-report=xml env: diff --git a/CHANGES b/CHANGES index e5c2bc0c..dda4e8c1 100644 --- a/CHANGES +++ b/CHANGES @@ -26,7 +26,8 @@ $ pipx install --suffix=@next 'vcspull' --pip-args '\--pre' --force ### Development - Move to `src/` directory structure (#382) -- libvcs: Update to 0.13.x +- libvcs: Update to 0.17.x (#373) +- Basic mypy annotations (#373) - Remove `.pre-commit-config.yaml`: Let's not automate what the contributor could / should do themselves. - Add [flake8-bugbear](https://github.com/PyCQA/flake8-bugbear) (#379) diff --git a/Makefile b/Makefile index fcd71372..7c950cae 100644 --- a/Makefile +++ b/Makefile @@ -49,3 +49,9 @@ watch_mypy: format_markdown: prettier --parser=markdown -w *.md docs/*.md docs/**/*.md CHANGES + +monkeytype_create: + poetry run monkeytype run `poetry run which py.test` + +monkeytype_apply: + poetry run monkeytype list-modules | xargs -n1 -I{} sh -c 'poetry run monkeytype apply {}' diff --git a/docs/conf.py b/docs/conf.py index 32789a40..5901e5c6 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -12,7 +12,7 @@ sys.path.insert(0, str(cwd / "_ext")) # package data -about = {} +about: dict[str, str] = {} with open(src_root / "vcspull" / "__about__.py") as fp: exec(fp.read(), about) @@ -60,7 +60,7 @@ html_css_files = ["css/custom.css"] html_extra_path = ["manifest.json"] html_theme = "furo" -html_theme_path = [] +html_theme_path: list = [] html_theme_options = { "light_logo": "img/vcspull.svg", "dark_logo": "img/vcspull-dark.svg", diff --git a/docs/developing.md b/docs/developing.md index 1cdb2388..d7fa7f71 100644 --- a/docs/developing.md +++ b/docs/developing.md @@ -163,7 +163,7 @@ $ make serve ## Linting -[flake8] run via CI in our GitHub Actions. See the configuration in `pyproject.toml` and +[flake8] and [mypy] run via CI in our GitHub Actions. See the configuration in `pyproject.toml` and `setup.cfg`. ### flake8 @@ -217,6 +217,55 @@ See `[flake8]` in setup.cfg. ```` +### mypy + +[mypy] is used for static type checking. + +````{tab} Command + +poetry: + +```console +$ poetry run mypy . +``` + +If you set up manually: + +```console +$ mypy . +``` + +```` + +````{tab} make + +```console +$ make mypy +``` + +```` + +````{tab} Watch + +```console +$ make watch_mypy +``` + +requires [`entr(1)`]. +```` + +````{tab} Configuration + +See `[mypy]` in setup.cfg. + +```{literalinclude} ../setup.cfg +:language: ini +:start-at: "[mypy]" + +``` + +```` + ## Publishing to PyPI As of 0.10, [poetry] handles virtualenv creation, package requirements, versioning, @@ -237,3 +286,4 @@ Update `__version__` in `__about__.py` and `pyproject.toml`:: [black]: https://github.com/psf/black [isort]: https://pypi.org/project/isort/ [flake8]: https://flake8.pycqa.org/ +[mypy]: http://mypy-lang.org/ diff --git a/poetry.lock b/poetry.lock index 1eab6be7..efe064ae 100644 --- a/poetry.lock +++ b/poetry.lock @@ -286,8 +286,8 @@ PyYAML = ">=3.13,<6" [[package]] name = "libvcs" -version = "0.13.7" -description = "Lite, typed, python library wrapper for git, svn, mercurial, etc." +version = "0.17.0a0" +description = "Lite, typed, python utilities for Git, SVN, Mercurial, etc."
category = "main" optional = false python-versions = ">=3.9,<4.0" @@ -864,6 +864,41 @@ category = "dev" optional = false python-versions = ">= 3.7" +[[package]] +name = "types-colorama" +version = "0.4.15" +description = "Typing stubs for colorama" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "types-PyYAML" +version = "6.0.11" +description = "Typing stubs for PyYAML" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "types-requests" +version = "2.28.11" +description = "Typing stubs for requests" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +types-urllib3 = "<1.27" + +[[package]] +name = "types-urllib3" +version = "1.26.24" +description = "Typing stubs for urllib3" +category = "dev" +optional = false +python-versions = "*" + [[package]] name = "typing-extensions" version = "4.3.0" @@ -918,7 +953,7 @@ test = [] [metadata] lock-version = "1.1" python-versions = "^3.9" -content-hash = "18c3e8e315bd28dfe02ee204c454ac19189a6a380c6c1b28566b2a7f2578633a" +content-hash = "55a0ae92095a055f7e8f2407deef2dca506558d549a78a23ac4f0602292d46ac" [metadata.files] alabaster = [ @@ -1086,8 +1121,8 @@ kaptan = [ {file = "kaptan-0.5.12.tar.gz", hash = "sha256:1abd1f56731422fce5af1acc28801677a51e56f5d3c3e8636db761ed143c3dd2"}, ] libvcs = [ - {file = "libvcs-0.13.7-py3-none-any.whl", hash = "sha256:9c1feed9b2dd43d790ab4fb4410e0bf18809981a64585caa32cf642254a68fe4"}, - {file = "libvcs-0.13.7.tar.gz", hash = "sha256:90e08cc3f2d2801c8ff555bf952ead7af940485f28dc578b8d079ebcd24f4b57"}, + {file = "libvcs-0.17.0a0-py3-none-any.whl", hash = "sha256:edf5b53f83d0c92e54657fbacb784af26892729cf065ca62c35eaf48f1013aac"}, + {file = "libvcs-0.17.0a0.tar.gz", hash = "sha256:2b4f45d7fd19de3f73a66b44a9ba5702d3251b25b19b230ce5c1eef01231de6a"}, ] livereload = [ {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"}, @@ -1367,6 +1402,22 @@ tornado = [ {file = "tornado-6.2-cp37-abi3-win_amd64.whl", hash = "sha256:e5f923aa6a47e133d1cf87d60700889d7eae68988704e20c75fb2d65677a8e4b"}, {file = "tornado-6.2.tar.gz", hash = "sha256:9b630419bde84ec666bfd7ea0a4cb2a8a651c2d5cccdbdd1972a0c859dfc3c13"}, ] +types-colorama = [ + {file = "types-colorama-0.4.15.tar.gz", hash = "sha256:fd128b1e32f3fecec5f09df4366d21498ee86ea31fcf8b4e8f1ade6d0bbf9832"}, + {file = "types_colorama-0.4.15-py3-none-any.whl", hash = "sha256:9cdc88dcde9e8ebafb2fdfaf5cee260452f93e5c57eb5d8b2a7f65b836d4e5d0"}, +] +types-PyYAML = [ + {file = "types-PyYAML-6.0.11.tar.gz", hash = "sha256:7f7da2fd11e9bc1e5e9eb3ea1be84f4849747017a59fc2eee0ea34ed1147c2e0"}, + {file = "types_PyYAML-6.0.11-py3-none-any.whl", hash = "sha256:8f890028123607379c63550179ddaec4517dc751f4c527a52bb61934bf495989"}, +] +types-requests = [ + {file = "types-requests-2.28.11.tar.gz", hash = "sha256:7ee827eb8ce611b02b5117cfec5da6455365b6a575f5e3ff19f655ba603e6b4e"}, + {file = "types_requests-2.28.11-py3-none-any.whl", hash = "sha256:af5f55e803cabcfb836dad752bd6d8a0fc8ef1cd84243061c0e27dee04ccf4fd"}, +] +types-urllib3 = [ + {file = "types-urllib3-1.26.24.tar.gz", hash = "sha256:a1b3aaea7dda3eb1b51699ee723aadd235488e4dc4648e030f09bc429ecff42f"}, + {file = "types_urllib3-1.26.24-py3-none-any.whl", hash = "sha256:cf7918503d02d3576e503bbfb419b0e047c4617653bba09624756ab7175e15c9"}, +] typing-extensions = [ {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, {file = 
"typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"}, diff --git a/pyproject.toml b/pyproject.toml index e54e34c5..243cf224 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,7 +57,7 @@ vcspull = 'vcspull:cli.cli' python = "^3.9" click = "~8" kaptan = "*" -libvcs = "~0.13.7" +libvcs = "~0.17.0a0" colorama = ">=0.3.9" [tool.poetry.dev-dependencies] @@ -94,6 +94,11 @@ flake8-bugbear = "^22.8.23" flake8-comprehensions = "*" mypy = "*" +### Lint : Annotations ### +types-requests = "^2.28.11" +types-PyYAML = "^6.0.11" +types-colorama = "^0.4.15" + [tool.poetry.extras] docs = [ "sphinx", @@ -111,7 +116,25 @@ docs = [ test = ["pytest", "pytest-rerunfailures", "pytest-watcher"] coverage = ["codecov", "coverage", "pytest-cov"] format = ["black", "isort"] -lint = ["flake8", "flake8-bugbear", "flake8-comprehensions", "mypy"] +lint = [ + "flake8", + "flake8-bugbear", + "flake8-comprehensions", + "mypy", + "types-requests", + "types-PyYAML", + "types-colorama", +] + +[tool.mypy] +python_version = 3.9 +warn_unused_configs = true + +[[tool.mypy.overrides]] +module = [ + "kaptan.*", +] +ignore_missing_imports = true [tool.coverage.run] branch = true diff --git a/scripts/generate_gitlab.py b/scripts/generate_gitlab.py index 87dce4b4..aa1c3e0e 100755 --- a/scripts/generate_gitlab.py +++ b/scripts/generate_gitlab.py @@ -58,20 +58,20 @@ print("File %s not accesible" % (config_filename)) sys.exit(1) -result = requests.get( +response = requests.get( "%s/api/v4/groups/%s/projects" % (gitlab_host, gitlab_namespace), params={"include_subgroups": "true", "per_page": "100"}, headers={"Authorization": "Bearer %s" % (gitlab_token)}, ) -if 200 != result.status_code: - print("Error: ", result) +if 200 != response.status_code: + print("Error: ", response) sys.exit(1) path_prefix = os.getcwd() -config = {} +config: dict = {} -for group in result.json(): +for group in response.json(): url_to_repo = group["ssh_url_to_repo"].replace(":", "/") namespace_path = group["namespace"]["full_path"] reponame = group["path"] diff --git a/src/vcspull/cli/sync.py b/src/vcspull/cli/sync.py index c48c4acc..76ffe637 100644 --- a/src/vcspull/cli/sync.py +++ b/src/vcspull/cli/sync.py @@ -6,7 +6,9 @@ import click.shell_completion from click.shell_completion import CompletionItem -from libvcs.shortcuts import create_project_from_pip_url +from libvcs._internal.shortcuts import create_project +from libvcs.url import registry as url_tools +from vcspull.types import ConfigDict from ..config import filter_repos, find_config_files, load_configs @@ -21,13 +23,13 @@ def get_repo_completions( if ctx.params["config"] is None else load_configs(files=[ctx.params["config"]]) ) - found_repos = [] + found_repos: list[ConfigDict] = [] repo_terms = [incomplete] for repo_term in repo_terms: dir, vcs_url, name = None, None, None if any(repo_term.startswith(n) for n in ["./", "/", "~", "$HOME"]): - dir = repo_term + dir = dir elif any(repo_term.startswith(n) for n in ["http", "git", "svn", "hg"]): vcs_url = repo_term else: @@ -105,9 +107,21 @@ def update_repo(repo_dict): repo_dict = deepcopy(repo_dict) if "pip_url" not in repo_dict: repo_dict["pip_url"] = repo_dict.pop("url") + if "url" not in repo_dict: + repo_dict["url"] = repo_dict.pop("pip_url") repo_dict["progress_callback"] = progress_cb - r = create_project_from_pip_url(**repo_dict) # Creates the repo object + if repo_dict.get("vcs") is None: + vcs_matches = url_tools.registry.match(url=repo_dict["url"], is_explicit=True) + + if 
len(vcs_matches) == 0: + raise Exception(f"No vcs found for {repo_dict}") + if len(vcs_matches) > 1: + raise Exception(f"No exact matches for {repo_dict}") + + repo_dict["vcs"] = vcs_matches[0].vcs + + r = create_project(**repo_dict) # Creates the repo object r.update_repo(set_remotes=True) # Creates repo if not exists and fetches return r diff --git a/src/vcspull/config.py b/src/vcspull/config.py index e33e14d7..e68836ef 100644 --- a/src/vcspull/config.py +++ b/src/vcspull/config.py @@ -9,17 +9,23 @@ import logging import os import pathlib +import typing as t from typing import Literal, Optional, Union import kaptan -from libvcs.projects.git import GitRemote +from libvcs._internal.types import StrPath +from libvcs.sync.git import GitRemote from . import exc +from .types import ConfigDict, RawConfigDict from .util import get_config_dir, update_dict log = logging.getLogger(__name__) +if t.TYPE_CHECKING: + from typing_extensions import TypeGuard + def expand_dir( _dir: pathlib.Path, cwd: pathlib.Path = pathlib.Path.cwd() @@ -45,7 +51,7 @@ def expand_dir( return _dir -def extract_repos(config: dict, cwd=pathlib.Path.cwd()) -> list[dict]: +def extract_repos(config: RawConfigDict, cwd=pathlib.Path.cwd()) -> list[ConfigDict]: """Return expanded configuration. end-user configuration permit inline configuration shortcuts, expand to @@ -62,11 +68,11 @@ def extract_repos(config: dict, cwd=pathlib.Path.cwd()) -> list[dict]: ------- list : List of normalized repository information """ - configs = [] + configs: list[ConfigDict] = [] for directory, repos in config.items(): + assert isinstance(repos, dict) for repo, repo_data in repos.items(): - - conf = {} + conf: dict = {} """ repo_name: http://myrepo.com/repo.git @@ -91,21 +97,36 @@ def extract_repos(config: dict, cwd=pathlib.Path.cwd()) -> list[dict]: if "name" not in conf: conf["name"] = repo - if "parent_dir" not in conf: - conf["parent_dir"] = expand_dir(directory, cwd=cwd) - - # repo_dir -> dir in libvcs 0.12.0b25 - if "repo_dir" in conf and "dir" not in conf: - conf["dir"] = conf.pop("repo_dir") if "dir" not in conf: - conf["dir"] = expand_dir(conf["parent_dir"] / conf["name"], cwd) + conf["dir"] = expand_dir( + pathlib.Path(expand_dir(pathlib.Path(directory), cwd=cwd)) + / conf["name"], + cwd, + ) if "remotes" in conf: + assert isinstance(conf["remotes"], dict) for remote_name, url in conf["remotes"].items(): - conf["remotes"][remote_name] = GitRemote( - name=remote_name, fetch_url=url, push_url=url - ) + if isinstance(url, GitRemote): + continue + if isinstance(url, str): + conf["remotes"][remote_name] = GitRemote( + name=remote_name, fetch_url=url, push_url=url + ) + elif isinstance(url, dict): + assert "push_url" in url + assert "fetch_url" in url + conf["remotes"][remote_name] = GitRemote( + name=remote_name, **url + ) + + def is_valid_config_dict(val: t.Any) -> "TypeGuard[ConfigDict]": + assert isinstance(val, dict) + return True + + assert is_valid_config_dict(conf) + configs.append(conf) return configs @@ -142,7 +163,9 @@ def find_home_config_files( def find_config_files( path: Optional[Union[list[pathlib.Path], pathlib.Path]] = None, match: Union[list[str], str] = ["*"], - filetype: list[Literal["json", "yaml"]] = ["json", "yaml"], + filetype: Union[ + Literal["json", "yaml", "*"], list[Literal["json", "yaml", "*"]] + ] = ["json", "yaml"], include_home: bool = False, ): """Return repos from a directory and match. Not recursive. 
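The `update_repo` change above replaces `create_project_from_pip_url` with `create_project`, which needs an explicit `vcs`; when the config omits it, the URL is matched against libvcs's URL registry. Below is a minimal sketch of that detection in isolation, assuming libvcs 0.17's `url_tools.registry.match(...)` returns match objects exposing a `.vcs` attribute, exactly as used in the hunk above; the example URL is illustrative only.

```python
from libvcs.url import registry as url_tools


def infer_vcs(url: str) -> str:
    """Infer the VCS backend for a URL, mirroring the detection added to update_repo()."""
    matches = url_tools.registry.match(url=url, is_explicit=True)
    if len(matches) == 0:
        raise Exception(f"No vcs found for {url}")
    if len(matches) > 1:
        raise Exception(f"Multiple vcs matches found for {url}")
    return matches[0].vcs


# Hypothetical usage: an explicitly prefixed URL should resolve to "git".
# infer_vcs("git+https://github.com/vcs-python/libvcs.git")  # -> "git"
```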
@@ -190,12 +213,12 @@ def find_config_files( configs.extend(find_config_files(path, match, f)) else: match = f"{match}.{filetype}" - configs = path.glob(match) + configs = list(path.glob(match)) return configs -def load_configs(files: list[Union[str, pathlib.Path]], cwd=pathlib.Path.cwd()): +def load_configs(files: list[StrPath], cwd=pathlib.Path.cwd()): """Return repos from a list of files. Parameters @@ -214,10 +237,11 @@ def load_configs(files: list[Union[str, pathlib.Path]], cwd=pathlib.Path.cwd()): ---- Validate scheme, check for duplicate destinations, VCS urls """ - repos = [] + repos: list[ConfigDict] = [] for file in files: if isinstance(file, str): file = pathlib.Path(file) + assert isinstance(file, pathlib.Path) ext = file.suffix.lstrip(".") conf = kaptan.Kaptan(handler=ext).import_config(str(file)) newrepos = extract_repos(conf.export("dict"), cwd=cwd) @@ -228,7 +252,7 @@ def load_configs(files: list[Union[str, pathlib.Path]], cwd=pathlib.Path.cwd()): dupes = detect_duplicate_repos(repos, newrepos) - if dupes: + if len(dupes) > 0: msg = ("repos with same path + different VCS detected!", dupes) raise exc.VCSPullException(msg) repos.extend(newrepos) @@ -236,43 +260,41 @@ def load_configs(files: list[Union[str, pathlib.Path]], cwd=pathlib.Path.cwd()): return repos -def detect_duplicate_repos(repos1: list[dict], repos2: list[dict]): +ConfigDictTuple = tuple[ConfigDict, ConfigDict] + + +def detect_duplicate_repos( + config1: list[ConfigDict], config2: list[ConfigDict] +) -> list[ConfigDictTuple]: """Return duplicate repos dict if repo_dir same and vcs different. Parameters ---------- - repos1 : dict - list of repo expanded dicts + config1 : list[ConfigDict] - repos2 : dict - list of repo expanded dicts + config2 : list[ConfigDict] Returns ------- - list of dict, or None - Duplicate repos + list[ConfigDictTuple] + List of duplicate tuples """ - dupes = [] - path_dupe_repos = [] + if not config1: + return [] - curpaths = [r["dir"] for r in repos1] - newpaths = [r["dir"] for r in repos2] - path_duplicates = list(set(curpaths).intersection(newpaths)) + dupes: list[ConfigDictTuple] = [] - if not path_duplicates: - return None + repo_dirs = { + pathlib.Path(repo["dir"]).parent / repo["name"]: repo for repo in config1 + } + repo_dirs_2 = { + pathlib.Path(repo["dir"]).parent / repo["name"]: repo for repo in config2 + } - path_dupe_repos.extend( - [r for r in repos2 if any(r["dir"] == p for p in path_duplicates)] - ) + for repo_dir, repo in repo_dirs.items(): + if repo_dir in repo_dirs_2: + dupes.append((repo, repo_dirs_2[repo_dir])) - if not path_dupe_repos: - return None - - for n in path_dupe_repos: - currepo = next((r for r in repos1 if r["dir"] == n["dir"]), None) - if n["url"] != currepo["url"]: - dupes += (n, currepo) return dupes @@ -302,11 +324,11 @@ def in_dir(config_dir=None, extensions: list[str] = [".yml", ".yaml", ".json"]): def filter_repos( - config: dict, - dir: Union[pathlib.Path, None] = None, + config: list[ConfigDict], + dir: Union[pathlib.Path, Literal["*"], None] = None, vcs_url: Union[str, None] = None, name: Union[str, None] = None, -): +) -> list[ConfigDict]: """Return a :py:obj:`list` list of repos from (expanded) config file. dir, vcs_url and name all support fnmatch. 
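The rewritten `detect_duplicate_repos` above keys each entry on `pathlib.Path(repo["dir"]).parent / repo["name"]` and returns `(config1_repo, config2_repo)` tuples instead of a flat list or `None`; `load_configs` then treats any non-empty result as fatal. A rough usage sketch, with made-up paths and URLs:

```python
import pathlib

from vcspull.config import detect_duplicate_repos
from vcspull.types import ConfigDict

repo_a: ConfigDict = {
    "vcs": "git",
    "name": "libvcs",
    "dir": pathlib.Path("/home/me/code/libvcs"),
    "url": "git+https://github.com/vcs-python/libvcs.git",
}
# Same parent dir + name but a different URL: still reported, since only paths are compared.
repo_b: ConfigDict = {**repo_a, "url": "git+https://example.com/fork/libvcs.git"}

dupes = detect_duplicate_repos([repo_a], [repo_b])
assert dupes == [(repo_a, repo_b)]  # one (config1, config2) pair per colliding path
```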
@@ -327,23 +349,35 @@ def filter_repos( list : Repos """ - repo_list = [] + repo_list: list[ConfigDict] = [] if dir: - repo_list.extend([r for r in config if fnmatch.fnmatch(r["parent_dir"], dir)]) + repo_list.extend( + [ + r + for r in config + if fnmatch.fnmatch(str(pathlib.Path(r["dir"]).parent), str(dir)) + ] + ) if vcs_url: repo_list.extend( - r for r in config if fnmatch.fnmatch(r.get("url", r.get("repo")), vcs_url) + r + for r in config + if fnmatch.fnmatch(str(r.get("url", r.get("repo"))), vcs_url) ) if name: - repo_list.extend([r for r in config if fnmatch.fnmatch(r.get("name"), name)]) + repo_list.extend( + [r for r in config if fnmatch.fnmatch(str(r.get("name")), name)] + ) return repo_list -def is_config_file(filename: str, extensions: list[str] = [".yml", ".yaml", ".json"]): +def is_config_file( + filename: str, extensions: Union[list[str], str] = [".yml", ".yaml", ".json"] +): """Return True if file has a valid config file type. Parameters diff --git a/src/vcspull/types.py b/src/vcspull/types.py new file mode 100644 index 00000000..6e088dc9 --- /dev/null +++ b/src/vcspull/types.py @@ -0,0 +1,27 @@ +import typing as t + +from typing_extensions import NotRequired, TypedDict + +from libvcs._internal.types import StrPath, VCSLiteral +from libvcs.sync.git import GitSyncRemoteDict + + +class RawConfigDict(t.TypedDict): + vcs: VCSLiteral + name: str + dir: StrPath + url: str + remotes: GitSyncRemoteDict + + +RawConfigDir = dict[str, RawConfigDict] +RawConfig = dict[str, RawConfigDir] + + +class ConfigDict(TypedDict): + vcs: t.Optional[VCSLiteral] + name: str + dir: StrPath + url: str + remotes: NotRequired[t.Optional[GitSyncRemoteDict]] + shell_command_after: NotRequired[t.Optional[t.List[str]]] diff --git a/tests/conftest.py b/tests/conftest.py index d9e7e335..d1aa0827 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,12 +2,13 @@ import pathlib import shutil import textwrap +import typing as t import pytest from libvcs._internal.run import run -from libvcs.projects.git import GitProject -from libvcs.shortcuts import create_project_from_pip_url +from libvcs._internal.shortcuts import create_project +from libvcs.sync.git import GitSync @pytest.fixture(autouse=True, scope="session") @@ -67,7 +68,7 @@ def clean(): @pytest.fixture def git_repo_kwargs(repos_path: pathlib.Path, git_dummy_repo_dir): - """Return kwargs for :func:`create_project_from_pip_url`.""" + """Return kwargs for :func:`create_project`.""" return { "url": "git+file://" + git_dummy_repo_dir, "parent_dir": str(repos_path), @@ -76,16 +77,26 @@ def git_repo_kwargs(repos_path: pathlib.Path, git_dummy_repo_dir): @pytest.fixture -def git_repo(git_repo_kwargs) -> GitProject: +def git_repo(git_repo_kwargs) -> GitSync: """Create an git repository for tests. Return repo.""" - repo = create_project_from_pip_url(**git_repo_kwargs) + repo = create_project(vcs="git", **git_repo_kwargs) repo.obtain(quiet=True) return repo +class DummyRepoProtocol(t.Protocol): + """Callback for repo fixture factory.""" + + def __call__(self, repo_name: str, testfile_filename: str = ...) -> str: + """Callback signature for subprocess communication.""" + ... 
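The new `src/vcspull/types.py` above gives each expanded repo entry a `TypedDict` shape, with `remotes` and `shell_command_after` marked `NotRequired`. A small illustration of constructing one (the path and URL are made up):

```python
import pathlib

from vcspull.types import ConfigDict

repo: ConfigDict = {
    "vcs": "git",
    "name": "vcspull",
    "dir": pathlib.Path.home() / "code" / "vcspull",
    "url": "git+https://github.com/vcs-python/vcspull.git",
    # "remotes" and "shell_command_after" are NotRequired and may be omitted entirely.
}
```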
+ + @pytest.fixture -def create_git_dummy_repo(repos_path: pathlib.Path) -> pathlib.Path: - def fn(repo_name, testfile_filename="testfile.test"): +def create_git_dummy_repo( + repos_path: pathlib.Path, +) -> t.Generator[DummyRepoProtocol, None, None]: + def fn(repo_name: str, testfile_filename: str = "testfile.test"): repo_path = str(repos_path / repo_name) run(["git", "init", repo_name], cwd=str(repos_path)) @@ -100,7 +111,9 @@ def fn(repo_name, testfile_filename="testfile.test"): @pytest.fixture -def git_dummy_repo_dir(repos_path: pathlib.Path, create_git_dummy_repo): +def git_dummy_repo_dir( + repos_path: pathlib.Path, create_git_dummy_repo: DummyRepoProtocol +): """Create a git repo with 1 commit, used as a remote.""" return create_git_dummy_repo("dummyrepo") diff --git a/tests/fixtures/example.py b/tests/fixtures/example.py index 4c1799ac..9d2ed1ae 100644 --- a/tests/fixtures/example.py +++ b/tests/fixtures/example.py @@ -1,5 +1,8 @@ import os +from libvcs.sync.git import GitRemote +from vcspull.types import ConfigDict + config_dict = { "/home/me/myproject/study/": { "linux": "git+git://git.kernel.org/linux/torvalds/linux.git", @@ -30,51 +33,63 @@ }, } -config_dict_expanded = [ +config_dict_expanded: list[ConfigDict] = [ { + "vcs": "git", "name": "linux", - "parent_dir": "/home/me/myproject/study/", "dir": os.path.join("/home/me/myproject/study/", "linux"), "url": "git+git://git.kernel.org/linux/torvalds/linux.git", }, { + "vcs": "git", "name": "freebsd", - "parent_dir": "/home/me/myproject/study/", "dir": os.path.join("/home/me/myproject/study/", "freebsd"), "url": "git+https://github.com/freebsd/freebsd.git", }, { + "vcs": "git", "name": "sphinx", - "parent_dir": "/home/me/myproject/study/", "dir": os.path.join("/home/me/myproject/study/", "sphinx"), "url": "hg+https://bitbucket.org/birkenfeld/sphinx", }, { + "vcs": "git", "name": "docutils", - "parent_dir": "/home/me/myproject/study/", "dir": os.path.join("/home/me/myproject/study/", "docutils"), "url": "svn+http://svn.code.sf.net/p/docutils/code/trunk", }, { + "vcs": "git", "name": "kaptan", "url": "git+git@github.com:tony/kaptan.git", - "parent_dir": "/home/me/myproject/github_projects/", "dir": os.path.join("/home/me/myproject/github_projects/", "kaptan"), - "remotes": [ - {"remote_name": "upstream", "url": "git+https://github.com/emre/kaptan"}, - {"remote_name": "ms", "url": "git+https://github.com/ms/kaptan.git"}, - ], + "remotes": { + "upstream": GitRemote( + **{ + "name": "upstream", + "fetch_url": "git+https://github.com/emre/kaptan", + "push_url": "git+https://github.com/emre/kaptan", + } + ), + "ms": GitRemote( + **{ + "name": "ms", + "fetch_url": "git+https://github.com/ms/kaptan.git", + "push_url": "git+https://github.com/ms/kaptan.git", + } + ), + }, }, { + "vcs": "git", "name": ".vim", - "parent_dir": "/home/me/myproject", "dir": os.path.join("/home/me/myproject", ".vim"), "url": "git+git@github.com:tony/vim-config.git", "shell_command_after": ["ln -sf /home/me/.vim/.vimrc /home/me/.vimrc"], }, { + "vcs": "git", "name": ".tmux", - "parent_dir": "/home/me/myproject", "dir": os.path.join("/home/me/myproject", ".tmux"), "url": "git+git@github.com:tony/tmux-config.git", "shell_command_after": ["ln -sf /home/me/.tmux/.tmux.conf /home/me/.tmux.conf"], diff --git a/tests/test_config.py b/tests/test_config.py index 38672db2..e5d7713a 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -31,7 +31,7 @@ def test_simple_format(load_yaml): assert len(repos) == 1 repo = repos[0] - assert dir / "vcspull" == 
repo["parent_dir"] + assert dir / "vcspull" == repo["dir"].parent assert dir / "vcspull" / "libvcs" == repo["dir"] @@ -49,5 +49,5 @@ def test_relative_dir(load_yaml): assert len(repos) == 1 repo = repos[0] - assert dir / "relativedir" == repo["parent_dir"] + assert dir / "relativedir" == repo["dir"].parent assert dir / "relativedir" / "docutils" == repo["dir"] diff --git a/tests/test_config_file.py b/tests/test_config_file.py index 9cdf50dc..e3b21aab 100644 --- a/tests/test_config_file.py +++ b/tests/test_config_file.py @@ -163,12 +163,12 @@ def test_expandenv_and_homevars(): config1_expanded = extract_repos(config1) config2_expanded = extract_repos(config2) - paths = [r["parent_dir"] for r in config1_expanded] + paths = [r["dir"].parent for r in config1_expanded] assert expand_dir("${HOME}/github_projects/") in paths assert expand_dir("~/study/") in paths assert expand_dir("~") in paths - paths = [r["parent_dir"] for r in config2_expanded] + paths = [r["dir"].parent for r in config2_expanded] assert expand_dir("${HOME}/github_projects/") in paths assert expand_dir("~/study/") in paths @@ -214,7 +214,7 @@ def test_in_dir( def test_find_config_path_string( config_path: pathlib.Path, yaml_config: pathlib.Path, json_config: pathlib.Path ): - config_files = config.find_config_files(path=str(config_path)) + config_files = config.find_config_files(path=config_path) assert yaml_config in config_files assert json_config in config_files diff --git a/tests/test_repo.py b/tests/test_repo.py index ce05acec..ae6d2e08 100644 --- a/tests/test_repo.py +++ b/tests/test_repo.py @@ -1,10 +1,8 @@ """Tests for placing config dicts into :py:class:`Project` objects.""" -import os +import pathlib -from _pytest.compat import LEGACY_PATH - -from libvcs import BaseProject, GitProject, MercurialProject, SubversionProject -from libvcs.shortcuts import create_project_from_pip_url +from libvcs import BaseSync, GitSync, HgSync, SvnSync +from libvcs._internal.shortcuts import create_project from vcspull.config import filter_repos from .fixtures import example as fixtures @@ -46,6 +44,7 @@ def test_to_dictlist(): assert "name" in r assert "parent_dir" in r assert "url" in r + assert "vcs" in r if "remotes" in r: assert isinstance(r["remotes"], list) @@ -55,64 +54,60 @@ def test_to_dictlist(): assert "url" == remote -def test_vcs_url_scheme_to_object(tmpdir: LEGACY_PATH): +def test_vcs_url_scheme_to_object(tmp_path: pathlib.Path): """Verify `url` return {Git,Mercurial,Subversion}Project. - :class:`GitProject`, :class:`MercurialProject` or :class:`SubversionProject` + :class:`GitSync`, :class:`HgSync` or :class:`SvnSync` object based on the pip-style URL scheme. 
""" - git_repo = create_project_from_pip_url( - **{ - "pip_url": "git+git://git.myproject.org/MyProject.git@da39a3ee5e6b4b", - "dir": str(tmpdir.join("myproject1")), - } + git_repo = create_project( + vcs="git", + url="git+git://git.myproject.org/MyProject.git@da39a3ee5e6b4b", + dir=str(tmp_path / "myproject1"), ) # TODO cwd and name if duplicated should give an error - assert isinstance(git_repo, GitProject) - assert isinstance(git_repo, BaseProject) + assert isinstance(git_repo, GitSync) + assert isinstance(git_repo, BaseSync) - hg_repo = create_project_from_pip_url( - **{ - "pip_url": "hg+https://hg.myproject.org/MyProject#egg=MyProject", - "dir": str(tmpdir.join("myproject2")), - } + hg_repo = create_project( + vcs="hg", + url="hg+https://hg.myproject.org/MyProject#egg=MyProject", + dir=str(tmp_path / "myproject2"), ) - assert isinstance(hg_repo, MercurialProject) - assert isinstance(hg_repo, BaseProject) + assert isinstance(hg_repo, HgSync) + assert isinstance(hg_repo, BaseSync) - svn_repo = create_project_from_pip_url( - **{ - "pip_url": "svn+svn://svn.myproject.org/svn/MyProject#egg=MyProject", - "dir": str(tmpdir.join("myproject3")), - } + svn_repo = create_project( + vcs="svn", + url="svn+svn://svn.myproject.org/svn/MyProject#egg=MyProject", + dir=str(tmp_path / "myproject3"), ) - assert isinstance(svn_repo, SubversionProject) - assert isinstance(svn_repo, BaseProject) + assert isinstance(svn_repo, SvnSync) + assert isinstance(svn_repo, BaseSync) -def test_to_repo_objects(tmpdir: LEGACY_PATH): +def test_to_repo_objects(tmp_path: pathlib.Path): """:py:obj:`dict` objects into Project objects.""" repo_list = filter_repos(fixtures.config_dict_expanded) for repo_dict in repo_list: - r = create_project_from_pip_url(**repo_dict) + r = create_project(**repo_dict) # type: ignore - assert isinstance(r, BaseProject) - assert r.name - assert r.name == repo_dict["name"] - assert r.parent_dir - assert r.parent_dir == repo_dict["parent_dir"] + assert isinstance(r, BaseSync) + assert r.repo_name + assert r.repo_name == repo_dict["name"] + assert r.dir.parent assert r.url assert r.url == repo_dict["url"] - assert r.path == os.path.join(r.parent_dir, r.name) + assert r.dir == r.dir / r.repo_name - if "remotes" in repo_dict: - assert isinstance(r.remotes, list) + if hasattr(r, "remotes") and isinstance(r, GitSync): + assert isinstance(r.remotes, dict) for remote_name, remote_dict in r.remotes.items(): assert isinstance(remote_dict, dict) assert "fetch_url" in remote_dict diff --git a/tests/test_sync.py b/tests/test_sync.py index e9fcaa81..dbece9e3 100644 --- a/tests/test_sync.py +++ b/tests/test_sync.py @@ -1,15 +1,17 @@ import pathlib import textwrap -from typing import Callable, List +import typing as t import pytest import kaptan -from libvcs.projects.git import GitRemote -from libvcs.shortcuts import create_project_from_pip_url +from libvcs._internal.shortcuts import create_project +from libvcs.sync.git import GitRemote, GitSync +from tests.conftest import DummyRepoProtocol from vcspull.cli.sync import update_repo from vcspull.config import extract_repos, filter_repos, load_configs +from vcspull.types import ConfigDict from .helpers import write_config @@ -30,11 +32,18 @@ def test_makes_recursive( ) conf = conf.export("dict") repos = extract_repos(conf) + assert len(repos) > 0 - for r in filter_repos(repos): - repo = create_project_from_pip_url(**r) + filtered_repos = filter_repos(repos, dir="*") + assert len(filtered_repos) > 0 + + for r in filtered_repos: + assert isinstance(r, dict) + repo = 
create_project(**r) # type: ignore repo.obtain() + assert repo.dir.exists() + def write_config_remote( config_path: pathlib.Path, tmp_path: pathlib.Path, config_tpl, dir, clone_name @@ -79,11 +88,11 @@ def write_config_remote( ) def test_config_variations( tmp_path: pathlib.Path, - create_git_dummy_repo: Callable[[str], pathlib.Path], + create_git_dummy_repo: DummyRepoProtocol, config_tpl: str, capsys: pytest.CaptureFixture[str], - remote_list: List[str], -): + remote_list: t.List[str], +) -> None: """Test config output with variation of config formats""" dummy_repo_name = "dummy_repo" dummy_repo = create_git_dummy_repo(dummy_repo_name) @@ -103,8 +112,8 @@ def test_config_variations( for repo_dict in repos: repo_url = repo_dict["url"].replace("git+", "") - repo = update_repo(repo_dict) - remotes = repo.remotes() or [] + repo: GitSync = update_repo(repo_dict) + remotes = repo.remotes() or {} remote_names = set(remotes.keys()) assert set(remote_list).issubset(remote_names) or {"origin"}.issubset( remote_names @@ -112,6 +121,7 @@ def test_config_variations( for remote_name, remote_info in remotes.items(): current_remote = repo.remote(remote_name) + assert current_remote is not None assert current_remote.fetch_url == repo_url @@ -147,10 +157,10 @@ def test_config_variations( ) def test_updating_remote( tmp_path: pathlib.Path, - create_git_dummy_repo: Callable[[str], pathlib.Path], + create_git_dummy_repo: DummyRepoProtocol, config_tpl: str, - has_extra_remotes, -): + has_extra_remotes: bool, +) -> None: """Ensure additions/changes to yaml config are reflected""" dummy_repo_name = "dummy_repo" @@ -162,17 +172,17 @@ def test_updating_remote( repo_parent = tmp_path / "study" / "myrepo" repo_parent.mkdir(parents=True) - initial_config = { + initial_config: ConfigDict = { + "vcs": "git", "name": "myclone", "dir": f"{tmp_path}/study/myrepo/myclone", - "parent_dir": f"{tmp_path}/study/myrepo", "url": f"git+file://{dummy_repo}", "remotes": { - mirror_name: { - "name": mirror_name, - "fetch_url": f"git+file://{dummy_repo}", - "push_url": f"git+file://{dummy_repo}", - } + mirror_name: GitRemote( + name=mirror_name, + fetch_url=f"git+file://{dummy_repo}", + push_url=f"git+file://{dummy_repo}", + ) }, } @@ -184,25 +194,30 @@ def test_updating_remote( expected_remote_url = f"git+file://{mirror_repo}" - config = initial_config | { - "remotes": { - mirror_name: GitRemote( - name=mirror_name, - fetch_url=expected_remote_url, - push_url=expected_remote_url, - ) - } - } + expected_config: ConfigDict = initial_config.copy() + assert isinstance(expected_config["remotes"], dict) + expected_config["remotes"][mirror_name] = GitRemote( + name=mirror_name, + fetch_url=expected_remote_url, + push_url=expected_remote_url, + ) - repo_dict = filter_repos([config], name="myclone")[0] + repo_dict = filter_repos([expected_config], name="myclone")[0] repo = update_repo(repo_dict) for remote_name, remote_info in repo.remotes().items(): - current_remote_url = repo.remote(remote_name).fetch_url.replace("git+", "") - if remote_name in config["remotes"]: - assert ( - config["remotes"][remote_name].fetch_url.replace("git+", "") - == current_remote_url - ) - - elif remote_name == "origin": - assert config["url"].replace("git+", "") == current_remote_url + remote = repo.remote(remote_name) + if remote is not None: + current_remote_url = remote.fetch_url.replace("git+", "") + if remote_name in expected_config["remotes"]: + assert ( + expected_config["remotes"][remote_name].fetch_url.replace( + "git+", "" + ) + == current_remote_url + 
) + + elif remote_name == "origin": + assert ( + expected_config["url"].replace("git+", "") + == current_remote_url + )
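For reference, this is the shape `test_updating_remote` now builds: a `ConfigDict` whose `remotes` values are `libvcs.sync.git.GitRemote` objects, passed through `filter_repos` and `update_repo`. A condensed sketch under the same assumptions; the paths and file URLs are placeholders, and running it would perform a real clone of whatever repository they point at.

```python
import pathlib

from libvcs.sync.git import GitRemote
from vcspull.cli.sync import update_repo
from vcspull.config import filter_repos
from vcspull.types import ConfigDict

checkout = pathlib.Path("/tmp/study/myrepo/myclone")  # placeholder checkout path
upstream = "git+file:///tmp/repos/dummy_repo"         # placeholder remote URL

config: ConfigDict = {
    "vcs": "git",
    "name": "myclone",
    "dir": str(checkout),
    "url": upstream,
    "remotes": {
        "mirror": GitRemote(name="mirror", fetch_url=upstream, push_url=upstream),
    },
}

# filter_repos() narrows a list of ConfigDicts by name/dir/url; update_repo() then
# clones or updates the checkout and applies the configured remotes.
repo = update_repo(filter_repos([config], name="myclone")[0])
```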