diff --git a/.tmuxp.yaml b/.tmuxp.yaml index 20b81abb0..8f7da0cb0 100644 --- a/.tmuxp.yaml +++ b/.tmuxp.yaml @@ -12,4 +12,4 @@ windows: - focus: true - pane - pane - - make start + - just start diff --git a/AGENTS.md b/AGENTS.md index bf8df42cd..9dd613d33 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -213,6 +213,44 @@ type """ ``` +### Doctests + +**All functions and methods MUST have working doctests.** Doctests serve as both documentation and tests. + +**CRITICAL RULES:** +- Doctests MUST actually execute - never comment out `asyncio.run()` or similar calls +- Doctests MUST NOT be converted to `.. code-block::` as a workaround (code-blocks don't run) +- If you cannot create a working doctest, **STOP and ask for help** + +**Available tools for doctests:** +- `doctest_namespace` fixtures: `tmp_path`, `asyncio`, `create_git_remote_repo`, `create_hg_remote_repo`, `create_svn_remote_repo`, `example_git_repo` +- Ellipsis for variable output: `# doctest: +ELLIPSIS` +- Update `pytest_plugin.py` to add new fixtures to `doctest_namespace` + +**`# doctest: +SKIP` is NOT permitted** - it's just another workaround that doesn't test anything. If a VCS binary might not be installed, pytest already handles skipping via `skip_if_binaries_missing`. Use the fixtures properly. + +**Async doctest pattern:** +```python +>>> async def example(): +... result = await some_async_function() +... return result +>>> asyncio.run(example()) +'expected output' +``` + +**Using fixtures in doctests:** +```python +>>> git = Git(path=tmp_path) # tmp_path from doctest_namespace +>>> git.run(['status']) +'...' +``` + +**When output varies, use ellipsis:** +```python +>>> git.clone(url=f'file://{create_git_remote_repo()}') # doctest: +ELLIPSIS +'Cloning into ...' +``` + ### Git Commit Standards Format commit messages as: @@ -257,6 +295,139 @@ EOF )" ``` +## Asyncio Development + +### Architecture + +libvcs async support is organized in `_async/` subpackages: + +``` +libvcs/ +├── _internal/ +│ ├── subprocess.py # Sync subprocess wrapper +│ └── async_subprocess.py # Async subprocess wrapper +├── cmd/ +│ ├── git.py # Git (sync) +│ └── _async/git.py # AsyncGit +├── sync/ +│ ├── git.py # GitSync (sync) +│ └── _async/git.py # AsyncGitSync +``` + +### Async Subprocess Patterns + +**Always use `communicate()` for subprocess I/O:** +```python +proc = await asyncio.create_subprocess_shell(...) 
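+# communicate() drains stdout and stderr concurrently, so neither pipe fills and blocks the child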
+stdout, stderr = await proc.communicate() # Prevents deadlocks +``` + +**Use `asyncio.timeout()` for timeouts:** +```python +async with asyncio.timeout(300): + stdout, stderr = await proc.communicate() +``` + +**Handle BrokenPipeError gracefully:** +```python +try: + proc.stdin.write(data) + await proc.stdin.drain() +except BrokenPipeError: + pass # Process already exited - expected behavior +``` + +### Async API Conventions + +- **Class naming**: Use `Async` prefix: `AsyncGit`, `AsyncGitSync` +- **Callbacks**: Async APIs accept only async callbacks (no union types) +- **Shared logic**: Extract argument-building to sync functions, share with async + +```python +# Shared argument building (sync) +def build_clone_args(url: str, depth: int | None = None) -> list[str]: + args = ["clone", url] + if depth: + args.extend(["--depth", str(depth)]) + return args + +# Async method uses shared logic +async def clone(self, url: str, depth: int | None = None) -> str: + args = build_clone_args(url, depth) + return await self.run(args) +``` + +### Async Testing + +**pytest configuration:** +```toml +[tool.pytest.ini_options] +asyncio_mode = "strict" +asyncio_default_fixture_loop_scope = "function" +``` + +**Async fixture pattern:** +```python +@pytest_asyncio.fixture(loop_scope="function") +async def async_git_repo(tmp_path: Path) -> t.AsyncGenerator[AsyncGitSync, None]: + repo = AsyncGitSync(url="...", path=tmp_path / "repo") + await repo.obtain() + yield repo +``` + +**Parametrized async tests:** +```python +class CloneFixture(t.NamedTuple): + test_id: str + clone_kwargs: dict[str, t.Any] + expected: list[str] + +CLONE_FIXTURES = [ + CloneFixture("basic", {}, [".git"]), + CloneFixture("shallow", {"depth": 1}, [".git"]), +] + +@pytest.mark.parametrize( + list(CloneFixture._fields), + CLONE_FIXTURES, + ids=[f.test_id for f in CLONE_FIXTURES], +) +@pytest.mark.asyncio +async def test_clone(test_id: str, clone_kwargs: dict, expected: list) -> None: + ... +``` + +### Async Anti-Patterns + +**DON'T poll returncode:** +```python +# WRONG +while proc.returncode is None: + await asyncio.sleep(0.1) + +# RIGHT +await proc.wait() +``` + +**DON'T mix blocking calls in async code:** +```python +# WRONG +async def bad(): + subprocess.run(["git", "clone", url]) # Blocks event loop! + +# RIGHT +async def good(): + proc = await asyncio.create_subprocess_shell(...) + await proc.wait() +``` + +**DON'T close the event loop in tests:** +```python +# WRONG - breaks pytest-asyncio cleanup +loop = asyncio.get_running_loop() +loop.close() +``` + ## Debugging Tips When stuck in debugging loops: diff --git a/README.md b/README.md index 42d7d120c..ac99ae476 100644 --- a/README.md +++ b/README.md @@ -149,6 +149,46 @@ def test_my_git_tool( assert (checkout_path / ".git").is_dir() ``` +### 5. Async Support +Run VCS operations asynchronously for better concurrency when managing multiple repositories. 
+ +[**Learn more about Async Support**](https://libvcs.git-pull.com/topics/asyncio.html) + +```python +import asyncio +import pathlib +from libvcs.sync._async.git import AsyncGitSync + +async def main(): + repo = AsyncGitSync( + url="https://github.com/vcs-python/libvcs", + path=pathlib.Path.cwd() / "libvcs", + ) + await repo.obtain() + await repo.update_repo() + +asyncio.run(main()) +``` + +Clone multiple repositories concurrently: + +```python +import asyncio +from libvcs.sync._async.git import AsyncGitSync + +async def clone_all(repos: list[tuple[str, str]]): + tasks = [ + AsyncGitSync(url=url, path=path).obtain() + for url, path in repos + ] + await asyncio.gather(*tasks) # All clone in parallel + +asyncio.run(clone_all([ + ("https://github.com/vcs-python/libvcs", "./libvcs"), + ("https://github.com/vcs-python/vcspull", "./vcspull"), +])) +``` + ## Project Information - **Python Support**: 3.10+ diff --git a/conftest.py b/conftest.py index 6a3efb34a..ff6f31870 100644 --- a/conftest.py +++ b/conftest.py @@ -10,16 +10,174 @@ from __future__ import annotations +import dataclasses +import time import typing as t +from collections import defaultdict import pytest if t.TYPE_CHECKING: import pathlib + from _pytest.fixtures import FixtureDef, SubRequest + from _pytest.terminal import TerminalReporter + pytest_plugins = ["pytester"] +@dataclasses.dataclass +class FixtureMetrics: + """Metrics collected during fixture execution.""" + + fixture_name: str + duration: float + cache_hit: bool | None = None # None if not applicable (non-repo fixture) + + +# Fixture profiling storage +_fixture_timings: dict[str, list[float]] = defaultdict(list) +_fixture_call_counts: dict[str, int] = defaultdict(int) +_fixture_cache_hits: dict[str, int] = defaultdict(int) +_fixture_cache_misses: dict[str, int] = defaultdict(int) + + +def pytest_addoption(parser: pytest.Parser) -> None: + """Add fixture profiling options.""" + group = parser.getgroup("libvcs", "libvcs fixture options") + group.addoption( + "--fixture-durations", + action="store", + type=int, + default=0, + metavar="N", + help="Show N slowest fixture setup times (N=0 for all)", + ) + group.addoption( + "--fixture-durations-min", + action="store", + type=float, + default=0.005, + metavar="SECONDS", + help="Minimum duration to show in fixture timing report (default: 0.005)", + ) + group.addoption( + "--run-performance", + action="store_true", + default=False, + help="Run performance tests (marked with @pytest.mark.performance)", + ) + + +def pytest_collection_modifyitems( + config: pytest.Config, + items: list[pytest.Item], +) -> None: + """Skip performance tests unless --run-performance is given.""" + if config.getoption("--run-performance"): + # --run-performance given: run all tests + return + + skip_performance = pytest.mark.skip(reason="need --run-performance option to run") + for item in items: + if "performance" in item.keywords: + item.add_marker(skip_performance) + + +@pytest.hookimpl(wrapper=True) +def pytest_fixture_setup( + fixturedef: FixtureDef[t.Any], + request: SubRequest, +) -> t.Generator[None, t.Any, t.Any]: + """Wrap fixture setup to measure timing and track cache hits.""" + start = time.perf_counter() + try: + result = yield + # Track cache hits for fixtures that support it (RepoFixtureResult) + if hasattr(result, "from_cache"): + fixture_name = fixturedef.argname + if result.from_cache: + _fixture_cache_hits[fixture_name] += 1 + else: + _fixture_cache_misses[fixture_name] += 1 + return result + finally: + duration = 
time.perf_counter() - start + fixture_name = fixturedef.argname + _fixture_timings[fixture_name].append(duration) + _fixture_call_counts[fixture_name] += 1 + + +def pytest_terminal_summary( + terminalreporter: TerminalReporter, + exitstatus: int, + config: pytest.Config, +) -> None: + """Display fixture timing and cache statistics summary.""" + durations_count = config.option.fixture_durations + durations_min = config.option.fixture_durations_min + + # Skip if no timing requested (durations_count defaults to 0 meaning "off") + if durations_count == 0 and not config.option.verbose: + return + + # Build summary data + fixture_stats: list[tuple[str, float, int, float]] = [] + for name, times in _fixture_timings.items(): + total_time = sum(times) + call_count = len(times) + avg_time = total_time / call_count if call_count > 0 else 0 + fixture_stats.append((name, total_time, call_count, avg_time)) + + # Sort by total time descending + fixture_stats.sort(key=lambda x: x[1], reverse=True) + + # Filter by minimum duration + fixture_stats = [s for s in fixture_stats if s[1] >= durations_min] + + if not fixture_stats: + return + + # Limit count if specified + if durations_count > 0: + fixture_stats = fixture_stats[:durations_count] + + terminalreporter.write_sep("=", "fixture setup times") + terminalreporter.write_line("") + terminalreporter.write_line( + f"{'Fixture':<40} {'Total':>10} {'Calls':>8} {'Avg':>10}", + ) + terminalreporter.write_line("-" * 70) + + for name, total, calls, avg in fixture_stats: + terminalreporter.write_line( + f"{name:<40} {total:>9.3f}s {calls:>8} {avg:>9.3f}s", + ) + + # Display cache statistics if any repo fixtures were used + if _fixture_cache_hits or _fixture_cache_misses: + terminalreporter.write_line("") + terminalreporter.write_sep("=", "fixture cache statistics") + terminalreporter.write_line("") + terminalreporter.write_line( + f"{'Fixture':<40} {'Hits':>8} {'Misses':>8} {'Hit Rate':>10}", + ) + terminalreporter.write_line("-" * 70) + + # Combine hits and misses for all fixtures that have cache tracking + all_cache_fixtures = set(_fixture_cache_hits.keys()) | set( + _fixture_cache_misses.keys() + ) + for name in sorted(all_cache_fixtures): + hits = _fixture_cache_hits.get(name, 0) + misses = _fixture_cache_misses.get(name, 0) + total = hits + misses + hit_rate = (hits / total * 100) if total > 0 else 0 + terminalreporter.write_line( + f"{name:<40} {hits:>8} {misses:>8} {hit_rate:>9.1f}%", + ) + + @pytest.fixture(autouse=True) def add_doctest_fixtures( request: pytest.FixtureRequest, diff --git a/docs/cmd/index.md b/docs/cmd/index.md index 245dca168..8ee853cd1 100644 --- a/docs/cmd/index.md +++ b/docs/cmd/index.md @@ -21,6 +21,16 @@ The `libvcs.cmd` module provides Python wrappers for VCS command-line tools: - {mod}`libvcs.cmd.hg` - Mercurial commands - {mod}`libvcs.cmd.svn` - Subversion commands +### Async Variants + +Async equivalents are available in `libvcs.cmd._async`: + +- {class}`~libvcs.cmd._async.git.AsyncGit` - Async git commands +- {class}`~libvcs.cmd._async.hg.AsyncHg` - Async mercurial commands +- {class}`~libvcs.cmd._async.svn.AsyncSvn` - Async subversion commands + +See {doc}`/topics/asyncio` for usage patterns. + ### When to use `cmd` vs `sync` | Module | Use Case | diff --git a/docs/internals/async_run.md b/docs/internals/async_run.md new file mode 100644 index 000000000..a3756d53d --- /dev/null +++ b/docs/internals/async_run.md @@ -0,0 +1,8 @@ +# async_run - `libvcs._internal.async_run` + +Async equivalent of {mod}`libvcs._internal.run`. 
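+
+A minimal usage sketch, mirroring the module's own doctests:
+
+```python
+import asyncio
+
+from libvcs._internal.async_run import async_run
+
+async def main() -> None:
+    # Returns stripped stdout; raises CommandError on non-zero exit
+    output = await async_run(["echo", "hello"])
+    print(output)  # prints: hello
+
+asyncio.run(main())
+```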
+ +```{eval-rst} +.. automodule:: libvcs._internal.async_run + :members: +``` diff --git a/docs/internals/async_subprocess.md b/docs/internals/async_subprocess.md new file mode 100644 index 000000000..d0cb7e4d5 --- /dev/null +++ b/docs/internals/async_subprocess.md @@ -0,0 +1,8 @@ +# AsyncSubprocessCommand - `libvcs._internal.async_subprocess` + +Async equivalent of {mod}`libvcs._internal.subprocess`. + +```{eval-rst} +.. automodule:: libvcs._internal.async_subprocess + :members: +``` diff --git a/docs/internals/copy.md b/docs/internals/copy.md new file mode 100644 index 000000000..e6e5ef4ce --- /dev/null +++ b/docs/internals/copy.md @@ -0,0 +1,114 @@ +(copy)= + +# Copy Utilities + +```{module} libvcs._internal.copy +``` + +Copy utilities with reflink (copy-on-write) support for optimized directory operations. + +## Overview + +This module provides `copytree_reflink()`, an optimized directory copy function that +leverages filesystem-level copy-on-write (CoW) when available, with automatic fallback +to standard `shutil.copytree()` on unsupported filesystems. + +## Why Reflinks? + +Traditional file copying reads source bytes and writes them to the destination. On +modern copy-on-write filesystems like **Btrfs**, **XFS**, and **APFS**, reflinks +provide a more efficient alternative: + +| Operation | Traditional Copy | Reflink Copy | +|-----------|------------------|--------------| +| Bytes transferred | All file data | Metadata only | +| Time complexity | O(file size) | O(1) | +| Disk usage | 2x original | ~0 (shared blocks) | +| On modification | Original unchanged | CoW creates new blocks | + +### Filesystem Support + +| Filesystem | Reflink Support | Notes | +|------------|-----------------|-------| +| Btrfs | ✅ Native | Full CoW support | +| XFS | ✅ Native | Requires reflink=1 mount option | +| APFS | ✅ Native | macOS 10.13+ | +| ext4 | ❌ Fallback | Falls back to byte copy | +| NTFS | ❌ Fallback | Windows uses shutil.copytree | + +## Usage + +```python +from libvcs._internal.copy import copytree_reflink +import pathlib + +src = pathlib.Path("/path/to/source") +dst = pathlib.Path("/path/to/destination") + +# Simple copy +copytree_reflink(src, dst) + +# With ignore patterns +import shutil +copytree_reflink( + src, + dst, + ignore=shutil.ignore_patterns("*.pyc", "__pycache__"), +) +``` + +## API Reference + +```{eval-rst} +.. autofunction:: libvcs._internal.copy.copytree_reflink +``` + +## Implementation Details + +### Strategy + +The function uses a **reflink-first + fallback** strategy: + +1. **Try `cp --reflink=auto`** - On Linux, this command attempts a reflink copy + and silently falls back to regular copy if the filesystem doesn't support it +2. 
**Fallback to `shutil.copytree()`** - If `cp` fails (not found, permission issues, + or Windows), use Python's standard library + +### Ignore Patterns + +When using ignore patterns with `cp --reflink=auto`, the approach differs from +`shutil.copytree()`: + +- **shutil.copytree**: Applies patterns during copy (never copies ignored files) +- **cp --reflink**: Copies everything, then deletes ignored files + +This difference is acceptable because: +- The overhead of post-copy deletion is minimal for typical ignore patterns +- The performance gain from reflinks far outweighs this overhead on CoW filesystems + +## Use in pytest Fixtures + +This module is used by the `*_repo` fixtures in `libvcs.pytest_plugin` to create +isolated test workspaces from cached master copies: + +```python +# From pytest_plugin.py +from libvcs._internal.copy import copytree_reflink + +@pytest.fixture +def git_repo(...): + # ... + copytree_reflink( + master_copy, + new_checkout_path, + ignore=shutil.ignore_patterns(".libvcs_master_initialized"), + ) + # ... +``` + +### Benefits for Test Fixtures + +1. **Faster on CoW filesystems** - Users on Btrfs/XFS see 10-100x speedup +2. **No regression elsewhere** - ext4/Windows users see no performance change +3. **Safe for writable workspaces** - Tests can modify files; master stays unchanged +4. **Future-proof** - As more systems adopt CoW filesystems, benefits increase diff --git a/docs/internals/file_lock.md b/docs/internals/file_lock.md new file mode 100644 index 000000000..56ebd41d8 --- /dev/null +++ b/docs/internals/file_lock.md @@ -0,0 +1,79 @@ +# FileLock - `libvcs._internal.file_lock` + +Typed, asyncio-friendly file locking based on [filelock](https://github.com/tox-dev/filelock) patterns. + +## Overview + +This module provides portable file-based locking using the **SoftFileLock** pattern +(`os.O_CREAT | os.O_EXCL`) for atomic lock acquisition. It supports both synchronous +and asynchronous contexts. 
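+
+A minimal sketch of the acquisition primitive the SoftFileLock pattern relies
+on (illustrative only, not the module's exact implementation):
+
+```python
+import os
+
+def try_acquire(lock_path: str) -> bool:
+    """Return True if the lock was created, False if already held."""
+    try:
+        # O_CREAT | O_EXCL fails atomically if the file already exists
+        fd = os.open(lock_path, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
+    except FileExistsError:
+        return False
+    os.write(fd, str(os.getpid()).encode())  # PID tracking for debugging
+    os.close(fd)
+    return True
+```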
+ +### Key Features + +- **Atomic acquisition**: Uses `os.O_CREAT | os.O_EXCL` for race-free lock creation +- **Reentrant locking**: Same thread can acquire lock multiple times +- **Stale lock detection**: Auto-removes locks older than configurable timeout (default 5min) +- **Async support**: {class}`~libvcs._internal.file_lock.AsyncFileLock` with `asyncio.sleep` polling +- **Two-file pattern**: Lock file (temporary) + marker file (permanent) +- **PID tracking**: Writes PID to lock file for debugging + +## Quick Start + +### Synchronous Usage + +```python +from libvcs._internal.file_lock import FileLock + +# Context manager (recommended) +with FileLock("/tmp/my.lock"): + # Critical section - only one process at a time + pass + +# Explicit acquire/release +lock = FileLock("/tmp/my.lock") +lock.acquire() +try: + # Critical section + pass +finally: + lock.release() +``` + +### Asynchronous Usage + +```python +import asyncio +from libvcs._internal.file_lock import AsyncFileLock + +async def main(): + async with AsyncFileLock("/tmp/my.lock"): + # Async critical section + pass + +asyncio.run(main()) +``` + +### Atomic Initialization + +The {func}`~libvcs._internal.file_lock.atomic_init` function implements the **two-file pattern** +for coordinating one-time initialization across multiple processes: + +```python +from libvcs._internal.file_lock import atomic_init + +def expensive_init(): + # One-time setup (e.g., clone repo, build cache) + pass + +# First call does initialization, subsequent calls skip +did_init = atomic_init("/path/to/resource", expensive_init) +``` + +## API Reference + +```{eval-rst} +.. automodule:: libvcs._internal.file_lock + :members: + :undoc-members: + :show-inheritance: +``` diff --git a/docs/internals/index.md b/docs/internals/index.md index fb6159812..80b8f7f3b 100644 --- a/docs/internals/index.md +++ b/docs/internals/index.md @@ -9,11 +9,15 @@ If you need an internal API stabilized please [file an issue](https://github.com ::: ```{toctree} +copy exc types dataclasses +file_lock query_list run +async_run subprocess +async_subprocess shortcuts ``` diff --git a/docs/pytest-plugin.md b/docs/pytest-plugin.md index 0a69035ef..69c1978db 100644 --- a/docs/pytest-plugin.md +++ b/docs/pytest-plugin.md @@ -131,6 +131,64 @@ def setup(set_hgconfig: None): pass ``` +## Async Fixtures + +For async testing with [pytest-asyncio], libvcs provides async fixture variants: + +[pytest-asyncio]: https://pytest-asyncio.readthedocs.io/ + +### Configuration + +Add pytest-asyncio to your test dependencies and configure strict mode: + +```toml +# pyproject.toml +[tool.pytest.ini_options] +asyncio_mode = "strict" +asyncio_default_fixture_loop_scope = "function" +``` + +### Available Async Fixtures + +- {func}`async_git_repo` - An {class}`~libvcs.sync._async.git.AsyncGitSync` instance ready for testing +- `async_create_git_remote_repo` - Factory to create temporary git repositories + +### Usage Example + +```python +import pytest + +@pytest.mark.asyncio +async def test_async_repo_operations(async_git_repo): + """Test async repository operations.""" + # async_git_repo is an AsyncGitSync instance + status = await async_git_repo.cmd.status() + assert 'On branch' in status + + # Update the repo + await async_git_repo.update_repo() +``` + +### Creating Repositories in Async Tests + +```python +import pytest +from libvcs.sync._async.git import AsyncGitSync + +@pytest.mark.asyncio +async def test_clone_repo(tmp_path, create_git_remote_repo): + """Test cloning a repository asynchronously.""" + remote = 
create_git_remote_repo() + repo = AsyncGitSync( + url=f'file://{remote}', + path=tmp_path / 'clone', + ) + await repo.obtain() + assert (tmp_path / 'clone' / '.git').exists() +``` + +See {doc}`/topics/asyncio` for more async patterns. + ## Examples For usage examples, refer to libvcs's own [tests/](https://github.com/vcs-python/libvcs/tree/master/tests). diff --git a/docs/quickstart.md b/docs/quickstart.md index 109beb5df..5eb3ae904 100644 --- a/docs/quickstart.md +++ b/docs/quickstart.md @@ -98,3 +98,41 @@ origin.prune() ``` See {doc}`/cmd/git/index` for the full API reference. + +### Async Usage + +All APIs have async equivalents for non-blocking operations: + +```python +import asyncio +from libvcs.cmd._async.git import AsyncGit + +async def main(): + git = AsyncGit(path='/path/to/repo') + + # Non-blocking git operations + await git.run(['init']) + status = await git.status() + print(status) + +asyncio.run(main()) +``` + +For repository synchronization: + +```python +import asyncio +from libvcs.sync._async.git import AsyncGitSync + +async def main(): + repo = AsyncGitSync( + url='https://github.com/vcs-python/libvcs', + path='/tmp/libvcs', + ) + await repo.obtain() # Clone + await repo.update_repo() # Update + +asyncio.run(main()) +``` + +See {doc}`/topics/asyncio` for the complete async guide. diff --git a/docs/sync/index.md b/docs/sync/index.md index 3365aada8..361bacdcc 100644 --- a/docs/sync/index.md +++ b/docs/sync/index.md @@ -14,6 +14,16 @@ versions. ::: +## Async Variants + +Async equivalents are available in `libvcs.sync._async`: + +- {class}`~libvcs.sync._async.git.AsyncGitSync` - Async git repository management +- {class}`~libvcs.sync._async.hg.AsyncHgSync` - Async mercurial repository management +- {class}`~libvcs.sync._async.svn.AsyncSvnSync` - Async subversion repository management + +See {doc}`/topics/asyncio` for usage patterns. + ```{toctree} :caption: API diff --git a/docs/topics/asyncio.md b/docs/topics/asyncio.md new file mode 100644 index 000000000..2f0b939d1 --- /dev/null +++ b/docs/topics/asyncio.md @@ -0,0 +1,226 @@ +(asyncio)= + +# Async Support + +libvcs provides **async equivalents** for all synchronous APIs, enabling +non-blocking VCS operations ideal for managing multiple repositories +concurrently. + +## Overview + +The async API mirrors the sync API with an `Async` prefix: + +| Sync Class | Async Equivalent | +|------------|------------------| +| {class}`~libvcs.cmd.git.Git` | {class}`~libvcs.cmd._async.git.AsyncGit` | +| {class}`~libvcs.cmd.hg.Hg` | {class}`~libvcs.cmd._async.hg.AsyncHg` | +| {class}`~libvcs.cmd.svn.Svn` | {class}`~libvcs.cmd._async.svn.AsyncSvn` | +| {class}`~libvcs.sync.git.GitSync` | {class}`~libvcs.sync._async.git.AsyncGitSync` | +| {class}`~libvcs.sync.hg.HgSync` | {class}`~libvcs.sync._async.hg.AsyncHgSync` | +| {class}`~libvcs.sync.svn.SvnSync` | {class}`~libvcs.sync._async.svn.AsyncSvnSync` | + +## Why Async? + +Async APIs excel when: + +- **Managing multiple repositories** - Clone/update repos concurrently +- **Building responsive applications** - UI remains responsive during VCS operations +- **Integration with async frameworks** - FastAPI, aiohttp, etc. +- **CI/CD pipelines** - Parallel repository operations + +## Basic Usage + +### Running Git Commands + +```python +>>> from libvcs.cmd._async.git import AsyncGit +>>> async def example(): +... git = AsyncGit(path=tmp_path) +... await git.run(['init']) +... status = await git.status() +... 
return 'On branch' in status +>>> asyncio.run(example()) +True +``` + +### Cloning a Repository + +```python +>>> from libvcs.cmd._async.git import AsyncGit +>>> async def clone_example(): +... repo_path = tmp_path / 'myrepo' +... git = AsyncGit(path=repo_path) +... url = f'file://{create_git_remote_repo()}' +... await git.clone(url=url) +... return (repo_path / '.git').exists() +>>> asyncio.run(clone_example()) +True +``` + +### Repository Synchronization + +For higher-level repository management, use the sync classes: + +```python +>>> from libvcs.sync._async.git import AsyncGitSync +>>> async def sync_example(): +... url = f'file://{create_git_remote_repo()}' +... repo_path = tmp_path / 'synced_repo' +... repo = AsyncGitSync(url=url, path=repo_path) +... await repo.obtain() # Clone +... await repo.update_repo() # Pull updates +... return (repo_path / '.git').exists() +>>> asyncio.run(sync_example()) +True +``` + +## Concurrent Operations + +The primary advantage of async is running operations concurrently: + +```python +>>> from libvcs.sync._async.git import AsyncGitSync +>>> async def concurrent_clone(): +... urls = [ +... f'file://{create_git_remote_repo()}', +... f'file://{create_git_remote_repo()}', +... ] +... tasks = [] +... for i, url in enumerate(urls): +... repo = AsyncGitSync(url=url, path=tmp_path / f'repo_{i}') +... tasks.append(repo.obtain()) +... await asyncio.gather(*tasks) # Clone all concurrently +... return all((tmp_path / f'repo_{i}' / '.git').exists() for i in range(2)) +>>> asyncio.run(concurrent_clone()) +True +``` + +## Progress Callbacks + +Async APIs support async progress callbacks for real-time output streaming: + +```python +>>> import datetime +>>> from libvcs._internal.async_run import async_run +>>> async def with_progress(): +... output_lines = [] +... async def progress(output: str, timestamp: datetime.datetime) -> None: +... output_lines.append(output) +... result = await async_run(['echo', 'hello'], callback=progress) +... return result.strip() +>>> asyncio.run(with_progress()) +'hello' +``` + +For sync callbacks, use the wrapper: + +```python +>>> from libvcs._internal.async_run import wrap_sync_callback +>>> def my_sync_callback(output: str, timestamp: datetime.datetime) -> None: +... print(output, end='') +>>> async_callback = wrap_sync_callback(my_sync_callback) +``` + +## Sync vs Async Comparison + +### Sync Pattern + +```python +from libvcs.sync.git import GitSync + +repo = GitSync(url="https://github.com/user/repo", path="/tmp/repo") +repo.obtain() # Blocks until complete +repo.update_repo() # Blocks again +``` + +### Async Pattern + +```python +import asyncio +from libvcs.sync._async.git import AsyncGitSync + +async def main(): + repo = AsyncGitSync(url="https://github.com/user/repo", path="/tmp/repo") + await repo.obtain() # Non-blocking + await repo.update_repo() + +asyncio.run(main()) +``` + +## Error Handling + +Async methods raise the same exceptions as sync equivalents: + +```python +>>> from libvcs import exc +>>> from libvcs._internal.async_run import async_run +>>> async def error_example(): +... try: +... await async_run(['git', 'nonexistent-command'], check_returncode=True) +... except exc.CommandError as e: +... return 'error caught' +... return 'no error' +>>> asyncio.run(error_example()) +'error caught' +``` + +### Timeout Handling + +```python +>>> from libvcs import exc +>>> from libvcs._internal.async_run import async_run +>>> async def timeout_example(): +... try: +... # Very short timeout for demo +... 
await async_run(['sleep', '10'], timeout=0.1) +... except exc.CommandTimeoutError: +... return 'timeout caught' +... return 'completed' +>>> asyncio.run(timeout_example()) +'timeout caught' +``` + +## Testing with pytest-asyncio + +Use the async fixtures for testing: + +```python +import pytest + +@pytest.mark.asyncio +async def test_repo_operations(async_git_repo): + # async_git_repo is an AsyncGitSync instance + status = await async_git_repo.cmd.status() + assert 'On branch' in status +``` + +See {doc}`/pytest-plugin` for available async fixtures. + +## When to Use Async + +| Scenario | Recommendation | +|----------|----------------| +| Single repository, simple script | Sync API (simpler) | +| Multiple repositories concurrently | **Async API** | +| Integration with async framework | **Async API** | +| CI/CD with parallel operations | **Async API** | +| Interactive CLI tool | Either (prefer sync for simplicity) | + +## API Reference + +### Command Classes + +- {class}`~libvcs.cmd._async.git.AsyncGit` - Async git commands +- {class}`~libvcs.cmd._async.hg.AsyncHg` - Async mercurial commands +- {class}`~libvcs.cmd._async.svn.AsyncSvn` - Async subversion commands + +### Sync Classes + +- {class}`~libvcs.sync._async.git.AsyncGitSync` - Async git repository management +- {class}`~libvcs.sync._async.hg.AsyncHgSync` - Async mercurial repository management +- {class}`~libvcs.sync._async.svn.AsyncSvnSync` - Async subversion repository management + +### Internal Utilities + +- {func}`~libvcs._internal.async_run.async_run` - Low-level async command execution +- {class}`~libvcs._internal.async_subprocess.AsyncSubprocessCommand` - Async subprocess wrapper diff --git a/docs/topics/index.md b/docs/topics/index.md index 4f281b886..b8edea0ac 100644 --- a/docs/topics/index.md +++ b/docs/topics/index.md @@ -9,6 +9,7 @@ with detailed explanations and runnable examples. 
```{toctree} +asyncio traversing_git filtering url_parsing diff --git a/notes/plan.md b/notes/plan.md new file mode 100644 index 000000000..e754963c9 --- /dev/null +++ b/notes/plan.md @@ -0,0 +1,353 @@ +# libvcs Asyncio Support Implementation Plan + +## Implementation Status + +| Phase | Status | Description | +|-------|--------|-------------| +| Phase 1 | ✅ Complete | Core Async Subprocess (`async_subprocess.py`) | +| Phase 2 | ✅ Complete | Async Run Function (`async_run.py`) | +| Phase 3 | ✅ Complete | Async Command Classes (`AsyncGit`) | +| Phase 4 | ✅ Complete | Async Sync Classes (`AsyncGitSync`) | +| Phase 5 | ✅ Complete | Async pytest fixtures (`async_git_repo`) | +| Phase 6 | ✅ Complete | Async Mercurial (`AsyncHg`, `AsyncHgSync`) | +| Phase 7 | ✅ Complete | Async Subversion (`AsyncSvn`, `AsyncSvnSync`) | +| Phase 8 | ✅ Complete | Async Documentation | + +--- + +## Study Sources + +The following reference codebases were studied to inform this design: + +| Source | Path | Key Learnings | +|--------|------|---------------| +| **CPython asyncio** | `~/study/c/cpython/Lib/asyncio/` | Subprocess patterns, flow control, `communicate()` | +| **pytest** | `~/study/python/pytest/` | Fixture system internals, parametrization | +| **pytest-asyncio** | `~/study/python/pytest-asyncio/` | Async fixture wrapping, event loop management | +| **git** | `~/study/c/git/` | VCS command patterns | + +### Key Files Studied + +- `cpython/Lib/asyncio/subprocess.py` - High-level async subprocess API +- `cpython/Lib/asyncio/streams.py` - StreamReader/Writer with backpressure +- `cpython/Lib/asyncio/base_subprocess.py` - Protocol/Transport implementation +- `pytest-asyncio/pytest_asyncio/plugin.py` - Fixture wrapping, loop lifecycle + +--- + +## Decisions + +| Decision | Choice | Rationale | +|----------|--------|-----------| +| **Scope** | Full stack (subprocess → cmd → sync) | Complete async workflows | +| **Organization** | Separate `_async/` subpackages | Clean separation, maintainable | +| **Callbacks** | Async-only for async APIs | Better DX, typing, no runtime overhead | +| **Testing** | 100% coverage, pytest-asyncio strict mode | Reliability, isolation | +| **Typing** | Fully typed, no `Any` escapes | Type safety, IDE support | + +--- + +## Verification Before Commit + +**REQUIRED**: Before committing any phase or making a PR, run the full verification pipeline: + +```bash +uv run ruff check . --fix --show-fixes +uv run ruff format . 
+uv run mypy +uv run pytest --reruns 0 -vvv +``` + +All checks must pass: +- `ruff check`: No linting errors +- `ruff format`: No formatting changes needed +- `mypy`: Success with no type errors +- `pytest`: All tests pass (0 failures) + +--- + +## DOs + +### Subprocess Execution +- **DO** use `communicate()` for all subprocess I/O (prevents deadlocks) +- **DO** use `asyncio.timeout()` context manager for timeouts +- **DO** handle `BrokenPipeError` gracefully (process may exit early) +- **DO** use try/finally for resource cleanup + +### API Design +- **DO** keep sync and async APIs parallel in `_async/` subpackages +- **DO** share argument-building logic between sync/async variants +- **DO** use async-only callbacks for async APIs (no `inspect.isawaitable()`) +- **DO** provide `wrap_sync_callback()` helper for users with sync callbacks +- **DO** use `Async` prefix for async classes: `AsyncGit`, `AsyncGitSync` + +### Testing +- **DO** use strict mode for pytest-asyncio +- **DO** use function-scoped event loops for test isolation +- **DO** use `@pytest_asyncio.fixture` for async fixtures +- **DO** use `NamedTuple` with `test_id` for parametrized tests +- **DO** mirror sync test structure for async tests + +### Typing +- **DO** use `from __future__ import annotations` in all files +- **DO** use `import typing as t` namespace pattern +- **DO** provide explicit return type annotations +- **DO** use Protocol classes for callback types + +--- + +## DON'Ts + +### Subprocess Execution +- **DON'T** poll `returncode` - use `await proc.wait()` +- **DON'T** read stdout/stderr manually for bidirectional I/O +- **DON'T** close event loop in user code +- **DON'T** mix blocking `subprocess.run()` in async code +- **DON'T** create new event loops manually + +### API Design +- **DON'T** use union types for callbacks (`None | Awaitable[None]`) +- **DON'T** break backward compatibility of sync APIs +- **DON'T** leak event loop details into public APIs + +### Testing +- **DON'T** assume tests run concurrently (they're sequential) +- **DON'T** close event loop in tests (pytest-asyncio handles cleanup) +- **DON'T** mismatch fixture scope and loop scope + +--- + +## Implementation Phases + +### Phase 1: Core Async Subprocess (Foundation) + +**Goal:** Create async subprocess wrapper matching `SubprocessCommand` API. + +**Files to create:** +- `src/libvcs/_internal/async_subprocess.py` + +**Key patterns:** +```python +@dataclasses.dataclass +class AsyncSubprocessCommand: + args: list[str] + cwd: pathlib.Path | None = None + env: dict[str, str] | None = None + + async def run(self, *, check: bool = True, timeout: float | None = None) -> tuple[str, str, int]: + proc = await asyncio.create_subprocess_shell(...) + async with asyncio.timeout(timeout): + stdout, stderr = await proc.communicate() + return stdout.decode(), stderr.decode(), proc.returncode +``` + +**Tests:** +- `tests/_internal/test_async_subprocess.py` + +--- + +### Phase 2: Async Run Function + +**Goal:** Async equivalent of `_internal/run.py` with output parsing. + +**Files to create:** +- `src/libvcs/_internal/async_run.py` + +**Key considerations:** +- Reuse output parsing logic from `run.py` +- Async callback protocol: `async def callback(output: str, timestamp: datetime) -> None` +- Stream output line-by-line using `StreamReader.readline()` + +**Tests:** +- `tests/_internal/test_async_run.py` + +--- + +### Phase 3: Async Command Classes + +**Goal:** Async equivalents of `Git`, `Hg`, `Svn` command classes. 
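+
+The key pattern, as in the AGENTS.md example: argument building stays in
+plain sync functions shared by both variants:
+
+```python
+# Shared argument building (sync)
+def build_clone_args(url: str, depth: int | None = None) -> list[str]:
+    args = ["clone", url]
+    if depth:
+        args.extend(["--depth", str(depth)])
+    return args
+
+# Async method delegates to the shared builder
+async def clone(self, url: str, depth: int | None = None) -> str:
+    return await self.run(build_clone_args(url, depth))
+```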
+ +**Files to create:** +- `src/libvcs/cmd/_async/__init__.py` +- `src/libvcs/cmd/_async/git.py` - `AsyncGit` +- `src/libvcs/cmd/_async/hg.py` - `AsyncHg` +- `src/libvcs/cmd/_async/svn.py` - `AsyncSvn` + +**Strategy:** +- Extract argument-building to shared functions +- Async methods call `await self.run()` instead of `self.run()` +- Manager classes (GitRemoteManager, etc.) get async variants + +**Tests:** +- `tests/cmd/_async/test_git.py` +- `tests/cmd/_async/test_hg.py` +- `tests/cmd/_async/test_svn.py` + +--- + +### Phase 4: Async Sync Classes + +**Goal:** Async equivalents of `GitSync`, `HgSync`, `SvnSync`. + +**Files to create:** +- `src/libvcs/sync/_async/__init__.py` +- `src/libvcs/sync/_async/base.py` - `AsyncBaseSync` +- `src/libvcs/sync/_async/git.py` - `AsyncGitSync` +- `src/libvcs/sync/_async/hg.py` - `AsyncHgSync` +- `src/libvcs/sync/_async/svn.py` - `AsyncSvnSync` + +**Key patterns:** +```python +class AsyncGitSync(AsyncBaseSync): + async def obtain(self, ...) -> None: + await self.cmd.clone(...) + + async def update_repo(self, ...) -> None: + await self.cmd.fetch(...) + await self.cmd.checkout(...) +``` + +**Tests:** +- `tests/sync/_async/test_git.py` +- `tests/sync/_async/test_hg.py` +- `tests/sync/_async/test_svn.py` + +--- + +### Phase 5: Async pytest Plugin + +**Goal:** Async fixture variants for testing. + +**Files to modify:** +- `src/libvcs/pytest_plugin.py` - Add async fixtures + +**New fixtures:** +- `async_create_git_remote_repo` +- `async_create_hg_remote_repo` +- `async_create_svn_remote_repo` +- `async_git_repo`, `async_hg_repo`, `async_svn_repo` + +**Pattern:** +```python +@pytest_asyncio.fixture(loop_scope="function") +async def async_git_repo(tmp_path: Path) -> t.AsyncGenerator[AsyncGitSync, None]: + repo = AsyncGitSync(url="...", path=tmp_path / "repo") + await repo.obtain() + yield repo +``` + +--- + +## Test Strategy + +### pytest Configuration + +```toml +# pyproject.toml +[tool.pytest.ini_options] +asyncio_mode = "strict" +asyncio_default_fixture_loop_scope = "function" +``` + +### Parametrized Test Pattern + +```python +class CloneFixture(t.NamedTuple): + test_id: str + clone_kwargs: dict[str, t.Any] + expected: list[str] + +CLONE_FIXTURES = [ + CloneFixture("basic", {}, [".git"]), + CloneFixture("shallow", {"depth": 1}, [".git"]), +] + +@pytest.mark.parametrize( + list(CloneFixture._fields), + CLONE_FIXTURES, + ids=[f.test_id for f in CLONE_FIXTURES], +) +@pytest.mark.asyncio +async def test_clone(test_id: str, clone_kwargs: dict, expected: list, ...) -> None: + ... +``` + +### Coverage Goal + +- **Target:** 100% coverage for all async code +- **Approach:** Mirror sync tests, add async-specific edge cases +- **Tools:** pytest-cov, pytest-asyncio + +--- + +## Type Strategy + +### Callback Types + +```python +# Sync callback (unchanged) +ProgressCallback = Callable[[str, datetime], None] + +# Async callback (for async APIs only) +AsyncProgressCallback = Callable[[str, datetime], Awaitable[None]] + +# Protocol for type checking +class AsyncProgressProtocol(t.Protocol): + async def __call__(self, output: str, timestamp: datetime) -> None: ... 
+``` + +### Helper for Sync Callback Users + +```python +def wrap_sync_callback( + sync_cb: Callable[[str, datetime], None] +) -> AsyncProgressProtocol: + """Wrap a sync callback for use with async APIs.""" + async def wrapper(output: str, timestamp: datetime) -> None: + sync_cb(output, timestamp) + return wrapper +``` + +--- + +## File Structure + +``` +src/libvcs/ +├── _internal/ +│ ├── subprocess.py # Existing sync +│ ├── async_subprocess.py # NEW: Async subprocess +│ ├── run.py # Existing sync +│ └── async_run.py # NEW: Async run +├── cmd/ +│ ├── git.py # Existing sync +│ ├── hg.py +│ ├── svn.py +│ └── _async/ # NEW +│ ├── __init__.py +│ ├── git.py # AsyncGit +│ ├── hg.py # AsyncHg +│ └── svn.py # AsyncSvn +├── sync/ +│ ├── base.py # Existing sync +│ ├── git.py +│ ├── hg.py +│ ├── svn.py +│ └── _async/ # NEW +│ ├── __init__.py +│ ├── base.py # AsyncBaseSync +│ ├── git.py # AsyncGitSync +│ ├── hg.py # AsyncHgSync +│ └── svn.py # AsyncSvnSync +└── pytest_plugin.py # Add async fixtures +``` + +--- + +## Success Criteria + +1. All async APIs pass mypy with strict mode +2. 100% test coverage for async code +3. All existing sync tests continue to pass +4. Documentation updated with async examples +5. pytest-asyncio strict mode works without warnings diff --git a/pyproject.toml b/pyproject.toml index c955403bc..1258e0cbb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -78,8 +78,9 @@ dev = [ # Testing "gp-libs", "pytest", - "pytest-rerunfailures", + "pytest-asyncio", "pytest-mock", + "pytest-rerunfailures", "pytest-watcher", # Coverage "codecov", @@ -106,8 +107,9 @@ docs = [ testing = [ "gp-libs", "pytest", - "pytest-rerunfailures", + "pytest-asyncio", "pytest-mock", + "pytest-rerunfailures", "pytest-watcher", ] coverage =[ @@ -233,6 +235,13 @@ testpaths = [ ] filterwarnings = [ "ignore:The frontend.Option(Parser)? class.*:DeprecationWarning::", + "ignore:The configuration option \"asyncio_default_fixture_loop_scope\" is unset.:DeprecationWarning:pytest_asyncio.plugin", +] +asyncio_mode = "strict" +asyncio_default_fixture_loop_scope = "function" +markers = [ + "performance: marks tests as performance tests (deselect with '-m \"not performance\"')", + "benchmark: marks tests as benchmark tests for comparing implementation methods", ] [tool.pytest-watcher] diff --git a/src/libvcs/_internal/async_run.py b/src/libvcs/_internal/async_run.py new file mode 100644 index 000000000..97564ed6f --- /dev/null +++ b/src/libvcs/_internal/async_run.py @@ -0,0 +1,300 @@ +"""Async subprocess execution with progress callbacks. + +Async equivalent of :mod:`libvcs._internal.run`. + +Note +---- +This is an internal API not covered by versioning policy. + +Examples +-------- +- :func:`~async_run`: Async command execution with progress callback. + + Before (sync): + + >>> from libvcs._internal.run import run + >>> output = run(['echo', 'hello'], check_returncode=True) + + With this (async): + + >>> from libvcs._internal.async_run import async_run + >>> async def example(): + ... output = await async_run(['echo', 'hello']) + ... 
return output.strip() + >>> asyncio.run(example()) + 'hello' +""" + +from __future__ import annotations + +import asyncio +import asyncio.subprocess +import datetime +import logging +import sys +import typing as t +from collections.abc import Mapping, Sequence + +from libvcs import exc +from libvcs._internal.types import StrOrBytesPath + +from .run import console_to_str + +logger = logging.getLogger(__name__) + + +class AsyncProgressCallbackProtocol(t.Protocol): + """Async callback to report subprocess communication. + + Async equivalent of :class:`~libvcs._internal.run.ProgressCallbackProtocol`. + + Examples + -------- + >>> async def my_progress(output: str, timestamp: datetime.datetime) -> None: + ... print(f"[{timestamp}] {output}", end="") + + See Also + -------- + libvcs._internal.run.ProgressCallbackProtocol : Sync equivalent + wrap_sync_callback : Helper to wrap sync callbacks for async use + """ + + async def __call__(self, output: str, timestamp: datetime.datetime) -> None: + """Process progress for subprocess communication.""" + ... + + +def wrap_sync_callback( + sync_cb: t.Callable[[str, datetime.datetime], None], +) -> AsyncProgressCallbackProtocol: + """Wrap a sync callback for use with async APIs. + + This helper allows users with existing sync callbacks to use them + with async APIs without modification. + + Parameters + ---------- + sync_cb : Callable[[str, datetime.datetime], None] + Synchronous callback function + + Returns + ------- + AsyncProgressCallbackProtocol + Async wrapper that calls the sync callback + + Examples + -------- + >>> def my_sync_progress(output: str, timestamp: datetime.datetime) -> None: + ... print(output, end="") + >>> async_cb = wrap_sync_callback(my_sync_progress) + >>> # Now use async_cb with async_run() + """ + + async def wrapper(output: str, timestamp: datetime.datetime) -> None: + sync_cb(output, timestamp) + + return wrapper + + +if sys.platform == "win32": + _ENV: t.TypeAlias = Mapping[str, str] +else: + _ENV: t.TypeAlias = Mapping[bytes, StrOrBytesPath] | Mapping[str, StrOrBytesPath] + +_CMD: t.TypeAlias = StrOrBytesPath | Sequence[StrOrBytesPath] + + +def _args_to_list(args: _CMD) -> list[str]: + """Convert command args to list of strings. + + Parameters + ---------- + args : str | bytes | Path | Sequence[str | bytes | Path] + Command arguments in various forms + + Returns + ------- + list[str] + Normalized list of string arguments + """ + from os import PathLike + + if isinstance(args, (str, bytes, PathLike)): + if isinstance(args, bytes): + return [args.decode()] + return [str(args)] + return [arg.decode() if isinstance(arg, bytes) else str(arg) for arg in args] + + +async def async_run( + args: _CMD, + *, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + check_returncode: bool = True, + callback: AsyncProgressCallbackProtocol | None = None, + timeout: float | None = None, +) -> str: + """Run a command asynchronously. + + Run 'args' and return stdout content (non-blocking). Optionally stream + stderr to a callback for progress reporting. Raises an exception if + the command exits non-zero (when check_returncode=True). + + This is the async equivalent of :func:`~libvcs._internal.run.run`. 
+ + Parameters + ---------- + args : list[str] | str + The command to run + cwd : str | Path, optional + Working directory for the command + env : Mapping[str, str], optional + Environment variables for the command + check_returncode : bool, default True + If True, raise :class:`~libvcs.exc.CommandError` on non-zero exit + callback : AsyncProgressCallbackProtocol, optional + Async callback to receive stderr output in real-time. + Signature: ``async def callback(output: str, timestamp: datetime) -> None`` + timeout : float, optional + Timeout in seconds. Raises :class:`~libvcs.exc.CommandTimeoutError` + if exceeded. + + Returns + ------- + str + Combined stdout output + + Raises + ------ + libvcs.exc.CommandError + If check_returncode=True and process exits with non-zero code + libvcs.exc.CommandTimeoutError + If timeout is exceeded + + Examples + -------- + Basic usage: + + >>> async def example(): + ... output = await async_run(['echo', 'hello']) + ... return output.strip() + >>> asyncio.run(example()) + 'hello' + + With progress callback: + + >>> async def progress(output: str, timestamp: datetime.datetime) -> None: + ... pass # Handle progress output + >>> async def clone_example(): + ... url = f'file://{create_git_remote_repo()}' + ... output = await async_run(['git', 'clone', url, str(tmp_path / 'cb_repo')]) + ... return 'Cloning' in output or output == '' + >>> asyncio.run(clone_example()) + True + + See Also + -------- + libvcs._internal.run.run : Synchronous equivalent + AsyncSubprocessCommand : Lower-level async subprocess wrapper + """ + args_list = _args_to_list(args) + + # Create subprocess with pipes (using non-shell exec for security) + proc = await asyncio.subprocess.create_subprocess_exec( + *args_list, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + cwd=cwd, + env=env, + ) + + async def _run_with_callback() -> tuple[bytes, bytes, int]: + """Run subprocess, streaming stderr to callback.""" + stdout_data = b"" + stderr_data = b"" + + assert proc.stdout is not None + assert proc.stderr is not None + + # Read stderr line-by-line for progress callback + if callback is not None: + # Stream stderr to callback while collecting stdout + async def read_stderr() -> bytes: + collected = b"" + assert proc.stderr is not None + while True: + line = await proc.stderr.readline() + if not line: + break + collected += line + # Call progress callback with decoded line + await callback( + output=console_to_str(line), + timestamp=datetime.datetime.now(), + ) + return collected + + # Run stdout collection and stderr streaming concurrently + stdout_task = asyncio.create_task(proc.stdout.read()) + stderr_task = asyncio.create_task(read_stderr()) + + stdout_data, stderr_data = await asyncio.gather(stdout_task, stderr_task) + + # Send final carriage return (matching sync behavior) + await callback(output="\r", timestamp=datetime.datetime.now()) + else: + # No callback - just collect both streams + stdout_data, stderr_data = await proc.communicate() + + # Wait for process to complete + await proc.wait() + returncode = proc.returncode + assert returncode is not None + + return stdout_data, stderr_data, returncode + + try: + if timeout is not None: + stdout_bytes, stderr_bytes, returncode = await asyncio.wait_for( + _run_with_callback(), + timeout=timeout, + ) + else: + stdout_bytes, stderr_bytes, returncode = await _run_with_callback() + except asyncio.TimeoutError: + # Kill process on timeout + proc.kill() + await proc.wait() + raise exc.CommandTimeoutError( + output="Command 
timed out", + returncode=-1, + cmd=args_list, + ) from None + + # Process stdout: strip and join lines (matching sync behavior) + if stdout_bytes: + lines = filter( + None, + (line.strip() for line in stdout_bytes.splitlines()), + ) + output = console_to_str(b"\n".join(lines)) + else: + output = "" + + # On error, use stderr content + if returncode != 0 and stderr_bytes: + stderr_lines = filter( + None, + (line.strip() for line in stderr_bytes.splitlines()), + ) + output = console_to_str(b"".join(stderr_lines)) + + if returncode != 0 and check_returncode: + raise exc.CommandError( + output=output, + returncode=returncode, + cmd=args_list, + ) + + return output diff --git a/src/libvcs/_internal/async_subprocess.py b/src/libvcs/_internal/async_subprocess.py new file mode 100644 index 000000000..94bcf021c --- /dev/null +++ b/src/libvcs/_internal/async_subprocess.py @@ -0,0 +1,393 @@ +# ruff: noqa: A002 +r"""Async invocable :mod:`subprocess` wrapper. + +Async equivalent of :mod:`libvcs._internal.subprocess`. + +Note +---- +This is an internal API not covered by versioning policy. + +Examples +-------- +- :class:`~AsyncSubprocessCommand`: Async wrapper for subprocess execution. + + Before (sync): + + >>> import subprocess + >>> subprocess.run( + ... ['echo', 'hi'], + ... capture_output=True, text=True + ... ).stdout + 'hi\n' + + With this (async): + + >>> async def example(): + ... cmd = AsyncSubprocessCommand(['echo', 'hi']) + ... result = await cmd.run() + ... return result.stdout + >>> asyncio.run(example()) + b'hi\n' +""" + +from __future__ import annotations + +import asyncio +import asyncio.subprocess +import dataclasses +import subprocess +import typing as t +from collections.abc import Mapping, Sequence + +from libvcs._internal.types import StrOrBytesPath + +from .dataclasses import SkipDefaultFieldsReprMixin + +#: Command type alias +_CMD: t.TypeAlias = StrOrBytesPath | Sequence[StrOrBytesPath] + +#: Environment type alias +_ENV: t.TypeAlias = Mapping[str, str] + + +@dataclasses.dataclass +class AsyncCompletedProcess(t.Generic[t.AnyStr]): + """Result of an async subprocess execution. + + Mirrors :class:`subprocess.CompletedProcess` for async context. + + Parameters + ---------- + args : list[str] + The command arguments + returncode : int + Exit code of the process + stdout : str | bytes | None + Captured stdout, if any + stderr : str | bytes | None + Captured stderr, if any + """ + + args: list[str] + returncode: int + stdout: t.AnyStr | None = None + stderr: t.AnyStr | None = None + + def check_returncode(self) -> None: + """Raise CalledProcessError if returncode is non-zero. + + Raises + ------ + subprocess.CalledProcessError + If the process exited with a non-zero code + """ + if self.returncode != 0: + raise subprocess.CalledProcessError( + self.returncode, + self.args, + self.stdout, + self.stderr, + ) + + +@dataclasses.dataclass(repr=False) +class AsyncSubprocessCommand(SkipDefaultFieldsReprMixin): + r"""Async subprocess command wrapper. + + Wraps asyncio subprocess execution in a dataclass for inspection + and mutation before invocation. + + Parameters + ---------- + args : list[str] + Command and arguments to run + cwd : str | Path, optional + Working directory for the command + env : dict[str, str], optional + Environment variables for the command + + Examples + -------- + >>> import asyncio + >>> async def example(): + ... cmd = AsyncSubprocessCommand(['echo', 'hello']) + ... result = await cmd.run() + ... 
return result.stdout + >>> asyncio.run(example()) + b'hello\n' + + Modify before running: + + >>> cmd = AsyncSubprocessCommand(['echo', 'hi']) + >>> cmd.args + ['echo', 'hi'] + >>> cmd.args[1] = 'hello' + >>> cmd.args + ['echo', 'hello'] + """ + + args: _CMD + cwd: StrOrBytesPath | None = None + env: _ENV | None = None + + # Limits for stdout/stderr + limit: int = 2**16 # 64 KiB default buffer + + def _args_as_list(self) -> list[str]: + """Convert args to list of strings for asyncio.""" + from os import PathLike + + args = self.args + if isinstance(args, (str, bytes, PathLike)): + # Single command (str, bytes, or PathLike) + return [str(args) if not isinstance(args, bytes) else args.decode()] + # At this point, args is Sequence[StrOrBytesPath] + return [ + str(arg) if not isinstance(arg, bytes) else arg.decode() for arg in args + ] + + async def _create_process( + self, + *, + stdin: int | None = None, + stdout: int | None = None, + stderr: int | None = None, + ) -> asyncio.subprocess.Process: + """Create an async subprocess. + + Uses asyncio.create_subprocess_exec for secure, non-shell execution. + """ + args_list = self._args_as_list() + # Use asyncio's subprocess creation (non-shell variant for security) + return await asyncio.subprocess.create_subprocess_exec( + *args_list, + stdin=stdin, + stdout=stdout, + stderr=stderr, + cwd=self.cwd, + env=self.env, + limit=self.limit, + ) + + @t.overload + async def run( + self, + *, + check: bool = ..., + timeout: float | None = ..., + input: bytes | None = ..., + text: t.Literal[False] = ..., + ) -> AsyncCompletedProcess[bytes]: ... + + @t.overload + async def run( + self, + *, + check: bool = ..., + timeout: float | None = ..., + input: str | None = ..., + text: t.Literal[True], + ) -> AsyncCompletedProcess[str]: ... + + @t.overload + async def run( + self, + *, + check: bool = ..., + timeout: float | None = ..., + input: str | bytes | None = ..., + text: bool = ..., + ) -> AsyncCompletedProcess[t.Any]: ... + + async def run( + self, + *, + check: bool = False, + timeout: float | None = None, + input: str | bytes | None = None, + text: bool = False, + ) -> AsyncCompletedProcess[t.Any]: + r"""Run command asynchronously and return completed process. + + Uses asyncio subprocess APIs for non-blocking operation. + + Parameters + ---------- + check : bool, default False + If True, raise CalledProcessError on non-zero exit + timeout : float, optional + Timeout in seconds. Raises asyncio.TimeoutError if exceeded. + input : str | bytes, optional + Data to send to stdin + text : bool, default False + If True, decode stdout/stderr as text + + Returns + ------- + AsyncCompletedProcess + Result with args, returncode, stdout, stderr + + Raises + ------ + subprocess.CalledProcessError + If check=True and process exits with non-zero code + asyncio.TimeoutError + If timeout is exceeded + + Examples + -------- + >>> import asyncio + >>> async def example(): + ... cmd = AsyncSubprocessCommand(['echo', 'hello']) + ... result = await cmd.run(text=True) + ... 
return result.stdout.strip() + >>> asyncio.run(example()) + 'hello' + """ + args_list = self._args_as_list() + + # Prepare input as bytes + input_bytes: bytes | None = None + if input is not None: + input_bytes = input.encode() if isinstance(input, str) else input + + # Create subprocess + proc = await self._create_process( + stdin=asyncio.subprocess.PIPE if input_bytes else None, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + + try: + # Use communicate() with optional timeout via wait_for + if timeout is not None: + stdout_bytes, stderr_bytes = await asyncio.wait_for( + proc.communicate(input_bytes), + timeout=timeout, + ) + else: + stdout_bytes, stderr_bytes = await proc.communicate(input_bytes) + except asyncio.TimeoutError: + # Kill process on timeout + proc.kill() + await proc.wait() + raise + + # Get return code (should be set after communicate) + returncode = proc.returncode + assert returncode is not None, "returncode should be set after communicate()" + + # Decode if text mode + stdout: str | bytes | None = stdout_bytes + stderr: str | bytes | None = stderr_bytes + if text: + stdout = stdout_bytes.decode() if stdout_bytes else "" + stderr = stderr_bytes.decode() if stderr_bytes else "" + + result: AsyncCompletedProcess[t.Any] = AsyncCompletedProcess( + args=args_list, + returncode=returncode, + stdout=stdout, + stderr=stderr, + ) + + if check: + result.check_returncode() + + return result + + async def check_output( + self, + *, + timeout: float | None = None, + input: str | bytes | None = None, + text: bool = False, + ) -> str | bytes: + r"""Run command and return stdout, raising on non-zero exit. + + Parameters + ---------- + timeout : float, optional + Timeout in seconds + input : str | bytes, optional + Data to send to stdin + text : bool, default False + If True, return stdout as text + + Returns + ------- + str | bytes + Command stdout + + Raises + ------ + subprocess.CalledProcessError + If process exits with non-zero code + asyncio.TimeoutError + If timeout is exceeded + + Examples + -------- + >>> import asyncio + >>> async def example(): + ... cmd = AsyncSubprocessCommand(['echo', 'hello']) + ... return await cmd.check_output(text=True) + >>> asyncio.run(example()) + 'hello\n' + """ + result = await self.run(check=True, timeout=timeout, input=input, text=text) + return result.stdout if result.stdout is not None else (b"" if not text else "") + + async def wait( + self, + *, + timeout: float | None = None, + ) -> int: + """Run command and return exit code. + + Discards stdout/stderr. + + Parameters + ---------- + timeout : float, optional + Timeout in seconds + + Returns + ------- + int + Process exit code + + Raises + ------ + asyncio.TimeoutError + If timeout is exceeded + + Examples + -------- + >>> import asyncio + >>> async def example(): + ... cmd = AsyncSubprocessCommand(['true']) + ... 
return await cmd.wait() + >>> asyncio.run(example()) + 0 + """ + proc = await self._create_process( + stdin=asyncio.subprocess.DEVNULL, + stdout=asyncio.subprocess.DEVNULL, + stderr=asyncio.subprocess.DEVNULL, + ) + + try: + if timeout is not None: + returncode = await asyncio.wait_for( + proc.wait(), + timeout=timeout, + ) + else: + returncode = await proc.wait() + except asyncio.TimeoutError: + proc.kill() + await proc.wait() + raise + + return returncode diff --git a/src/libvcs/_internal/copy.py b/src/libvcs/_internal/copy.py new file mode 100644 index 000000000..3e2bafb26 --- /dev/null +++ b/src/libvcs/_internal/copy.py @@ -0,0 +1,124 @@ +"""Copy utilities with reflink (copy-on-write) support. + +This module provides optimized directory copy operations that leverage +filesystem-level copy-on-write (CoW) when available, with automatic +fallback to standard copying on unsupported filesystems. + +On Btrfs, XFS, and APFS filesystems, reflink copies are significantly faster +as they only copy metadata - the actual data blocks are shared until modified. +On ext4 and other filesystems, `cp --reflink=auto` silently falls back to +regular copying with no performance penalty. +""" + +from __future__ import annotations + +import os +import pathlib +import shutil +import subprocess +import typing as t + + +def copytree_reflink( + src: pathlib.Path, + dst: pathlib.Path, + ignore: t.Callable[..., t.Any] | None = None, +) -> pathlib.Path: + """Copy directory tree using reflink (CoW) if available, fallback to copytree. + + On Btrfs/XFS/APFS, this is significantly faster as it only copies metadata. + On ext4 and other filesystems, `cp --reflink=auto` silently falls back to + regular copy. + + Parameters + ---------- + src : pathlib.Path + Source directory to copy. + dst : pathlib.Path + Destination directory (must not exist). + ignore : callable, optional + Passed to shutil.copytree for fallback. For cp, patterns are applied + after copy by deleting ignored files. + + Returns + ------- + pathlib.Path + The destination path. + + Examples + -------- + >>> import pathlib + >>> src = tmp_path / "source" + >>> src.mkdir() + >>> (src / "file.txt").write_text("hello") + 5 + >>> dst = tmp_path / "dest" + >>> result = copytree_reflink(src, dst) + >>> (result / "file.txt").read_text() + 'hello' + + With ignore patterns: + + >>> import shutil + >>> src2 = tmp_path / "source2" + >>> src2.mkdir() + >>> (src2 / "keep.txt").write_text("keep") + 4 + >>> (src2 / "skip.pyc").write_text("skip") + 4 + >>> dst2 = tmp_path / "dest2" + >>> result2 = copytree_reflink(src2, dst2, ignore=shutil.ignore_patterns("*.pyc")) + >>> (result2 / "keep.txt").exists() + True + >>> (result2 / "skip.pyc").exists() + False + """ + dst.parent.mkdir(parents=True, exist_ok=True) + + try: + # Try cp --reflink=auto (Linux) - silent fallback on unsupported FS + subprocess.run( + ["cp", "-a", "--reflink=auto", str(src), str(dst)], + check=True, + capture_output=True, + timeout=60, + ) + except (subprocess.CalledProcessError, FileNotFoundError, OSError): + # Fallback to shutil.copytree (Windows, cp not found, etc.) + return pathlib.Path(shutil.copytree(src, dst, ignore=ignore)) + else: + # cp succeeded - apply ignore patterns if needed + if ignore is not None: + _apply_ignore_patterns(dst, ignore) + return dst + + +def _apply_ignore_patterns( + dst: pathlib.Path, + ignore: t.Callable[[str, list[str]], t.Iterable[str]], +) -> None: + """Remove files matching ignore patterns after cp --reflink copy. 
+ + This function walks the destination directory and removes any files or + directories that match the ignore patterns. This is necessary because + `cp` doesn't support ignore patterns directly. + + Parameters + ---------- + dst : pathlib.Path + Destination directory to clean up. + ignore : callable + A callable that takes (directory, names) and returns names to ignore. + Compatible with shutil.ignore_patterns(). + """ + for root, dirs, files in os.walk(dst, topdown=True): + root_path = pathlib.Path(root) + ignored = set(ignore(root, dirs + files)) + for name in ignored: + target = root_path / name + if target.is_dir(): + shutil.rmtree(target) + elif target.exists(): + target.unlink() + # Modify dirs in-place to skip ignored directories during walk + dirs[:] = [d for d in dirs if d not in ignored] diff --git a/src/libvcs/_internal/file_lock.py b/src/libvcs/_internal/file_lock.py new file mode 100644 index 000000000..79fe1aba9 --- /dev/null +++ b/src/libvcs/_internal/file_lock.py @@ -0,0 +1,1025 @@ +"""Typed, asyncio-friendly file locking based on filelock patterns. + +This module provides atomic file-based locking with support for both synchronous +and asynchronous contexts. It uses the SoftFileLock pattern (``os.O_CREAT | os.O_EXCL``) +for portable, atomic lock acquisition. + +Note +---- +This is an internal API not covered by versioning policy. + +Design Principles +----------------- +1. **Atomic acquisition**: Uses ``os.O_CREAT | os.O_EXCL`` for race-free lock creation +2. **Reentrant locking**: Same thread can acquire lock multiple times +3. **Stale lock detection**: Auto-removes locks older than configurable timeout +4. **Async support**: :class:`AsyncFileLock` wraps sync lock with ``asyncio.sleep`` +5. **PID tracking**: Writes PID to lock file for debugging +6. **Two-file pattern**: Lock file (temporary) + marker file (permanent) + +Examples +-------- +Basic synchronous usage: + +>>> import tempfile +>>> import pathlib +>>> with tempfile.TemporaryDirectory() as tmpdir: +... lock_path = pathlib.Path(tmpdir) / "my.lock" +... lock = FileLock(lock_path) +... with lock: +... # Critical section - only one process at a time +... pass +... lock.is_locked +False + +Async usage: + +>>> async def example(): +... import tempfile +... import pathlib +... with tempfile.TemporaryDirectory() as tmpdir: +... lock_path = pathlib.Path(tmpdir) / "my.lock" +... async with AsyncFileLock(lock_path): +... # Async critical section +... pass +... return "done" +>>> asyncio.run(example()) +'done' + +Two-file atomic initialization pattern: + +>>> def do_expensive_init(): +... pass # Expensive one-time setup +>>> with tempfile.TemporaryDirectory() as tmpdir: +... path = pathlib.Path(tmpdir) / "resource" +... path.mkdir() +... result = atomic_init(path, do_expensive_init) +... 
result # True if we did init, False if another process did +True + +See Also +-------- +- filelock: The library that inspired this implementation +- pytest's make_numbered_dir_with_cleanup: Similar atomic init pattern +""" + +from __future__ import annotations + +import asyncio +import contextlib +import dataclasses +import os +import pathlib +import shutil +import time +import typing as t +from types import TracebackType + +if t.TYPE_CHECKING: + from typing_extensions import Self + +__all__ = [ + "AcquireReturnProxy", + "AsyncAcquireReturnProxy", + "AsyncFileLock", + "FileLock", + "FileLockContext", + "FileLockError", + "FileLockStale", + "FileLockTimeout", + "async_atomic_init", + "atomic_init", +] + + +# ============================================================================= +# Exceptions +# ============================================================================= + + +class FileLockError(Exception): + """Base exception for file lock errors. + + All file lock-related exceptions inherit from this class, making it easy + to catch any lock-related error with a single except clause. + + Examples + -------- + >>> try: + ... raise FileLockError("Lock failed") + ... except FileLockError as e: + ... str(e) + 'Lock failed' + """ + + +class FileLockTimeout(FileLockError, TimeoutError): + """Raised when lock acquisition times out. + + This exception inherits from both :class:`FileLockError` and + :class:`TimeoutError`, allowing it to be caught by either. + + Parameters + ---------- + lock_file : str + Path to the lock file that could not be acquired. + timeout : float + Timeout value in seconds that was exceeded. + + Examples + -------- + >>> exc = FileLockTimeout("/tmp/my.lock", 30.0) + >>> str(exc) + 'Timeout (30.0s) waiting for lock: /tmp/my.lock' + >>> exc.lock_file + '/tmp/my.lock' + >>> exc.timeout + 30.0 + """ + + def __init__(self, lock_file: str, timeout: float) -> None: + #: Path to the lock file + self.lock_file = lock_file + #: Timeout in seconds + self.timeout = timeout + super().__init__(f"Timeout ({timeout}s) waiting for lock: {lock_file}") + + def __reduce__(self) -> tuple[type[FileLockTimeout], tuple[str, float]]: + """Support pickling for multiprocessing.""" + return self.__class__, (self.lock_file, self.timeout) + + +class FileLockStale(FileLockError): + """Informational exception for stale lock detection. + + This exception is raised when a stale lock is detected but cannot be + removed. It's primarily informational and can be caught to log warnings. + + Parameters + ---------- + lock_file : str + Path to the stale lock file. + age_seconds : float + Age of the lock file in seconds. + + Examples + -------- + >>> exc = FileLockStale("/tmp/my.lock", 3600.0) + >>> str(exc) + 'Stale lock (3600.0s old): /tmp/my.lock' + """ + + def __init__(self, lock_file: str, age_seconds: float) -> None: + #: Path to the stale lock file + self.lock_file = lock_file + #: Age in seconds + self.age_seconds = age_seconds + super().__init__(f"Stale lock ({age_seconds}s old): {lock_file}") + + def __reduce__(self) -> tuple[type[FileLockStale], tuple[str, float]]: + """Support pickling for multiprocessing.""" + return self.__class__, (self.lock_file, self.age_seconds) + + +# ============================================================================= +# Context Dataclass +# ============================================================================= + + +@dataclasses.dataclass +class FileLockContext: + """Internal state container for :class:`FileLock`. 
+ + This dataclass holds all the configuration and runtime state for a file + lock. It's separated from the lock class to allow easier testing and + to support potential future features like lock serialization. + + Parameters + ---------- + lock_file : str + Absolute path to the lock file. + timeout : float, default=-1.0 + Timeout for lock acquisition. -1 means wait forever. + poll_interval : float, default=0.05 + Interval between acquisition attempts in seconds. + stale_timeout : float, default=300.0 + Age in seconds after which a lock is considered stale. + mode : int, default=0o644 + File permission mode for the lock file. + lock_file_fd : int or None + File descriptor when lock is held, None otherwise. + lock_counter : int, default=0 + Reentrant lock counter (number of times acquired). + + Examples + -------- + >>> ctx = FileLockContext("/tmp/my.lock") + >>> ctx.is_locked + False + >>> ctx.lock_counter + 0 + """ + + lock_file: str + timeout: float = -1.0 + poll_interval: float = 0.05 + stale_timeout: float = 300.0 + mode: int = 0o644 + lock_file_fd: int | None = dataclasses.field(default=None, repr=False) + lock_counter: int = 0 + + @property + def is_locked(self) -> bool: + """Check if the lock is currently held. + + Returns + ------- + bool + True if the lock is held, False otherwise. + """ + return self.lock_file_fd is not None + + +# ============================================================================= +# Return Proxies +# ============================================================================= + + +class AcquireReturnProxy: + """Context manager proxy returned by :meth:`FileLock.acquire`. + + This proxy allows the acquire/release pattern to be used with context + managers while still supporting explicit acquire() calls. + + Parameters + ---------- + lock : FileLock + The lock instance this proxy wraps. + + Examples + -------- + >>> import tempfile + >>> import pathlib + >>> with tempfile.TemporaryDirectory() as tmpdir: + ... lock_path = pathlib.Path(tmpdir) / "my.lock" + ... lock = FileLock(lock_path) + ... with lock.acquire(): # Returns AcquireReturnProxy + ... pass # Lock is held + ... lock.is_locked + False + """ + + def __init__(self, lock: FileLock) -> None: + self._lock = lock + + def __enter__(self) -> FileLock: + """Enter context manager, returning the lock.""" + return self._lock + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + """Exit context manager, releasing the lock.""" + self._lock.release() + + +class AsyncAcquireReturnProxy: + """Async context manager proxy returned by :meth:`AsyncFileLock.acquire`. + + Parameters + ---------- + lock : AsyncFileLock + The async lock instance this proxy wraps. + + Examples + -------- + >>> async def example(): + ... import tempfile + ... import pathlib + ... with tempfile.TemporaryDirectory() as tmpdir: + ... lock_path = pathlib.Path(tmpdir) / "my.lock" + ... lock = AsyncFileLock(lock_path) + ... proxy = await lock.acquire() + ... async with proxy: + ... pass # Lock is held + ... 
return lock.is_locked + >>> asyncio.run(example()) + False + """ + + def __init__(self, lock: AsyncFileLock) -> None: + self._lock = lock + + async def __aenter__(self) -> AsyncFileLock: + """Enter async context manager, returning the lock.""" + return self._lock + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + """Exit async context manager, releasing the lock.""" + await self._lock.release() + + +# ============================================================================= +# FileLock (Synchronous) +# ============================================================================= + + +class FileLock(contextlib.ContextDecorator): + """Portable file-based lock using atomic file creation. + + This lock uses the SoftFileLock pattern where lock acquisition is + achieved through atomic file creation with ``os.O_CREAT | os.O_EXCL``. + This is portable across platforms and doesn't require OS-level locking. + + The lock is reentrant: the same thread can acquire it multiple times, + and must release it the same number of times. + + Parameters + ---------- + lock_file : str or PathLike + Path to the lock file. Parent directory must exist. + timeout : float, default=-1.0 + Maximum time to wait for lock acquisition in seconds. + Use -1 for infinite wait, 0 for non-blocking. + poll_interval : float, default=0.05 + Time between acquisition attempts in seconds. + stale_timeout : float, default=300.0 + Locks older than this (in seconds) are considered stale and + may be removed automatically. Default is 5 minutes. + mode : int, default=0o644 + File permission mode for the lock file. + + Attributes + ---------- + lock_file : str + Path to the lock file. + is_locked : bool + Whether the lock is currently held. + + Examples + -------- + Context manager usage: + + >>> import tempfile + >>> import pathlib + >>> with tempfile.TemporaryDirectory() as tmpdir: + ... lock_path = pathlib.Path(tmpdir) / "my.lock" + ... with FileLock(lock_path): + ... # Critical section + ... pass + + Explicit acquire/release: + + >>> with tempfile.TemporaryDirectory() as tmpdir: + ... lock_path = pathlib.Path(tmpdir) / "my.lock" + ... lock = FileLock(lock_path) + ... lock.acquire() # doctest: +ELLIPSIS + ... try: + ... pass # Critical section + ... finally: + ... lock.release() + <...AcquireReturnProxy object at ...> + + Non-blocking try-acquire: + + >>> with tempfile.TemporaryDirectory() as tmpdir: + ... lock_path = pathlib.Path(tmpdir) / "my.lock" + ... lock = FileLock(lock_path, timeout=0) + ... try: + ... with lock: + ... pass # Got the lock + ... except FileLockTimeout: + ... pass # Lock was held by another process + + See Also + -------- + AsyncFileLock : Async version of this lock. 
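+
+    Notes
+    -----
+    The lock is reentrant; a minimal sketch of the counter semantics
+    (``tempfile`` and ``pathlib`` are imported in the examples above):
+
+    >>> with tempfile.TemporaryDirectory() as tmpdir:
+    ...     lock_path = pathlib.Path(tmpdir) / "my.lock"
+    ...     lock = FileLock(lock_path)
+    ...     with lock, lock:  # same instance may be acquired twice
+    ...         lock.lock_counter
+    ...     lock.is_locked
+    2
+    False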
+ """ + + def __init__( + self, + lock_file: str | os.PathLike[str], + timeout: float = -1.0, + poll_interval: float = 0.05, + stale_timeout: float = 300.0, + mode: int = 0o644, + ) -> None: + self._context = FileLockContext( + lock_file=os.fspath(lock_file), + timeout=timeout, + poll_interval=poll_interval, + stale_timeout=stale_timeout, + mode=mode, + ) + + @property + def lock_file(self) -> str: + """Return the path to the lock file.""" + return self._context.lock_file + + @property + def is_locked(self) -> bool: + """Check if the lock is currently held by this instance.""" + return self._context.is_locked + + @property + def lock_counter(self) -> int: + """Return the number of times this lock has been acquired.""" + return self._context.lock_counter + + def _acquire(self) -> None: + """Low-level lock acquisition using os.O_CREAT | os.O_EXCL. + + Raises + ------ + FileExistsError + If the lock file already exists (lock is held). + """ + fd = os.open( + self._context.lock_file, + os.O_WRONLY | os.O_CREAT | os.O_EXCL, + self._context.mode, + ) + self._context.lock_file_fd = fd + # Write PID for debugging stale locks + os.write(fd, str(os.getpid()).encode()) + + def _release(self) -> None: + """Low-level lock release: close fd and remove file.""" + fd = self._context.lock_file_fd + if fd is not None: + os.close(fd) + self._context.lock_file_fd = None + pathlib.Path(self._context.lock_file).unlink(missing_ok=True) + + def _is_stale(self) -> bool: + """Check if the existing lock file is stale. + + Returns + ------- + bool + True if the lock is stale (older than stale_timeout). + """ + try: + mtime = pathlib.Path(self._context.lock_file).stat().st_mtime + age = time.time() - mtime + except OSError: + return True + else: + return age > self._context.stale_timeout + + def _remove_stale_lock(self) -> bool: + """Try to remove a stale lock file. + + Returns + ------- + bool + True if the stale lock was removed, False otherwise. + """ + if self._is_stale(): + try: + pathlib.Path(self._context.lock_file).unlink() + except OSError: + pass + else: + return True + return False + + def acquire( + self, + timeout: float | None = None, + poll_interval: float | None = None, + *, + blocking: bool = True, + ) -> AcquireReturnProxy: + """Acquire the file lock. + + Parameters + ---------- + timeout : float, optional + Override the default timeout for this acquisition. + poll_interval : float, optional + Override the default poll interval for this acquisition. + blocking : bool, default=True + If False, equivalent to timeout=0. + + Returns + ------- + AcquireReturnProxy + A context manager that releases the lock on exit. + + Raises + ------ + FileLockTimeout + If the lock cannot be acquired within the timeout. + + Examples + -------- + >>> import tempfile + >>> import pathlib + >>> with tempfile.TemporaryDirectory() as tmpdir: + ... lock_path = pathlib.Path(tmpdir) / "my.lock" + ... lock = FileLock(lock_path) + ... with lock.acquire(timeout=5.0): + ... 
pass  # Lock held
+        """
+        # Handle non-blocking mode
+        if not blocking:
+            timeout = 0
+
+        # Use instance defaults if not overridden
+        if timeout is None:
+            timeout = self._context.timeout
+        if poll_interval is None:
+            poll_interval = self._context.poll_interval
+
+        # Reentrant: if already locked, just increment counter
+        if self._context.lock_file_fd is not None:
+            self._context.lock_counter += 1
+            return AcquireReturnProxy(self)
+
+        start_time = time.perf_counter()
+
+        while True:
+            try:
+                self._acquire()
+                self._context.lock_counter = 1
+                return AcquireReturnProxy(self)
+            except FileExistsError:
+                pass
+
+            # Check for stale lock
+            if self._remove_stale_lock():
+                continue  # Retry immediately after removing stale lock
+
+            # Check timeout
+            elapsed = time.perf_counter() - start_time
+            if timeout >= 0 and elapsed >= timeout:
+                raise FileLockTimeout(self._context.lock_file, timeout)
+
+            # Wait before retrying
+            time.sleep(poll_interval)
+
+    def release(self, force: bool = False) -> None:
+        """Release the file lock.
+
+        Parameters
+        ----------
+        force : bool, default=False
+            If True, release the lock even if counter > 1.
+
+        Notes
+        -----
+        For reentrant locks, each acquire() must be matched by a release().
+        Use force=True to release regardless of the counter.
+
+        Examples
+        --------
+        >>> import tempfile
+        >>> import pathlib
+        >>> with tempfile.TemporaryDirectory() as tmpdir:
+        ...     lock_path = pathlib.Path(tmpdir) / "my.lock"
+        ...     lock = FileLock(lock_path)
+        ...     lock.acquire()  # doctest: +ELLIPSIS
+        ...     lock.lock_counter
+        ...     lock.release()
+        ...     lock.is_locked
+        <...AcquireReturnProxy object at ...>
+        1
+        False
+        """
+        if self._context.lock_file_fd is None:
+            return
+
+        if force:
+            self._context.lock_counter = 0
+            self._release()
+        else:
+            self._context.lock_counter -= 1
+            if self._context.lock_counter <= 0:
+                self._context.lock_counter = 0
+                self._release()
+
+    def __enter__(self) -> Self:
+        """Enter context manager, acquiring the lock."""
+        self.acquire()
+        return self
+
+    def __exit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
+    ) -> None:
+        """Exit context manager, releasing the lock."""
+        self.release()
+
+    def __repr__(self) -> str:
+        """Return a string representation of the lock."""
+        state = "locked" if self.is_locked else "unlocked"
+        return f"<FileLock lock_file={self.lock_file!r} state={state}>"
+
+
+# =============================================================================
+# AsyncFileLock
+# =============================================================================
+
+
+class AsyncFileLock:
+    """Async file lock wrapping :class:`FileLock` with async polling.
+
+    This class provides an async interface to the underlying :class:`FileLock`,
+    using ``asyncio.sleep`` instead of blocking ``time.sleep`` during
+    acquisition polling. This allows other coroutines to run while waiting
+    for the lock.
+
+    Parameters
+    ----------
+    lock_file : str or PathLike
+        Path to the lock file. Parent directory must exist.
+    timeout : float, default=-1.0
+        Maximum time to wait for lock acquisition in seconds.
+        Use -1 for infinite wait, 0 for non-blocking.
+    poll_interval : float, default=0.05
+        Time between acquisition attempts in seconds.
+    stale_timeout : float, default=300.0
+        Locks older than this (in seconds) are considered stale.
+    mode : int, default=0o644
+        File permission mode for the lock file.
+
+    Examples
+    --------
+    Async context manager:
+
+    >>> async def example():
+    ...     import tempfile
+    ...     import pathlib
+    ...
with tempfile.TemporaryDirectory() as tmpdir: + ... lock_path = pathlib.Path(tmpdir) / "my.lock" + ... async with AsyncFileLock(lock_path) as lock: + ... return lock.is_locked + >>> asyncio.run(example()) + True + + Explicit acquire/release: + + >>> async def example2(): + ... import tempfile + ... import pathlib + ... with tempfile.TemporaryDirectory() as tmpdir: + ... lock_path = pathlib.Path(tmpdir) / "my.lock" + ... lock = AsyncFileLock(lock_path) + ... await lock.acquire() + ... try: + ... return lock.is_locked + ... finally: + ... await lock.release() + >>> asyncio.run(example2()) + True + + See Also + -------- + FileLock : Synchronous version. + """ + + def __init__( + self, + lock_file: str | os.PathLike[str], + timeout: float = -1.0, + poll_interval: float = 0.05, + stale_timeout: float = 300.0, + mode: int = 0o644, + ) -> None: + self._sync_lock = FileLock( + lock_file=lock_file, + timeout=timeout, + poll_interval=poll_interval, + stale_timeout=stale_timeout, + mode=mode, + ) + + @property + def lock_file(self) -> str: + """Return the path to the lock file.""" + return self._sync_lock.lock_file + + @property + def is_locked(self) -> bool: + """Check if the lock is currently held by this instance.""" + return self._sync_lock.is_locked + + @property + def lock_counter(self) -> int: + """Return the number of times this lock has been acquired.""" + return self._sync_lock.lock_counter + + async def acquire( + self, + timeout: float | None = None, + poll_interval: float | None = None, + *, + blocking: bool = True, + ) -> AsyncAcquireReturnProxy: + """Acquire the file lock asynchronously. + + Parameters + ---------- + timeout : float, optional + Override the default timeout for this acquisition. + poll_interval : float, optional + Override the default poll interval for this acquisition. + blocking : bool, default=True + If False, equivalent to timeout=0. + + Returns + ------- + AsyncAcquireReturnProxy + An async context manager that releases the lock on exit. + + Raises + ------ + FileLockTimeout + If the lock cannot be acquired within the timeout. + """ + if not blocking: + timeout = 0 + + ctx = self._sync_lock._context + if timeout is None: + timeout = ctx.timeout + if poll_interval is None: + poll_interval = ctx.poll_interval + + # Reentrant + if ctx.lock_file_fd is not None: + ctx.lock_counter += 1 + return AsyncAcquireReturnProxy(self) + + start_time = time.perf_counter() + + while True: + try: + self._sync_lock._acquire() + ctx.lock_counter = 1 + return AsyncAcquireReturnProxy(self) + except FileExistsError: + pass + + # Check for stale lock + if self._sync_lock._remove_stale_lock(): + continue + + # Check timeout + elapsed = time.perf_counter() - start_time + if timeout >= 0 and elapsed >= timeout: + raise FileLockTimeout(ctx.lock_file, timeout) + + # Async sleep to allow other coroutines to run + await asyncio.sleep(poll_interval) + + async def release(self, force: bool = False) -> None: + """Release the file lock. + + Parameters + ---------- + force : bool, default=False + If True, release the lock even if counter > 1. 
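+
+        Examples
+        --------
+        A minimal sketch of force-release (relies on the reentrant counter
+        described in :meth:`acquire`):
+
+        >>> async def example_force():
+        ...     import tempfile
+        ...     import pathlib
+        ...     with tempfile.TemporaryDirectory() as tmpdir:
+        ...         lock = AsyncFileLock(pathlib.Path(tmpdir) / "my.lock")
+        ...         await lock.acquire()
+        ...         await lock.acquire()  # reentrant: counter == 2
+        ...         await lock.release(force=True)
+        ...         return lock.is_locked
+        >>> asyncio.run(example_force())
+        False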
+ """ + self._sync_lock.release(force=force) + + async def __aenter__(self) -> Self: + """Enter async context manager, acquiring the lock.""" + await self.acquire() + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + """Exit async context manager, releasing the lock.""" + await self.release() + + def __repr__(self) -> str: + """Return a string representation of the lock.""" + state = "locked" if self.is_locked else "unlocked" + return f"" + + +# ============================================================================= +# Convenience Functions +# ============================================================================= + + +def atomic_init( + path: str | os.PathLike[str], + init_fn: t.Callable[[], None], + marker_name: str = ".initialized", + timeout: float = 60.0, + poll_interval: float = 0.05, + stale_timeout: float = 300.0, +) -> bool: + """Atomically initialize a resource using the two-file pattern. + + This function coordinates one-time initialization across multiple processes + using a combination of a lock file and a marker file: + + - **Lock file**: Temporary file held during initialization, deleted after. + - **Marker file**: Permanent file indicating initialization is complete. + + This pattern is useful for expensive one-time setup like cloning + repositories, building caches, or creating database schemas. + + Parameters + ---------- + path : str or PathLike + Directory where the marker file will be created. + init_fn : callable + Function to call for initialization. Called only if not already + initialized. Must be idempotent in case of partial failure. + marker_name : str, default=".initialized" + Name of the marker file within ``path``. + timeout : float, default=60.0 + Maximum time to wait for another process to finish initialization. + poll_interval : float, default=0.05 + Time between lock acquisition attempts. + stale_timeout : float, default=300.0 + Time after which an orphaned lock is considered stale. + + Returns + ------- + bool + True if this call performed initialization, False if another + process did or had already done it. + + Raises + ------ + FileLockTimeout + If initialization by another process doesn't complete within timeout. + + Examples + -------- + >>> import tempfile + >>> import pathlib + >>> def expensive_init(): + ... pass # One-time setup + >>> with tempfile.TemporaryDirectory() as tmpdir: + ... resource_path = pathlib.Path(tmpdir) / "myresource" + ... resource_path.mkdir() + ... # First call does initialization + ... first = atomic_init(resource_path, expensive_init) + ... # Second call sees marker and skips + ... second = atomic_init(resource_path, expensive_init) + ... first, second + (True, False) + + With cleanup on partial failure: + + >>> def init_with_cleanup(): + ... import pathlib + ... import tempfile + ... with tempfile.TemporaryDirectory() as tmpdir: + ... path = pathlib.Path(tmpdir) / "repo" + ... path.mkdir() + ... def do_init(): + ... (path / "data.txt").write_text("hello") + ... atomic_init(path, do_init) + ... return (path / "data.txt").exists() + >>> init_with_cleanup() + True + + See Also + -------- + async_atomic_init : Async version of this function. 
+ """ + path = pathlib.Path(path) + marker = path / marker_name + lock_path = path.parent / f".{path.name}.lock" + + # Fast path: already initialized + if marker.exists(): + return False + + lock = FileLock( + lock_path, + timeout=timeout, + poll_interval=poll_interval, + stale_timeout=stale_timeout, + ) + + with lock: + # Double-check after acquiring lock + if marker.exists(): + return False + + # Clean partial state if needed + if path.exists() and not marker.exists(): + shutil.rmtree(path, ignore_errors=True) + path.mkdir(parents=True, exist_ok=True) + + # Perform initialization + init_fn() + + # Mark as complete + marker.touch() + return True + + +async def async_atomic_init( + path: str | os.PathLike[str], + init_fn: t.Callable[[], None] | t.Callable[[], t.Coroutine[t.Any, t.Any, None]], + marker_name: str = ".initialized", + timeout: float = 60.0, + poll_interval: float = 0.05, + stale_timeout: float = 300.0, +) -> bool: + """Atomically initialize a resource asynchronously. + + Async version of :func:`atomic_init`. Supports both sync and async + ``init_fn`` callables. + + Parameters + ---------- + path : str or PathLike + Directory where the marker file will be created. + init_fn : callable + Sync or async function to call for initialization. + marker_name : str, default=".initialized" + Name of the marker file within ``path``. + timeout : float, default=60.0 + Maximum time to wait for another process to finish initialization. + poll_interval : float, default=0.05 + Time between lock acquisition attempts. + stale_timeout : float, default=300.0 + Time after which an orphaned lock is considered stale. + + Returns + ------- + bool + True if this call performed initialization, False otherwise. + + Raises + ------ + FileLockTimeout + If initialization by another process doesn't complete within timeout. + + Examples + -------- + >>> async def example(): + ... import tempfile + ... import pathlib + ... async def async_init(): + ... await asyncio.sleep(0) # Simulate async work + ... with tempfile.TemporaryDirectory() as tmpdir: + ... path = pathlib.Path(tmpdir) / "resource" + ... path.mkdir() + ... result = await async_atomic_init(path, async_init) + ... return result + >>> asyncio.run(example()) + True + + See Also + -------- + atomic_init : Synchronous version. + """ + import inspect + + path = pathlib.Path(path) + marker = path / marker_name + lock_path = path.parent / f".{path.name}.lock" + + # Fast path + if marker.exists(): + return False + + lock = AsyncFileLock( + lock_path, + timeout=timeout, + poll_interval=poll_interval, + stale_timeout=stale_timeout, + ) + + async with lock: + if marker.exists(): + return False + + if path.exists() and not marker.exists(): + shutil.rmtree(path, ignore_errors=True) + path.mkdir(parents=True, exist_ok=True) + + # Handle both sync and async init functions + result = init_fn() + if inspect.iscoroutine(result): + await result + + marker.touch() + return True diff --git a/src/libvcs/cmd/_async/__init__.py b/src/libvcs/cmd/_async/__init__.py new file mode 100644 index 000000000..5fb7f6d8a --- /dev/null +++ b/src/libvcs/cmd/_async/__init__.py @@ -0,0 +1,21 @@ +"""Async command abstractions for VCS operations. + +This module provides async equivalents of the sync command classes +in :mod:`libvcs.cmd`. + +Note +---- +This is an internal API not covered by versioning policy. 
+""" + +from __future__ import annotations + +from libvcs.cmd._async.git import AsyncGit +from libvcs.cmd._async.hg import AsyncHg +from libvcs.cmd._async.svn import AsyncSvn + +__all__ = [ + "AsyncGit", + "AsyncHg", + "AsyncSvn", +] diff --git a/src/libvcs/cmd/_async/git.py b/src/libvcs/cmd/_async/git.py new file mode 100644 index 000000000..076fcbce6 --- /dev/null +++ b/src/libvcs/cmd/_async/git.py @@ -0,0 +1,1334 @@ +"""Async git commands directly against a local git repo. + +Async equivalent of :mod:`libvcs.cmd.git`. + +Note +---- +This is an internal API not covered by versioning policy. +""" + +from __future__ import annotations + +import pathlib +import typing as t +from collections.abc import Sequence + +from libvcs._internal.async_run import ( + AsyncProgressCallbackProtocol, + async_run, +) +from libvcs._internal.types import StrOrBytesPath, StrPath + +_CMD = StrOrBytesPath | Sequence[StrOrBytesPath] + + +class AsyncGit: + """Run commands directly on a git repository asynchronously. + + Async equivalent of :class:`~libvcs.cmd.git.Git`. + + Parameters + ---------- + path : str | Path + Path to the git repository + progress_callback : AsyncProgressCallbackProtocol, optional + Async callback for progress reporting + + Examples + -------- + >>> async def example(): + ... git = AsyncGit(path=tmp_path) + ... await git.run(['init']) + ... status = await git.status() + ... return 'On branch' in status or 'No commits yet' in status + >>> asyncio.run(example()) + True + """ + + progress_callback: AsyncProgressCallbackProtocol | None = None + + # Sub-commands (will be populated in __init__) + submodule: AsyncGitSubmoduleCmd + remotes: AsyncGitRemoteManager + stash: AsyncGitStashCmd + + def __init__( + self, + *, + path: StrPath, + progress_callback: AsyncProgressCallbackProtocol | None = None, + ) -> None: + """Initialize AsyncGit command wrapper. + + Parameters + ---------- + path : str | Path + Path to the git repository + progress_callback : AsyncProgressCallbackProtocol, optional + Async callback for progress reporting + """ + self.path: pathlib.Path + if isinstance(path, pathlib.Path): + self.path = path + else: + self.path = pathlib.Path(path) + + self.progress_callback = progress_callback + + # Initialize sub-command managers + self.submodule = AsyncGitSubmoduleCmd(path=self.path, cmd=self) + self.remotes = AsyncGitRemoteManager(path=self.path, cmd=self) + self.stash = AsyncGitStashCmd(path=self.path, cmd=self) + + def __repr__(self) -> str: + """Representation of AsyncGit repo command object.""" + return f"" + + async def run( + self, + args: _CMD, + *, + # Normal flags + C: StrOrBytesPath | list[StrOrBytesPath] | None = None, + cwd: StrOrBytesPath | None = None, + git_dir: StrOrBytesPath | None = None, + work_tree: StrOrBytesPath | None = None, + namespace: StrOrBytesPath | None = None, + bare: bool | None = None, + no_replace_objects: bool | None = None, + literal_pathspecs: bool | None = None, + no_optional_locks: bool | None = None, + config: dict[str, t.Any] | None = None, + # Pass-through to async_run() + log_in_real_time: bool = False, + check_returncode: bool = True, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """Run a command for this git repository asynchronously. + + Async equivalent of :meth:`~libvcs.cmd.git.Git.run`. + + Parameters + ---------- + args : list[str] | str + Git subcommand and arguments + cwd : str | Path, optional + Working directory. Defaults to self.path. 
+        config : dict[str, t.Any], optional
+            Git config options to pass via ``git -c <key>=<value>``
+        check_returncode : bool, default True
+            Raise on non-zero exit code
+        timeout : float, optional
+            Timeout in seconds
+
+        Returns
+        -------
+        str
+            Command output
+
+        Examples
+        --------
+        >>> async def example():
+        ...     git = AsyncGit(path=tmp_path)
+        ...     await git.run(['init'])
+        ...     return 'On branch' in await git.run(['status'])
+        >>> asyncio.run(example())
+        True
+        """
+        cli_args: list[str]
+        if isinstance(args, Sequence) and not isinstance(args, (str, bytes)):
+            cli_args = ["git", *[str(a) for a in args]]
+        else:
+            cli_args = ["git", str(args)]
+
+        run_cwd = cwd if cwd is not None else self.path
+
+        # Build flags
+        if C is not None:
+            c_list = [C] if not isinstance(C, list) else C
+            for c in c_list:
+                cli_args.extend(["-C", str(c)])
+        if config is not None:
+            for k, v in config.items():
+                val = "true" if v is True else ("false" if v is False else str(v))
+                # git's top-level flag for one-off config values is `-c`
+                cli_args.extend(["-c", f"{k}={val}"])
+        if git_dir is not None:
+            cli_args.extend(["--git-dir", str(git_dir)])
+        if work_tree is not None:
+            cli_args.extend(["--work-tree", str(work_tree)])
+        if namespace is not None:
+            cli_args.extend(["--namespace", str(namespace)])
+        if bare is True:
+            cli_args.append("--bare")
+        if no_replace_objects is True:
+            cli_args.append("--no-replace-objects")
+        if literal_pathspecs is True:
+            cli_args.append("--literal-pathspecs")
+        if no_optional_locks is True:
+            cli_args.append("--no-optional-locks")
+
+        return await async_run(
+            args=cli_args,
+            cwd=run_cwd,
+            check_returncode=check_returncode,
+            callback=self.progress_callback if log_in_real_time else None,
+            timeout=timeout,
+            **kwargs,
+        )
+
+    async def clone(
+        self,
+        *,
+        url: str,
+        depth: int | None = None,
+        branch: str | None = None,
+        origin: str | None = None,
+        progress: bool | None = None,
+        no_checkout: bool | None = None,
+        quiet: bool | None = None,
+        verbose: bool | None = None,
+        config: dict[str, t.Any] | None = None,
+        log_in_real_time: bool = False,
+        check_returncode: bool | None = None,
+        make_parents: bool | None = True,
+        timeout: float | None = None,
+        **kwargs: t.Any,
+    ) -> str:
+        """Clone a working copy from a git repo asynchronously.
+
+        Async equivalent of :meth:`~libvcs.cmd.git.Git.clone`.
+
+        Parameters
+        ----------
+        url : str
+            Repository URL to clone
+        depth : int, optional
+            Create a shallow clone with history truncated
+        branch : str, optional
+            Branch to checkout after clone
+        origin : str, optional
+            Name for the remote
+        progress : bool, optional
+            Force progress reporting
+        quiet : bool, optional
+            Suppress output
+        make_parents : bool, default True
+            Create parent directories if they don't exist
+        timeout : float, optional
+            Timeout in seconds
+
+        Returns
+        -------
+        str
+            Command output
+
+        Examples
+        --------
+        >>> async def example():
+        ...     repo_path = tmp_path / 'cloned_repo'
+        ...     git = AsyncGit(path=repo_path)
+        ...     url = f'file://{create_git_remote_repo()}'
+        ...     await git.clone(url=url)
+        ...
return (repo_path / '.git').exists() + >>> asyncio.run(example()) + True + """ + if make_parents and not self.path.exists(): + self.path.mkdir(parents=True) + + local_flags: list[str] = [] + if depth is not None: + local_flags.extend(["--depth", str(depth)]) + if branch is not None: + local_flags.extend(["--branch", branch]) + if origin is not None: + local_flags.extend(["--origin", origin]) + if quiet is True: + local_flags.append("--quiet") + if verbose is True: + local_flags.append("--verbose") + if progress is True: + local_flags.append("--progress") + if no_checkout is True: + local_flags.append("--no-checkout") + + return await self.run( + ["clone", *local_flags, url, str(self.path)], + cwd=self.path.parent, + config=config, + log_in_real_time=log_in_real_time, + check_returncode=check_returncode if check_returncode is not None else True, + timeout=timeout, + **kwargs, + ) + + async def fetch( + self, + *, + repository: str | None = None, + refspec: str | list[str] | None = None, + _all: bool | None = None, + append: bool | None = None, + depth: int | None = None, + force: bool | None = None, + prune: bool | None = None, + prune_tags: bool | None = None, + tags: bool | None = None, + no_tags: bool | None = None, + quiet: bool | None = None, + verbose: bool | None = None, + progress: bool | None = None, + log_in_real_time: bool = False, + check_returncode: bool = True, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """Fetch from remote repository asynchronously. + + Async equivalent of :meth:`~libvcs.cmd.git.Git.fetch`. + + Parameters + ---------- + repository : str, optional + Remote name to fetch from + refspec : str | list[str], optional + Refspec(s) to fetch + _all : bool, optional + Fetch all remotes + depth : int, optional + Deepen shallow clone + prune : bool, optional + Remove remote-tracking refs that no longer exist + tags : bool, optional + Fetch all tags + timeout : float, optional + Timeout in seconds + + Returns + ------- + str + Command output + """ + local_flags: list[str] = [] + if _all is True: + local_flags.append("--all") + if append is True: + local_flags.append("--append") + if depth is not None: + local_flags.extend(["--depth", str(depth)]) + if force is True: + local_flags.append("--force") + if prune is True: + local_flags.append("--prune") + if prune_tags is True: + local_flags.append("--prune-tags") + if tags is True: + local_flags.append("--tags") + if no_tags is True: + local_flags.append("--no-tags") + if quiet is True: + local_flags.append("--quiet") + if verbose is True: + local_flags.append("--verbose") + if progress is True: + local_flags.append("--progress") + + args: list[str] = ["fetch", *local_flags] + if repository is not None: + args.append(repository) + if refspec is not None: + if isinstance(refspec, list): + args.extend(refspec) + else: + args.append(refspec) + + return await self.run( + args, + log_in_real_time=log_in_real_time, + check_returncode=check_returncode, + timeout=timeout, + **kwargs, + ) + + async def checkout( + self, + *, + branch: str | None = None, + pathspec: str | list[str] | None = None, + force: bool | None = None, + quiet: bool | None = None, + detach: bool | None = None, + track: bool | str | None = None, + check_returncode: bool = True, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """Checkout a branch or paths asynchronously. + + Async equivalent of :meth:`~libvcs.cmd.git.Git.checkout`. 
+ + Parameters + ---------- + branch : str, optional + Branch name to checkout + pathspec : str | list[str], optional + Path(s) to checkout + force : bool, optional + Force checkout (discard local changes) + quiet : bool, optional + Suppress output + detach : bool, optional + Detach HEAD at named commit + timeout : float, optional + Timeout in seconds + + Returns + ------- + str + Command output + """ + local_flags: list[str] = [] + if force is True: + local_flags.append("--force") + if quiet is True: + local_flags.append("--quiet") + if detach is True: + local_flags.append("--detach") + if track is True: + local_flags.append("--track") + elif isinstance(track, str): + local_flags.append(f"--track={track}") + + args: list[str] = ["checkout", *local_flags] + if branch is not None: + args.append(branch) + if pathspec is not None: + args.append("--") + if isinstance(pathspec, list): + args.extend(pathspec) + else: + args.append(pathspec) + + return await self.run( + args, + check_returncode=check_returncode, + timeout=timeout, + **kwargs, + ) + + async def status( + self, + *, + short: bool | None = None, + branch: bool | None = None, + porcelain: bool | str | None = None, + untracked_files: str | None = None, + ignored: bool | None = None, + check_returncode: bool = True, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """Show working tree status asynchronously. + + Async equivalent of :meth:`~libvcs.cmd.git.Git.status`. + + Parameters + ---------- + short : bool, optional + Give output in short format + branch : bool, optional + Show branch info even in short format + porcelain : bool | str, optional + Machine-readable format + untracked_files : str, optional + Untracked files mode: "no", "normal", "all" + timeout : float, optional + Timeout in seconds + + Returns + ------- + str + Status output + """ + local_flags: list[str] = [] + if short is True: + local_flags.append("--short") + if branch is True: + local_flags.append("--branch") + if porcelain is True: + local_flags.append("--porcelain") + elif isinstance(porcelain, str): + local_flags.append(f"--porcelain={porcelain}") + if untracked_files is not None: + local_flags.append(f"--untracked-files={untracked_files}") + if ignored is True: + local_flags.append("--ignored") + + return await self.run( + ["status", *local_flags], + check_returncode=check_returncode, + timeout=timeout, + **kwargs, + ) + + async def rev_parse( + self, + *, + args: str | list[str] | None = None, + verify: bool | None = None, + short: bool | int | None = None, + abbrev_ref: bool | str | None = None, + show_toplevel: bool | None = None, + git_dir: bool | None = None, + check_returncode: bool = True, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """Parse git references asynchronously. + + Async equivalent of :meth:`~libvcs.cmd.git.Git.rev_parse`. 
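+
+        Commonly used to resolve a ref such as ``HEAD`` to an object name.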
+ + Parameters + ---------- + args : str | list[str], optional + Revision arguments to parse + verify : bool, optional + Verify the parameter is a valid object name + short : bool | int, optional + Use short object name + abbrev_ref : bool | str, optional + Use abbreviated ref format + show_toplevel : bool, optional + Show path of top-level directory + git_dir : bool, optional + Show path of .git directory + timeout : float, optional + Timeout in seconds + + Returns + ------- + str + Parsed reference + """ + local_flags: list[str] = [] + if verify is True: + local_flags.append("--verify") + if short is True: + local_flags.append("--short") + elif isinstance(short, int): + local_flags.append(f"--short={short}") + if abbrev_ref is True: + local_flags.append("--abbrev-ref") + elif isinstance(abbrev_ref, str): + local_flags.append(f"--abbrev-ref={abbrev_ref}") + if show_toplevel is True: + local_flags.append("--show-toplevel") + if git_dir is True: + local_flags.append("--git-dir") + + cmd_args: list[str] = ["rev-parse", *local_flags] + if args is not None: + if isinstance(args, list): + cmd_args.extend(args) + else: + cmd_args.append(args) + + return await self.run( + cmd_args, + check_returncode=check_returncode, + timeout=timeout, + **kwargs, + ) + + async def symbolic_ref( + self, + *, + name: str, + ref: str | None = None, + short: bool | None = None, + quiet: bool | None = None, + delete: bool | None = None, + check_returncode: bool = True, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """Read, modify, or delete symbolic refs asynchronously. + + Async equivalent of :meth:`~libvcs.cmd.git.Git.symbolic_ref`. + + Parameters + ---------- + name : str + Symbolic ref name + ref : str, optional + Ref to set symbolic ref to + short : bool, optional + Shorten ref name + quiet : bool, optional + Suppress error messages + delete : bool, optional + Delete symbolic ref + timeout : float, optional + Timeout in seconds + + Returns + ------- + str + Symbolic ref value + """ + local_flags: list[str] = [] + if short is True: + local_flags.append("--short") + if quiet is True: + local_flags.append("--quiet") + if delete is True: + local_flags.append("--delete") + + cmd_args: list[str] = ["symbolic-ref", *local_flags, name] + if ref is not None: + cmd_args.append(ref) + + return await self.run( + cmd_args, + check_returncode=check_returncode, + timeout=timeout, + **kwargs, + ) + + async def rev_list( + self, + *, + commit: str | list[str] | None = None, + max_count: int | None = None, + abbrev_commit: bool | None = None, + check_returncode: bool = True, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """List commit objects asynchronously. + + Async equivalent of :meth:`~libvcs.cmd.git.Git.rev_list`. 
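+
+        Commits are listed newest-first, one object name per line.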
+ + Parameters + ---------- + commit : str | list[str], optional + Commit(s) to list + max_count : int, optional + Limit output to n commits + abbrev_commit : bool, optional + Show abbreviated commit IDs + timeout : float, optional + Timeout in seconds + + Returns + ------- + str + List of commit objects + """ + local_flags: list[str] = [] + if max_count is not None: + local_flags.extend(["--max-count", str(max_count)]) + if abbrev_commit is True: + local_flags.append("--abbrev-commit") + + cmd_args: list[str] = ["rev-list", *local_flags] + if commit is not None: + if isinstance(commit, list): + cmd_args.extend(commit) + else: + cmd_args.append(commit) + + return await self.run( + cmd_args, + check_returncode=check_returncode, + timeout=timeout, + **kwargs, + ) + + async def show_ref( + self, + *, + pattern: str | list[str] | None = None, + heads: bool | None = None, + tags: bool | None = None, + hash_only: bool | None = None, + verify: bool | None = None, + quiet: bool | None = None, + check_returncode: bool = True, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """List references asynchronously. + + Async equivalent of :meth:`~libvcs.cmd.git.Git.show_ref`. + + Parameters + ---------- + pattern : str | list[str], optional + Pattern(s) to filter refs + heads : bool, optional + Show only heads + tags : bool, optional + Show only tags + hash_only : bool, optional + Show only hash + verify : bool, optional + Verify ref exists + quiet : bool, optional + Suppress output (just exit status) + timeout : float, optional + Timeout in seconds + + Returns + ------- + str + Reference list + """ + local_flags: list[str] = [] + if heads is True: + local_flags.append("--heads") + if tags is True: + local_flags.append("--tags") + if hash_only is True: + local_flags.append("--hash") + if verify is True: + local_flags.append("--verify") + if quiet is True: + local_flags.append("--quiet") + + cmd_args: list[str] = ["show-ref", *local_flags] + if pattern is not None: + if isinstance(pattern, list): + cmd_args.extend(pattern) + else: + cmd_args.append(pattern) + + return await self.run( + cmd_args, + check_returncode=check_returncode, + timeout=timeout, + **kwargs, + ) + + async def reset( + self, + *, + pathspec: str | list[str] | None = None, + soft: bool | None = None, + mixed: bool | None = None, + hard: bool | None = None, + merge: bool | None = None, + keep: bool | None = None, + quiet: bool | None = None, + check_returncode: bool = True, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """Reset current HEAD asynchronously. + + Async equivalent of :meth:`~libvcs.cmd.git.Git.reset`. 
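+
+        Note that ``hard=True`` discards uncommitted working-tree changes.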
+ + Parameters + ---------- + pathspec : str | list[str], optional + Commit or paths to reset + soft : bool, optional + Reset HEAD only + mixed : bool, optional + Reset HEAD and index (default) + hard : bool, optional + Reset HEAD, index, and working tree + quiet : bool, optional + Suppress output + timeout : float, optional + Timeout in seconds + + Returns + ------- + str + Command output + """ + local_flags: list[str] = [] + if soft is True: + local_flags.append("--soft") + if mixed is True: + local_flags.append("--mixed") + if hard is True: + local_flags.append("--hard") + if merge is True: + local_flags.append("--merge") + if keep is True: + local_flags.append("--keep") + if quiet is True: + local_flags.append("--quiet") + + cmd_args: list[str] = ["reset", *local_flags] + if pathspec is not None: + if isinstance(pathspec, list): + cmd_args.extend(pathspec) + else: + cmd_args.append(pathspec) + + return await self.run( + cmd_args, + check_returncode=check_returncode, + timeout=timeout, + **kwargs, + ) + + async def rebase( + self, + *, + upstream: str | None = None, + onto: str | None = None, + abort: bool | None = None, + _continue: bool | None = None, + skip: bool | None = None, + interactive: bool | None = None, + quiet: bool | None = None, + check_returncode: bool = True, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """Rebase commits asynchronously. + + Async equivalent of :meth:`~libvcs.cmd.git.Git.rebase`. + + Parameters + ---------- + upstream : str, optional + Upstream branch to rebase onto + onto : str, optional + Starting point for rebase + abort : bool, optional + Abort current rebase + _continue : bool, optional + Continue current rebase + skip : bool, optional + Skip current patch + interactive : bool, optional + Interactive rebase (use with caution in async) + quiet : bool, optional + Suppress output + timeout : float, optional + Timeout in seconds + + Returns + ------- + str + Command output + """ + local_flags: list[str] = [] + if onto is not None: + local_flags.extend(["--onto", onto]) + if abort is True: + local_flags.append("--abort") + if _continue is True: + local_flags.append("--continue") + if skip is True: + local_flags.append("--skip") + if interactive is True: + local_flags.append("--interactive") + if quiet is True: + local_flags.append("--quiet") + + cmd_args: list[str] = ["rebase", *local_flags] + if upstream is not None: + cmd_args.append(upstream) + + return await self.run( + cmd_args, + check_returncode=check_returncode, + timeout=timeout, + **kwargs, + ) + + async def version( + self, + *, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """Get git version asynchronously. + + Returns + ------- + str + Git version string + """ + return await self.run(["version"], timeout=timeout, **kwargs) + + +class AsyncGitSubmoduleCmd: + """Async git submodule commands. + + Async equivalent of :class:`~libvcs.cmd.git.GitSubmoduleCmd`. + """ + + def __init__( + self, + *, + path: StrPath, + cmd: AsyncGit, + ) -> None: + """Initialize submodule command wrapper.""" + self.path: pathlib.Path + if isinstance(path, pathlib.Path): + self.path = path + else: + self.path = pathlib.Path(path) + self.cmd = cmd + + async def init( + self, + *, + path: StrPath | list[StrPath] | None = None, + check_returncode: bool = True, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """Initialize submodules asynchronously. 
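+
+        Async equivalent of :meth:`~libvcs.cmd.git.GitSubmoduleCmd.init`.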
+ + Parameters + ---------- + path : str | Path | list, optional + Submodule path(s) to initialize + + Returns + ------- + str + Command output + """ + cmd_args: list[str] = ["submodule", "init"] + if path is not None: + if isinstance(path, list): + cmd_args.extend([str(p) for p in path]) + else: + cmd_args.append(str(path)) + + return await self.cmd.run( + cmd_args, + check_returncode=check_returncode, + timeout=timeout, + **kwargs, + ) + + async def update( + self, + *, + path: StrPath | list[StrPath] | None = None, + init: bool | None = None, + recursive: bool | None = None, + force: bool | None = None, + remote: bool | None = None, + log_in_real_time: bool = False, + check_returncode: bool = True, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """Update submodules asynchronously. + + Parameters + ---------- + path : str | Path | list, optional + Submodule path(s) to update + init : bool, optional + Initialize uninitialized submodules + recursive : bool, optional + Update nested submodules + force : bool, optional + Force checkout + + Returns + ------- + str + Command output + """ + local_flags: list[str] = [] + if init is True: + local_flags.append("--init") + if recursive is True: + local_flags.append("--recursive") + if force is True: + local_flags.append("--force") + if remote is True: + local_flags.append("--remote") + + cmd_args: list[str] = ["submodule", "update", *local_flags] + if path is not None: + cmd_args.append("--") + if isinstance(path, list): + cmd_args.extend([str(p) for p in path]) + else: + cmd_args.append(str(path)) + + return await self.cmd.run( + cmd_args, + log_in_real_time=log_in_real_time, + check_returncode=check_returncode, + timeout=timeout, + **kwargs, + ) + + +class AsyncGitRemoteManager: + """Async git remote management commands. + + Async equivalent of :class:`~libvcs.cmd.git.GitRemoteManager`. + """ + + def __init__( + self, + *, + path: StrPath, + cmd: AsyncGit, + ) -> None: + """Initialize remote manager.""" + self.path: pathlib.Path + if isinstance(path, pathlib.Path): + self.path = path + else: + self.path = pathlib.Path(path) + self.cmd = cmd + + async def ls( + self, + *, + timeout: float | None = None, + **kwargs: t.Any, + ) -> list[str]: + """List remote names asynchronously. + + Returns + ------- + list[str] + List of remote names + """ + output = await self.cmd.run(["remote"], timeout=timeout, **kwargs) + if not output.strip(): + return [] + return output.strip().split("\n") + + async def show( + self, + *, + name: str | None = None, + verbose: bool | None = None, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """Show remotes asynchronously. + + Parameters + ---------- + name : str, optional + Remote name to show details for + verbose : bool, optional + Show URLs + + Returns + ------- + str + Remote information + """ + local_flags: list[str] = [] + if verbose is True: + local_flags.append("--verbose") + + cmd_args: list[str] = ["remote", *local_flags] + if name is not None: + cmd_args.extend(["show", name]) + + return await self.cmd.run(cmd_args, timeout=timeout, **kwargs) + + async def add( + self, + *, + name: str, + url: str, + fetch: bool | None = None, + tags: bool | None = None, + no_tags: bool | None = None, + check_returncode: bool = True, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """Add a remote asynchronously. 
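+
+        Async equivalent of :meth:`~libvcs.cmd.git.GitRemoteManager.add`.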
+ + Parameters + ---------- + name : str + Remote name + url : str + Remote URL + fetch : bool, optional + Fetch after adding + tags : bool, optional + Import tags + no_tags : bool, optional + Don't import tags + + Returns + ------- + str + Command output + """ + local_flags: list[str] = [] + if fetch is True: + local_flags.append("--fetch") + if tags is True: + local_flags.append("--tags") + if no_tags is True: + local_flags.append("--no-tags") + + return await self.cmd.run( + ["remote", "add", *local_flags, name, url], + check_returncode=check_returncode, + timeout=timeout, + **kwargs, + ) + + async def remove( + self, + *, + name: str, + check_returncode: bool = True, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """Remove a remote asynchronously. + + Parameters + ---------- + name : str + Remote name to remove + + Returns + ------- + str + Command output + """ + return await self.cmd.run( + ["remote", "remove", name], + check_returncode=check_returncode, + timeout=timeout, + **kwargs, + ) + + async def get_url( + self, + *, + name: str, + push: bool | None = None, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """Get URL for a remote asynchronously. + + Parameters + ---------- + name : str + Remote name + push : bool, optional + Get push URL + + Returns + ------- + str + Remote URL + """ + local_flags: list[str] = [] + if push is True: + local_flags.append("--push") + + return await self.cmd.run( + ["remote", "get-url", *local_flags, name], + timeout=timeout, + **kwargs, + ) + + +class AsyncGitStashCmd: + """Async git stash commands. + + Async equivalent of :class:`~libvcs.cmd.git.GitStashCmd`. + """ + + def __init__( + self, + *, + path: StrPath, + cmd: AsyncGit, + ) -> None: + """Initialize stash command wrapper.""" + self.path: pathlib.Path + if isinstance(path, pathlib.Path): + self.path = path + else: + self.path = pathlib.Path(path) + self.cmd = cmd + + async def ls( + self, + *, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """List stashes asynchronously. + + Returns + ------- + str + Stash list + """ + return await self.cmd.run(["stash", "list"], timeout=timeout, **kwargs) + + async def save( + self, + *, + message: str | None = None, + keep_index: bool | None = None, + include_untracked: bool | None = None, + all_files: bool | None = None, + quiet: bool | None = None, + check_returncode: bool = True, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """Save changes to stash asynchronously. 
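+
+        Example (an illustrative doctest; stages a scratch file inside the
+        ``example_git_repo`` doctest fixture, then stashes it):
+
+        >>> async def example():
+        ...     git = AsyncGit(path=example_git_repo.path)
+        ...     (git.path / 'scratch.txt').write_text('wip', encoding='utf-8')
+        ...     await git.run(['add', 'scratch.txt'])
+        ...     stash = AsyncGitStashCmd(path=git.path, cmd=git)
+        ...     return await stash.save(message='example stash')
+        >>> asyncio.run(example())  # doctest: +ELLIPSIS
+        'Saved working directory and index state ...'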
+ + Parameters + ---------- + message : str, optional + Stash message + keep_index : bool, optional + Keep staged changes in index + include_untracked : bool, optional + Include untracked files + all_files : bool, optional + Include ignored files too + quiet : bool, optional + Suppress output + + Returns + ------- + str + Command output + """ + local_flags: list[str] = [] + if keep_index is True: + local_flags.append("--keep-index") + if include_untracked is True: + local_flags.append("--include-untracked") + if all_files is True: + local_flags.append("--all") + if quiet is True: + local_flags.append("--quiet") + + cmd_args: list[str] = ["stash", "save", *local_flags] + if message is not None: + cmd_args.append(message) + + return await self.cmd.run( + cmd_args, + check_returncode=check_returncode, + timeout=timeout, + **kwargs, + ) + + async def pop( + self, + *, + stash: str | None = None, + index: bool | None = None, + quiet: bool | None = None, + check_returncode: bool = True, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """Pop stash asynchronously. + + Parameters + ---------- + stash : str, optional + Stash to pop (defaults to latest) + index : bool, optional + Also restore index + quiet : bool, optional + Suppress output + + Returns + ------- + str + Command output + """ + local_flags: list[str] = [] + if index is True: + local_flags.append("--index") + if quiet is True: + local_flags.append("--quiet") + + cmd_args: list[str] = ["stash", "pop", *local_flags] + if stash is not None: + cmd_args.append(stash) + + return await self.cmd.run( + cmd_args, + check_returncode=check_returncode, + timeout=timeout, + **kwargs, + ) + + async def drop( + self, + *, + stash: str | None = None, + quiet: bool | None = None, + check_returncode: bool = True, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """Drop stash asynchronously. + + Parameters + ---------- + stash : str, optional + Stash to drop (defaults to latest) + quiet : bool, optional + Suppress output + + Returns + ------- + str + Command output + """ + local_flags: list[str] = [] + if quiet is True: + local_flags.append("--quiet") + + cmd_args: list[str] = ["stash", "drop", *local_flags] + if stash is not None: + cmd_args.append(stash) + + return await self.cmd.run( + cmd_args, + check_returncode=check_returncode, + timeout=timeout, + **kwargs, + ) + + async def clear( + self, + *, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """Clear all stashes asynchronously. + + Returns + ------- + str + Command output + """ + return await self.cmd.run(["stash", "clear"], timeout=timeout, **kwargs) diff --git a/src/libvcs/cmd/_async/hg.py b/src/libvcs/cmd/_async/hg.py new file mode 100644 index 000000000..27d17fd8f --- /dev/null +++ b/src/libvcs/cmd/_async/hg.py @@ -0,0 +1,373 @@ +"""Async hg (Mercurial) commands directly against a local mercurial repo. + +Async equivalent of :mod:`libvcs.cmd.hg`. + +Note +---- +This is an internal API not covered by versioning policy. 
+""" + +from __future__ import annotations + +import enum +import pathlib +import typing as t +from collections.abc import Sequence + +from libvcs._internal.async_run import ( + AsyncProgressCallbackProtocol, + async_run, +) +from libvcs._internal.types import StrOrBytesPath, StrPath + +_CMD = StrOrBytesPath | Sequence[StrOrBytesPath] + + +class HgColorType(enum.Enum): + """CLI Color enum for Mercurial.""" + + boolean = "boolean" + always = "always" + auto = "auto" + never = "never" + debug = "debug" + + +class HgPagerType(enum.Enum): + """CLI Pagination enum for Mercurial.""" + + boolean = "boolean" + always = "always" + auto = "auto" + never = "never" + + +class AsyncHg: + """Run commands directly on a Mercurial repository asynchronously. + + Async equivalent of :class:`~libvcs.cmd.hg.Hg`. + + Parameters + ---------- + path : str | Path + Path to the hg repository + progress_callback : AsyncProgressCallbackProtocol, optional + Async callback for progress reporting + + Examples + -------- + >>> async def example(): + ... repo_path = tmp_path / 'hg_repo' + ... hg = AsyncHg(path=repo_path) + ... url = f'file://{create_hg_remote_repo()}' + ... await hg.clone(url=url) + ... return (repo_path / '.hg').exists() + >>> asyncio.run(example()) + True + """ + + progress_callback: AsyncProgressCallbackProtocol | None = None + + def __init__( + self, + *, + path: StrPath, + progress_callback: AsyncProgressCallbackProtocol | None = None, + ) -> None: + """Initialize AsyncHg command wrapper. + + Parameters + ---------- + path : str | Path + Path to the hg repository + progress_callback : AsyncProgressCallbackProtocol, optional + Async callback for progress reporting + """ + self.path: pathlib.Path + if isinstance(path, pathlib.Path): + self.path = path + else: + self.path = pathlib.Path(path) + + self.progress_callback = progress_callback + + def __repr__(self) -> str: + """Representation of AsyncHg repo command object.""" + return f"" + + async def run( + self, + args: _CMD, + *, + config: str | None = None, + repository: str | None = None, + quiet: bool | None = None, + _help: bool | None = None, + encoding: str | None = None, + encoding_mode: str | None = None, + verbose: bool | None = None, + traceback: bool | None = None, + debug: bool | None = None, + debugger: bool | None = None, + profile: bool | None = None, + version: bool | None = None, + hidden: bool | None = None, + time: bool | None = None, + pager: HgPagerType | None = None, + color: HgColorType | None = None, + # Pass-through to async_run() + cwd: StrOrBytesPath | None = None, + log_in_real_time: bool = False, + check_returncode: bool = True, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """Run a command for this Mercurial repository asynchronously. + + Async equivalent of :meth:`~libvcs.cmd.hg.Hg.run`. + + Parameters + ---------- + args : list[str] | str + Hg subcommand and arguments + quiet : bool, optional + -q / --quiet + repository : str, optional + --repository REPO + cwd : str | Path, optional + Working directory. Defaults to self.path. 
+ verbose : bool, optional + -v / --verbose + color : HgColorType, optional + --color + debug : bool, optional + --debug + config : str, optional + --config CONFIG, section.name=value + check_returncode : bool, default True + Raise on non-zero exit code + timeout : float, optional + Timeout in seconds + + Returns + ------- + str + Command output + """ + cli_args: list[str] + if isinstance(args, Sequence) and not isinstance(args, (str, bytes)): + cli_args = ["hg", *[str(a) for a in args]] + else: + cli_args = ["hg", str(args)] + + run_cwd = cwd if cwd is not None else self.path + + # Build flags + if repository is not None: + cli_args.extend(["--repository", repository]) + if config is not None: + cli_args.extend(["--config", config]) + if pager is not None: + cli_args.extend(["--pager", pager.value]) + if color is not None: + cli_args.extend(["--color", color.value]) + if verbose is True: + cli_args.append("--verbose") + if quiet is True: + cli_args.append("--quiet") + if debug is True: + cli_args.append("--debug") + if debugger is True: + cli_args.append("--debugger") + if traceback is True: + cli_args.append("--traceback") + if time is True: + cli_args.append("--time") + if profile is True: + cli_args.append("--profile") + if version is True: + cli_args.append("--version") + if _help is True: + cli_args.append("--help") + + return await async_run( + cli_args, + cwd=run_cwd, + callback=self.progress_callback if log_in_real_time else None, + check_returncode=check_returncode, + timeout=timeout, + **kwargs, + ) + + async def clone( + self, + *, + url: str, + no_update: bool | None = None, + update_rev: str | None = None, + rev: str | None = None, + branch: str | None = None, + ssh: str | None = None, + remote_cmd: str | None = None, + pull: bool | None = None, + stream: bool | None = None, + insecure: bool | None = None, + quiet: bool | None = None, + # Special behavior + make_parents: bool | None = True, + # Pass-through + log_in_real_time: bool = False, + check_returncode: bool = True, + timeout: float | None = None, + ) -> str: + """Clone a working copy from a mercurial repo asynchronously. + + Async equivalent of :meth:`~libvcs.cmd.hg.Hg.clone`. 
+ + Parameters + ---------- + url : str + URL of the repository to clone + no_update : bool, optional + Don't update the working directory + rev : str, optional + Revision to clone + branch : str, optional + Branch to clone + ssh : str, optional + SSH command to use + make_parents : bool, default True + Creates checkout directory if it doesn't exist + check_returncode : bool, default True + Raise on non-zero exit code + timeout : float, optional + Timeout in seconds + + Returns + ------- + str + Command output + """ + required_flags: list[str] = [url, str(self.path)] + local_flags: list[str] = [] + + if ssh is not None: + local_flags.extend(["--ssh", ssh]) + if remote_cmd is not None: + local_flags.extend(["--remotecmd", remote_cmd]) + if rev is not None: + local_flags.extend(["--rev", rev]) + if branch is not None: + local_flags.extend(["--branch", branch]) + if no_update is True: + local_flags.append("--noupdate") + if pull is True: + local_flags.append("--pull") + if stream is True: + local_flags.append("--stream") + if insecure is True: + local_flags.append("--insecure") + if quiet is True: + local_flags.append("--quiet") + + # libvcs special behavior + if make_parents and not self.path.exists(): + self.path.mkdir(parents=True) + + return await self.run( + ["clone", *local_flags, "--", *required_flags], + log_in_real_time=log_in_real_time, + check_returncode=check_returncode, + timeout=timeout, + ) + + async def update( + self, + quiet: bool | None = None, + verbose: bool | None = None, + # Pass-through + log_in_real_time: bool = False, + check_returncode: bool = True, + timeout: float | None = None, + ) -> str: + """Update working directory asynchronously. + + Async equivalent of :meth:`~libvcs.cmd.hg.Hg.update`. + + Parameters + ---------- + quiet : bool, optional + Suppress output + verbose : bool, optional + Enable verbose output + check_returncode : bool, default True + Raise on non-zero exit code + timeout : float, optional + Timeout in seconds + + Returns + ------- + str + Command output + """ + local_flags: list[str] = [] + + if quiet: + local_flags.append("--quiet") + if verbose: + local_flags.append("--verbose") + + return await self.run( + ["update", *local_flags], + log_in_real_time=log_in_real_time, + check_returncode=check_returncode, + timeout=timeout, + ) + + async def pull( + self, + quiet: bool | None = None, + verbose: bool | None = None, + update: bool | None = None, + # Pass-through + log_in_real_time: bool = False, + check_returncode: bool = True, + timeout: float | None = None, + ) -> str: + """Pull changes from remote asynchronously. + + Async equivalent of :meth:`~libvcs.cmd.hg.Hg.pull`. 
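+
+        Example (an illustrative doctest; clones a throwaway repo via the
+        doctest-namespace fixtures, then pulls with ``update=True``):
+
+        >>> async def example():
+        ...     hg = AsyncHg(path=tmp_path / 'hg_pull_repo')
+        ...     await hg.clone(url=f'file://{create_hg_remote_repo()}')
+        ...     return await hg.pull(update=True)
+        >>> asyncio.run(example())  # doctest: +ELLIPSIS
+        'pulling from ...'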
+
+        Parameters
+        ----------
+        quiet : bool, optional
+            Suppress output
+        verbose : bool, optional
+            Enable verbose output
+        update : bool, optional
+            Update to new branch head after pull
+        check_returncode : bool, default True
+            Raise on non-zero exit code
+        timeout : float, optional
+            Timeout in seconds
+
+        Returns
+        -------
+        str
+            Command output
+        """
+        local_flags: list[str] = []
+
+        if quiet:
+            local_flags.append("--quiet")
+        if verbose:
+            local_flags.append("--verbose")
+        if update:
+            local_flags.append("--update")
+
+        return await self.run(
+            ["pull", *local_flags],
+            log_in_real_time=log_in_real_time,
+            check_returncode=check_returncode,
+            timeout=timeout,
+        )
diff --git a/src/libvcs/cmd/_async/svn.py b/src/libvcs/cmd/_async/svn.py
new file mode 100644
index 000000000..482e88de9
--- /dev/null
+++ b/src/libvcs/cmd/_async/svn.py
@@ -0,0 +1,384 @@
+"""Async svn (subversion) commands directly against SVN working copy.
+
+Async equivalent of :mod:`libvcs.cmd.svn`.
+
+Note
+----
+This is an internal API not covered by versioning policy.
+"""
+
+from __future__ import annotations
+
+import pathlib
+import typing as t
+from collections.abc import Sequence
+
+from libvcs._internal.async_run import (
+    AsyncProgressCallbackProtocol,
+    async_run,
+)
+from libvcs._internal.types import StrOrBytesPath, StrPath
+
+_CMD = StrOrBytesPath | Sequence[StrOrBytesPath]
+
+DepthLiteral = t.Literal["infinity", "empty", "files", "immediates"] | None
+RevisionLiteral = t.Literal["HEAD", "BASE", "COMMITTED", "PREV"] | None
+
+
+class AsyncSvn:
+    """Run commands directly on a Subversion working copy asynchronously.
+
+    Async equivalent of :class:`~libvcs.cmd.svn.Svn`.
+
+    Parameters
+    ----------
+    path : str | Path
+        Path to the SVN working copy
+    progress_callback : AsyncProgressCallbackProtocol, optional
+        Async callback for progress reporting
+
+    Examples
+    --------
+    >>> async def example():
+    ...     repo_path = tmp_path / 'svn_wc'
+    ...     svn = AsyncSvn(path=repo_path)
+    ...     url = f'file://{create_svn_remote_repo()}'
+    ...     await svn.checkout(url=url)
+    ...     return (repo_path / '.svn').exists()
+    >>> asyncio.run(example())
+    True
+    """
+
+    progress_callback: AsyncProgressCallbackProtocol | None = None
+
+    def __init__(
+        self,
+        *,
+        path: StrPath,
+        progress_callback: AsyncProgressCallbackProtocol | None = None,
+    ) -> None:
+        """Initialize AsyncSvn command wrapper.
+
+        Parameters
+        ----------
+        path : str | Path
+            Path to the SVN working copy
+        progress_callback : AsyncProgressCallbackProtocol, optional
+            Async callback for progress reporting
+        """
+        self.path: pathlib.Path
+        if isinstance(path, pathlib.Path):
+            self.path = path
+        else:
+            self.path = pathlib.Path(path)
+
+        self.progress_callback = progress_callback
+
+    def __repr__(self) -> str:
+        """Representation of AsyncSvn command object."""
+        return f"<AsyncSvn path={self.path}>"
+
+    async def run(
+        self,
+        args: _CMD,
+        *,
+        quiet: bool | None = None,
+        username: str | None = None,
+        password: str | None = None,
+        no_auth_cache: bool | None = None,
+        non_interactive: bool | None = True,
+        trust_server_cert: bool | None = None,
+        config_dir: pathlib.Path | None = None,
+        config_option: pathlib.Path | None = None,
+        # Pass-through to async_run()
+        cwd: StrOrBytesPath | None = None,
+        log_in_real_time: bool = False,
+        check_returncode: bool = True,
+        timeout: float | None = None,
+        **kwargs: t.Any,
+    ) -> str:
+        """Run a command for this SVN working copy asynchronously.
+
+        Async equivalent of :meth:`~libvcs.cmd.svn.Svn.run`.
+ + Parameters + ---------- + args : list[str] | str + SVN subcommand and arguments + quiet : bool, optional + -q / --quiet + username : str, optional + --username + password : str, optional + --password + no_auth_cache : bool, optional + --no-auth-cache + non_interactive : bool, default True + --non-interactive + trust_server_cert : bool, optional + --trust-server-cert + config_dir : Path, optional + --config-dir + cwd : str | Path, optional + Working directory. Defaults to self.path. + check_returncode : bool, default True + Raise on non-zero exit code + timeout : float, optional + Timeout in seconds + + Returns + ------- + str + Command output + """ + cli_args: list[str] + if isinstance(args, Sequence) and not isinstance(args, (str, bytes)): + cli_args = ["svn", *[str(a) for a in args]] + else: + cli_args = ["svn", str(args)] + + run_cwd = cwd if cwd is not None else self.path + + # Build flags + if no_auth_cache is True: + cli_args.append("--no-auth-cache") + if non_interactive is True: + cli_args.append("--non-interactive") + if username is not None: + cli_args.extend(["--username", username]) + if password is not None: + cli_args.extend(["--password", password]) + if trust_server_cert is True: + cli_args.append("--trust-server-cert") + if config_dir is not None: + cli_args.extend(["--config-dir", str(config_dir)]) + if config_option is not None: + cli_args.extend(["--config-option", str(config_option)]) + + return await async_run( + cli_args, + cwd=run_cwd, + callback=self.progress_callback if log_in_real_time else None, + check_returncode=check_returncode, + timeout=timeout, + **kwargs, + ) + + async def checkout( + self, + *, + url: str, + revision: RevisionLiteral | str = None, + force: bool | None = None, + ignore_externals: bool | None = None, + depth: DepthLiteral = None, + quiet: bool | None = None, + username: str | None = None, + password: str | None = None, + no_auth_cache: bool | None = None, + non_interactive: bool | None = True, + trust_server_cert: bool | None = None, + # Special behavior + make_parents: bool | None = True, + # Pass-through + log_in_real_time: bool = False, + check_returncode: bool = True, + timeout: float | None = None, + ) -> str: + """Check out a working copy from an SVN repo asynchronously. + + Async equivalent of :meth:`~libvcs.cmd.svn.Svn.checkout`. 
+ + Parameters + ---------- + url : str + Repository URL to checkout + revision : str, optional + Number, '{ DATE }', 'HEAD', 'BASE', 'COMMITTED', 'PREV' + force : bool, optional + Force operation to run + ignore_externals : bool, optional + Ignore externals definitions + depth : str, optional + Sparse checkout depth + quiet : bool, optional + Suppress output + username : str, optional + SVN username + password : str, optional + SVN password + make_parents : bool, default True + Create checkout directory if it doesn't exist + check_returncode : bool, default True + Raise on non-zero exit code + timeout : float, optional + Timeout in seconds + + Returns + ------- + str + Command output + """ + # URL and PATH come first, matching sync Svn.checkout pattern + local_flags: list[str] = [url, str(self.path)] + + if revision is not None: + local_flags.extend(["--revision", str(revision)]) + if depth is not None: + local_flags.extend(["--depth", depth]) + if force is True: + local_flags.append("--force") + if ignore_externals is True: + local_flags.append("--ignore-externals") + if quiet is True: + local_flags.append("--quiet") + + # libvcs special behavior + if make_parents and not self.path.exists(): + self.path.mkdir(parents=True) + + return await self.run( + ["checkout", *local_flags], + username=username, + password=password, + no_auth_cache=no_auth_cache, + non_interactive=non_interactive, + trust_server_cert=trust_server_cert, + log_in_real_time=log_in_real_time, + check_returncode=check_returncode, + timeout=timeout, + ) + + async def update( + self, + accept: str | None = None, + force: bool | None = None, + ignore_externals: bool | None = None, + parents: bool | None = None, + quiet: bool | None = None, + revision: str | None = None, + set_depth: str | None = None, + # Pass-through + log_in_real_time: bool = False, + check_returncode: bool = True, + timeout: float | None = None, + ) -> str: + """Fetch latest changes to working copy asynchronously. + + Async equivalent of :meth:`~libvcs.cmd.svn.Svn.update`. 
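+
+        Example (an illustrative doctest; checks out a working copy via the
+        doctest-namespace fixtures, then updates it to the latest revision):
+
+        >>> async def example():
+        ...     svn = AsyncSvn(path=tmp_path / 'svn_update_wc')
+        ...     await svn.checkout(url=f'file://{create_svn_remote_repo()}')
+        ...     return await svn.update()
+        >>> asyncio.run(example())  # doctest: +ELLIPSIS
+        "...revision..."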
+ + Parameters + ---------- + accept : str, optional + Conflict resolution action + force : bool, optional + Force operation + ignore_externals : bool, optional + Ignore externals definitions + parents : bool, optional + Make intermediate directories + quiet : bool, optional + Suppress output + revision : str, optional + Update to specific revision + set_depth : str, optional + Set new working copy depth + check_returncode : bool, default True + Raise on non-zero exit code + timeout : float, optional + Timeout in seconds + + Returns + ------- + str + Command output + """ + local_flags: list[str] = [] + + if revision is not None: + local_flags.extend(["--revision", revision]) + if set_depth is not None: + local_flags.extend(["--set-depth", set_depth]) + if accept is not None: + local_flags.extend(["--accept", accept]) + if force is True: + local_flags.append("--force") + if ignore_externals is True: + local_flags.append("--ignore-externals") + if parents is True: + local_flags.append("--parents") + if quiet is True: + local_flags.append("--quiet") + + return await self.run( + ["update", *local_flags], + log_in_real_time=log_in_real_time, + check_returncode=check_returncode, + timeout=timeout, + ) + + async def info( + self, + target: StrPath | None = None, + revision: str | None = None, + depth: DepthLiteral = None, + incremental: bool | None = None, + recursive: bool | None = None, + xml: bool | None = None, + # Pass-through + log_in_real_time: bool = False, + check_returncode: bool = True, + timeout: float | None = None, + ) -> str: + """Return info about this SVN repository asynchronously. + + Async equivalent of :meth:`~libvcs.cmd.svn.Svn.info`. + + Parameters + ---------- + target : str | Path, optional + Target path or URL + revision : str, optional + Revision to get info for + depth : str, optional + Limit operation depth + incremental : bool, optional + Give output suitable for concatenation + recursive : bool, optional + Descend recursively + xml : bool, optional + Output in XML format + check_returncode : bool, default True + Raise on non-zero exit code + timeout : float, optional + Timeout in seconds + + Returns + ------- + str + Command output (optionally XML) + """ + local_flags: list[str] = [] + + if isinstance(target, pathlib.Path): + local_flags.append(str(target.absolute())) + elif isinstance(target, str): + local_flags.append(target) + + if revision is not None: + local_flags.extend(["--revision", revision]) + if depth is not None: + local_flags.extend(["--depth", depth]) + if incremental is True: + local_flags.append("--incremental") + if recursive is True: + local_flags.append("--recursive") + if xml is True: + local_flags.append("--xml") + + return await self.run( + ["info", *local_flags], + log_in_real_time=log_in_real_time, + check_returncode=check_returncode, + timeout=timeout, + ) diff --git a/src/libvcs/pytest_plugin.py b/src/libvcs/pytest_plugin.py index cbaeeb516..847a16941 100644 --- a/src/libvcs/pytest_plugin.py +++ b/src/libvcs/pytest_plugin.py @@ -2,22 +2,43 @@ from __future__ import annotations +import asyncio +import dataclasses import functools import getpass +import hashlib +import os import pathlib import random import shutil +import subprocess import textwrap +import time import typing as t +from importlib.metadata import version as get_package_version import pytest from libvcs import exc +from libvcs._internal.copy import copytree_reflink +from libvcs._internal.file_lock import atomic_init from libvcs._internal.run import _ENV, run from libvcs.sync.git 
import GitRemote, GitSync from libvcs.sync.hg import HgSync from libvcs.sync.svn import SvnSync +# Async support - conditional import +try: + import pytest_asyncio + + from libvcs.sync._async.git import AsyncGitSync + from libvcs.sync._async.hg import AsyncHgSync + from libvcs.sync._async.svn import AsyncSvnSync + + HAS_PYTEST_ASYNCIO = True +except ImportError: + HAS_PYTEST_ASYNCIO = False + class MaxUniqueRepoAttemptsExceeded(exc.LibVCSException): """Raised when exceeded threshold of attempts to find a unique repo destination.""" @@ -43,6 +64,156 @@ def __init__(self, attempts: int, *args: object) -> None: ) +# ============================================================================= +# Repo Fixture Result Dataclass +# ============================================================================= + +RepoT = t.TypeVar("RepoT") + + +@dataclasses.dataclass +class RepoFixtureResult(t.Generic[RepoT]): + """Result from repo fixture with metadata. + + This dataclass wraps the repository instance with additional metadata + about the fixture setup, including timing and cache information. + + Attributes + ---------- + repo : RepoT + The actual repository instance (GitSync, HgSync, SvnSync, or async variants) + path : pathlib.Path + Path to the repository working directory + remote_url : str + URL of the remote repository (file:// based) + master_copy_path : pathlib.Path + Path to the cached master copy + created_at : float + Time when the fixture was created (perf_counter) + from_cache : bool + True if the repo was copied from an existing master cache + + Examples + -------- + >>> def test_git_operations(git_repo): + ... # Direct access to repo methods via __getattr__ + ... revision = git_repo.get_revision() + ... + ... # Access metadata + ... assert git_repo.from_cache # True if using cached copy + ... print(f"Setup took: {time.perf_counter() - git_repo.created_at:.3f}s") + ... + ... # Access the underlying repo directly + ... assert isinstance(git_repo.repo, GitSync) + """ + + repo: RepoT + path: pathlib.Path + remote_url: str + master_copy_path: pathlib.Path + created_at: float + from_cache: bool + + def __getattr__(self, name: str) -> t.Any: + """Delegate attribute access to the underlying repo for backwards compat.""" + return getattr(self.repo, name) + + +# ============================================================================= +# XDG Persistent Cache Infrastructure +# ============================================================================= + + +def get_xdg_cache_dir() -> pathlib.Path: + """Get XDG cache directory for libvcs tests. + + Uses XDG_CACHE_HOME if set, otherwise defaults to ~/.cache. + """ + xdg_cache = os.environ.get("XDG_CACHE_HOME") + if xdg_cache: + return pathlib.Path(xdg_cache) / "libvcs-test" + return pathlib.Path.home() / ".cache" / "libvcs-test" + + +def get_vcs_version(cmd: list[str]) -> str: + """Get version string from a VCS command, or 'not-installed' if unavailable.""" + try: + result = subprocess.run( + cmd, + capture_output=True, + text=True, + timeout=5, + check=False, + ) + return result.stdout.strip() + except (FileNotFoundError, subprocess.TimeoutExpired): + return "not-installed" + + +def get_cache_key() -> str: + """Generate cache key from VCS versions and libvcs version. + + The cache is invalidated when any VCS tool or libvcs version changes. + Results are cached to disk with a ~23.5-hour TTL to avoid slow `hg --version` + calls (which take ~100ms due to Python startup overhead). 
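+
+    Example (an illustrative doctest; the key is always a 12-character hex
+    digest, whether computed fresh or read back from the on-disk cache):
+
+    >>> len(get_cache_key())
+    12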
+
+    Uses atomic file operations to prevent race conditions with parallel workers.
+    """
+    base_dir = get_xdg_cache_dir()
+    key_file = base_dir / ".cache_key"
+
+    # Try to return cached key (atomic read with full error handling)
+    # No exists() check - let stat() fail naturally to avoid TOCTOU race
+    try:
+        stat = key_file.stat()
+        # Use 23.5 hours (not 24) to avoid exact boundary race conditions
+        if time.time() - stat.st_mtime < 84600:
+            cached_key = key_file.read_text().strip()
+            # Validate format before using (guards against corruption)
+            if len(cached_key) == 12:
+                return cached_key
+    except (OSError, ValueError):
+        pass  # File missing, stale, corrupt, or race condition - regenerate
+
+    # Compute fresh key from VCS versions
+    versions = [
+        get_vcs_version(["git", "--version"]),
+        get_vcs_version(["hg", "--version"]),  # ~100ms due to Python startup
+        get_vcs_version(["svn", "--version"]),
+        get_package_version("libvcs"),
+    ]
+    version_str = "|".join(versions)
+    cache_key = hashlib.sha256(version_str.encode()).hexdigest()[:12]
+
+    # Atomic write: write to temp file, then rename (atomic on POSIX)
+    try:
+        base_dir.mkdir(parents=True, exist_ok=True)
+        tmp_file = base_dir / f".cache_key.{os.getpid()}.tmp"
+        tmp_file.write_text(cache_key)
+        tmp_file.rename(key_file)
+    except OSError:
+        pass  # Cache write failed, continue without caching
+
+    return cache_key
+
+
+def pytest_addoption(parser: pytest.Parser) -> None:
+    """Add libvcs pytest options."""
+    group = parser.getgroup("libvcs", "libvcs fixture options")
+    group.addoption(
+        "--libvcs-cache-dir",
+        action="store",
+        metavar="PATH",
+        help="Override XDG cache directory for libvcs test fixtures",
+    )
+    group.addoption(
+        "--libvcs-clear-cache",
+        action="store_true",
+        default=False,
+        help="Clear libvcs persistent cache before running tests",
+    )
+
+
 DEFAULT_VCS_NAME = "Test user"
 DEFAULT_VCS_EMAIL = "test@example.com"
@@ -80,6 +251,43 @@ def git_commit_envvars(vcs_name: str, vcs_email: str) -> _ENV:
     }
 
 
+@pytest.fixture(scope="session")
+def libvcs_persistent_cache(request: pytest.FixtureRequest) -> pathlib.Path:
+    """Return persistent cache directory for libvcs test fixtures.
+
+    This cache persists across test sessions and is keyed by VCS + libvcs versions.
+    When any version changes, the cache is automatically invalidated.
+
+    The cache location follows XDG Base Directory spec:
+    - Default: ~/.cache/libvcs-test/<cache_key>/
+    - Override: --libvcs-cache-dir=PATH
+
+    Use --libvcs-clear-cache to force cache rebuild.
+    """
+    # Get cache directory (from option or XDG default)
+    custom_cache = request.config.getoption("--libvcs-cache-dir")
+    base_dir = pathlib.Path(custom_cache) if custom_cache else get_xdg_cache_dir()
+
+    # Get version-based cache key
+    cache_key = get_cache_key()
+    cache_dir = base_dir / cache_key
+
+    # Handle --libvcs-clear-cache
+    if request.config.getoption("--libvcs-clear-cache") and base_dir.exists():
+        shutil.rmtree(base_dir)
+
+    # NOTE: Automatic cleanup of old cache versions removed to prevent race
+    # conditions with pytest-xdist parallel workers. Old cache versions may
+    # accumulate but won't cause issues.
Users can clean manually: + # rm -rf ~/.cache/libvcs-test/* + # Or use: --libvcs-clear-cache + + # Create cache directory + cache_dir.mkdir(parents=True, exist_ok=True) + + return cache_dir + + class RandomStrSequence: """Create a random string sequence.""" @@ -237,17 +445,15 @@ def clean() -> None: @pytest.fixture(scope="session") def remote_repos_path( - user_path: pathlib.Path, - request: pytest.FixtureRequest, + libvcs_persistent_cache: pathlib.Path, ) -> pathlib.Path: - """System's remote (file-based) repos to clone and push to. Emphemeral directory.""" - path = user_path / "remote_repos" - path.mkdir(exist_ok=True) + """Directory for remote repos and master copies, using persistent XDG cache. - def clean() -> None: - shutil.rmtree(path) - - request.addfinalizer(clean) + This ensures stable file:// URLs across test sessions, enabling proper + caching of cloned repositories. + """ + path = libvcs_persistent_cache / "remote_repos" + path.mkdir(exist_ok=True) return path @@ -317,9 +523,15 @@ def _create_git_remote_repo( @pytest.fixture(scope="session") -def libvcs_test_cache_path(tmp_path_factory: pytest.TempPathFactory) -> pathlib.Path: - """Return temporary directory to use as cache path for libvcs tests.""" - return tmp_path_factory.mktemp("libvcs-test-cache") +def libvcs_test_cache_path( + libvcs_persistent_cache: pathlib.Path, +) -> pathlib.Path: + """Return persistent cache directory for libvcs test fixtures. + + This now uses XDG persistent cache, which survives across test sessions + and is automatically invalidated when VCS or libvcs versions change. + """ + return libvcs_persistent_cache @pytest.fixture(scope="session") @@ -454,17 +666,30 @@ def git_remote_repo_single_commit_post_init( @pytest.fixture(scope="session") @skip_if_git_missing def git_remote_repo( - create_git_remote_repo: CreateRepoPytestFixtureFn, + remote_repos_path: pathlib.Path, + empty_git_repo: pathlib.Path, gitconfig: pathlib.Path, git_commit_envvars: _ENV, ) -> pathlib.Path: - """Copy the session-scoped Git repository to a temporary directory.""" - # TODO: Cache the effect of of this in a session-based repo - repo_path = create_git_remote_repo() - git_remote_repo_single_commit_post_init( - remote_repo_path=repo_path, - env=git_commit_envvars, - ) + """Return cached Git remote repo with an initial commit. + + Uses persistent XDG cache - repo persists across test sessions. + Uses atomic file locking for pytest-xdist worker coordination. + """ + repo_path = remote_repos_path / "git_remote_repo" + + # Fast path: already initialized + if (repo_path / ".libvcs_initialized").exists(): + return repo_path + + def do_init() -> None: + shutil.copytree(empty_git_repo, repo_path) + git_remote_repo_single_commit_post_init( + remote_repo_path=repo_path, + env=git_commit_envvars, + ) + + atomic_init(repo_path, do_init, marker_name=".libvcs_initialized") return repo_path @@ -571,20 +796,49 @@ def fn( @pytest.fixture(scope="session") @skip_if_svn_missing def svn_remote_repo( - create_svn_remote_repo: CreateRepoPytestFixtureFn, + remote_repos_path: pathlib.Path, + empty_svn_repo: pathlib.Path, ) -> pathlib.Path: - """Pre-made. Local file:// based SVN server.""" - return create_svn_remote_repo() + """Return cached SVN remote repo. + + Uses persistent XDG cache - repo persists across test sessions. + Uses atomic file locking for pytest-xdist worker coordination. 
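+
+    Examples
+    --------
+    Illustrative usage in a test (the fixture yields a :class:`pathlib.Path`
+    that can be turned into a ``file://`` URL):
+
+    >>> def test_svn_checkout(svn_remote_repo, tmp_path):
+    ...     url = f"file://{svn_remote_repo}"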
+ """ + repo_path = remote_repos_path / "svn_remote_repo" + + # Fast path: already initialized + if (repo_path / ".libvcs_initialized").exists(): + return repo_path + + def do_init() -> None: + shutil.copytree(empty_svn_repo, repo_path) + + atomic_init(repo_path, do_init, marker_name=".libvcs_initialized") + return repo_path @pytest.fixture(scope="session") @skip_if_svn_missing def svn_remote_repo_with_files( - create_svn_remote_repo: CreateRepoPytestFixtureFn, + remote_repos_path: pathlib.Path, + svn_remote_repo: pathlib.Path, ) -> pathlib.Path: - """Pre-made. Local file:// based SVN server.""" - repo_path = create_svn_remote_repo() - svn_remote_repo_single_commit_post_init(remote_repo_path=repo_path) + """Return cached SVN remote repo with files committed. + + Uses persistent XDG cache - repo persists across test sessions. + Uses atomic file locking for pytest-xdist worker coordination. + """ + repo_path = remote_repos_path / "svn_remote_repo_with_files" + + # Fast path: already initialized + if (repo_path / ".libvcs_initialized").exists(): + return repo_path + + def do_init() -> None: + shutil.copytree(svn_remote_repo, repo_path) + svn_remote_repo_single_commit_post_init(remote_repo_path=repo_path) + + atomic_init(repo_path, do_init, marker_name=".libvcs_initialized") return repo_path @@ -681,15 +935,29 @@ def fn( @skip_if_hg_missing def hg_remote_repo( remote_repos_path: pathlib.Path, - create_hg_remote_repo: CreateRepoPytestFixtureFn, + empty_hg_repo: pathlib.Path, hgconfig: pathlib.Path, ) -> pathlib.Path: - """Pre-made, file-based repo for push and pull.""" - repo_path = create_hg_remote_repo() - hg_remote_repo_single_commit_post_init( - remote_repo_path=repo_path, - env={"HGRCPATH": str(hgconfig)}, - ) + """Return cached Mercurial remote repo with an initial commit. + + Uses persistent XDG cache - repo persists across test sessions. + Uses atomic file locking for pytest-xdist worker coordination. + """ + repo_path = remote_repos_path / "hg_remote_repo" + + # Fast path: already initialized + if (repo_path / ".libvcs_initialized").exists(): + return repo_path + + def do_init() -> None: + shutil.copytree(empty_hg_repo, repo_path) + # Add initial commit (slow: ~288ms due to hg add + commit) + hg_remote_repo_single_commit_post_init( + remote_repo_path=repo_path, + env={"HGRCPATH": str(hgconfig)}, + ) + + atomic_init(repo_path, do_init, marker_name=".libvcs_initialized") return repo_path @@ -699,32 +967,56 @@ def git_repo( projects_path: pathlib.Path, git_remote_repo: pathlib.Path, set_gitconfig: pathlib.Path, -) -> GitSync: - """Pre-made git clone of remote repo checked out to user's projects dir.""" +) -> RepoFixtureResult[GitSync]: + """Pre-made git clone of remote repo checked out to user's projects dir. + + Returns a RepoFixtureResult containing the GitSync instance and metadata. + The underlying GitSync methods are accessible directly via __getattr__. 
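+
+    Examples
+    --------
+    Illustrative usage (mirrors the async fixture docs; the test body only
+    sketches typical access patterns):
+
+    >>> def test_git_operations(git_repo):
+    ...     revision = git_repo.get_revision()
+    ...     assert git_repo.from_cache in (True, False)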
+ """ + created_at = time.perf_counter() remote_repo_name = unique_repo_name(remote_repos_path=projects_path) new_checkout_path = projects_path / remote_repo_name - master_copy = remote_repos_path / "git_repo" - - if master_copy.exists(): - shutil.copytree(master_copy, new_checkout_path) - return GitSync( - url=f"file://{git_remote_repo}", - path=str(new_checkout_path), + remote_url = f"file://{git_remote_repo}" + # Unified master copy shared with async_git_repo + master_copy = remote_repos_path / "git_repo_master" + + def create_master() -> None: + """Create master copy atomically - only one worker does this.""" + repo = GitSync( + url=remote_url, + path=master_copy, + remotes={ + "origin": GitRemote( + name="origin", + push_url=remote_url, + fetch_url=remote_url, + ), + }, ) + repo.obtain() - git_repo = GitSync( - url=f"file://{git_remote_repo}", - path=master_copy, - remotes={ - "origin": GitRemote( - name="origin", - push_url=f"file://{git_remote_repo}", - fetch_url=f"file://{git_remote_repo}", - ), - }, + # atomic_init returns True if this process did the init, False if waited + from_cache = not atomic_init( + master_copy, + create_master, + marker_name=".libvcs_master_initialized", + ) + + # All workers get a unique copy from master (exclude marker file) + copytree_reflink( + master_copy, + new_checkout_path, + ignore=shutil.ignore_patterns(".libvcs_master_initialized"), + ) + repo = GitSync(url=remote_url, path=str(new_checkout_path)) + return RepoFixtureResult( + repo=repo, + path=new_checkout_path, + remote_url=remote_url, + master_copy_path=master_copy, + created_at=created_at, + from_cache=from_cache, ) - git_repo.obtain() - return git_repo @pytest.fixture @@ -733,25 +1025,45 @@ def hg_repo( projects_path: pathlib.Path, hg_remote_repo: pathlib.Path, set_hgconfig: pathlib.Path, -) -> HgSync: - """Pre-made hg clone of remote repo checked out to user's projects dir.""" +) -> RepoFixtureResult[HgSync]: + """Pre-made hg clone of remote repo checked out to user's projects dir. + + Returns a RepoFixtureResult containing the HgSync instance and metadata. 
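+
+    Examples
+    --------
+    Illustrative usage (mirrors the async fixture docs):
+
+    >>> def test_hg_operations(hg_repo):
+    ...     assert (hg_repo.path / '.hg').is_dir()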
+ """ + created_at = time.perf_counter() remote_repo_name = unique_repo_name(remote_repos_path=projects_path) new_checkout_path = projects_path / remote_repo_name - master_copy = remote_repos_path / "hg_repo" - - if master_copy.exists(): - shutil.copytree(master_copy, new_checkout_path) - return HgSync( - url=f"file://{hg_remote_repo}", - path=str(new_checkout_path), - ) + remote_url = f"file://{hg_remote_repo}" + # Unified master copy shared with async_hg_repo + master_copy = remote_repos_path / "hg_repo_master" + + def create_master() -> None: + """Create master copy atomically - only one worker does this.""" + repo = HgSync(url=remote_url, path=master_copy) + repo.obtain() + + # atomic_init returns True if this process did the init, False if waited + from_cache = not atomic_init( + master_copy, + create_master, + marker_name=".libvcs_master_initialized", + ) - hg_repo = HgSync( - url=f"file://{hg_remote_repo}", - path=master_copy, + # All workers get a unique copy from master (exclude marker file) + copytree_reflink( + master_copy, + new_checkout_path, + ignore=shutil.ignore_patterns(".libvcs_master_initialized"), + ) + repo = HgSync(url=remote_url, path=str(new_checkout_path)) + return RepoFixtureResult( + repo=repo, + path=new_checkout_path, + remote_url=remote_url, + master_copy_path=master_copy, + created_at=created_at, + from_cache=from_cache, ) - hg_repo.obtain() - return hg_repo @pytest.fixture @@ -759,25 +1071,230 @@ def svn_repo( remote_repos_path: pathlib.Path, projects_path: pathlib.Path, svn_remote_repo: pathlib.Path, -) -> SvnSync: - """Pre-made svn clone of remote repo checked out to user's projects dir.""" +) -> RepoFixtureResult[SvnSync]: + """Pre-made svn checkout of remote repo checked out to user's projects dir. + + Returns a RepoFixtureResult containing the SvnSync instance and metadata. 
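+
+    Examples
+    --------
+    Illustrative usage (mirrors the async fixture docs):
+
+    >>> def test_svn_operations(svn_repo):
+    ...     assert svn_repo.remote_url.startswith('file://')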
+ """ + created_at = time.perf_counter() remote_repo_name = unique_repo_name(remote_repos_path=projects_path) new_checkout_path = projects_path / remote_repo_name - master_copy = remote_repos_path / "svn_repo" + remote_url = f"file://{svn_remote_repo}" + # Unified master copy shared with async_svn_repo + master_copy = remote_repos_path / "svn_repo_master" + + def create_master() -> None: + """Create master copy atomically - only one worker does this.""" + repo = SvnSync(url=remote_url, path=str(master_copy)) + repo.obtain() + + # atomic_init returns True if this process did the init, False if waited + from_cache = not atomic_init( + master_copy, + create_master, + marker_name=".libvcs_master_initialized", + ) + + # All workers get a unique copy from master (exclude marker file) + copytree_reflink( + master_copy, + new_checkout_path, + ignore=shutil.ignore_patterns(".libvcs_master_initialized"), + ) + repo = SvnSync(url=remote_url, path=str(new_checkout_path)) + return RepoFixtureResult( + repo=repo, + path=new_checkout_path, + remote_url=remote_url, + master_copy_path=master_copy, + created_at=created_at, + from_cache=from_cache, + ) + + +# ============================================================================= +# Async Fixtures +# ============================================================================= + +if HAS_PYTEST_ASYNCIO: + + @pytest_asyncio.fixture + @skip_if_git_missing + async def async_git_repo( + remote_repos_path: pathlib.Path, + projects_path: pathlib.Path, + git_remote_repo: pathlib.Path, + set_gitconfig: pathlib.Path, + ) -> t.AsyncGenerator[RepoFixtureResult[AsyncGitSync], None]: + """Pre-made async git clone of remote repo checked out to user's projects dir. + + Async equivalent of :func:`git_repo` fixture. + Returns a RepoFixtureResult containing the AsyncGitSync instance and metadata. + + Examples + -------- + >>> @pytest.mark.asyncio + ... async def test_git_operations(async_git_repo): + ... revision = await async_git_repo.get_revision() + ... 
assert async_git_repo.from_cache # True if using cached copy + """ + created_at = time.perf_counter() + remote_repo_name = unique_repo_name(remote_repos_path=projects_path) + new_checkout_path = projects_path / remote_repo_name + remote_url = f"file://{git_remote_repo}" + # Unified master copy shared with git_repo + master_copy = remote_repos_path / "git_repo_master" + + def create_master() -> None: + """Create master copy atomically - only one worker does this.""" + # Use sync GitSync for atomic init (only runs once per session) + sync_repo = GitSync( + url=remote_url, + path=master_copy, + remotes={ + "origin": GitRemote( + name="origin", + push_url=remote_url, + fetch_url=remote_url, + ), + }, + ) + sync_repo.obtain() - if master_copy.exists(): - shutil.copytree(master_copy, new_checkout_path) - return SvnSync( - url=f"file://{svn_remote_repo}", - path=str(new_checkout_path), + # atomic_init returns True if this process did the init, False if waited + from_cache = not atomic_init( + master_copy, + create_master, + marker_name=".libvcs_master_initialized", ) - svn_repo = SvnSync( - url=f"file://{svn_remote_repo}", - path=str(projects_path / "svn_repo"), - ) - svn_repo.obtain() - return svn_repo + # All workers get a unique copy from master (exclude marker file) + copytree_reflink( + master_copy, + new_checkout_path, + ignore=shutil.ignore_patterns(".libvcs_master_initialized"), + ) + repo = AsyncGitSync(url=remote_url, path=new_checkout_path) + yield RepoFixtureResult( + repo=repo, + path=new_checkout_path, + remote_url=remote_url, + master_copy_path=master_copy, + created_at=created_at, + from_cache=from_cache, + ) + + @pytest_asyncio.fixture + @skip_if_hg_missing + async def async_hg_repo( + remote_repos_path: pathlib.Path, + projects_path: pathlib.Path, + hg_remote_repo: pathlib.Path, + set_hgconfig: pathlib.Path, + ) -> t.AsyncGenerator[RepoFixtureResult[AsyncHgSync], None]: + """Pre-made async hg clone of remote repo checked out to user's projects dir. + + Async equivalent of :func:`hg_repo` fixture. + Returns a RepoFixtureResult containing the AsyncHgSync instance and metadata. + + Examples + -------- + >>> @pytest.mark.asyncio + ... async def test_hg_operations(async_hg_repo): + ... revision = await async_hg_repo.get_revision() + ... 
assert async_hg_repo.from_cache # True if using cached copy + """ + created_at = time.perf_counter() + remote_repo_name = unique_repo_name(remote_repos_path=projects_path) + new_checkout_path = projects_path / remote_repo_name + remote_url = f"file://{hg_remote_repo}" + # Unified master copy shared with hg_repo + master_copy = remote_repos_path / "hg_repo_master" + + def create_master() -> None: + """Create master copy atomically - only one worker does this.""" + # Use sync HgSync for atomic init (only runs once per session) + sync_repo = HgSync(url=remote_url, path=master_copy) + sync_repo.obtain() + + # atomic_init returns True if this process did the init, False if waited + from_cache = not atomic_init( + master_copy, + create_master, + marker_name=".libvcs_master_initialized", + ) + + # All workers get a unique copy from master (exclude marker file) + copytree_reflink( + master_copy, + new_checkout_path, + ignore=shutil.ignore_patterns(".libvcs_master_initialized"), + ) + repo = AsyncHgSync(url=remote_url, path=new_checkout_path) + yield RepoFixtureResult( + repo=repo, + path=new_checkout_path, + remote_url=remote_url, + master_copy_path=master_copy, + created_at=created_at, + from_cache=from_cache, + ) + + @pytest_asyncio.fixture + @skip_if_svn_missing + async def async_svn_repo( + remote_repos_path: pathlib.Path, + projects_path: pathlib.Path, + svn_remote_repo: pathlib.Path, + ) -> t.AsyncGenerator[RepoFixtureResult[AsyncSvnSync], None]: + """Pre-made async svn checkout of remote repo. + + Checked out to user's projects dir. + Async equivalent of :func:`svn_repo` fixture. + Returns a RepoFixtureResult containing the AsyncSvnSync instance and metadata. + + Examples + -------- + >>> @pytest.mark.asyncio + ... async def test_svn_operations(async_svn_repo): + ... revision = await async_svn_repo.get_revision() + ... 
assert async_svn_repo.from_cache # True if using cached copy + """ + created_at = time.perf_counter() + remote_repo_name = unique_repo_name(remote_repos_path=projects_path) + new_checkout_path = projects_path / remote_repo_name + remote_url = f"file://{svn_remote_repo}" + # Unified master copy shared with svn_repo + master_copy = remote_repos_path / "svn_repo_master" + + def create_master() -> None: + """Create master copy atomically - only one worker does this.""" + # Use sync SvnSync for atomic init (only runs once per session) + sync_repo = SvnSync(url=remote_url, path=str(master_copy)) + sync_repo.obtain() + + # atomic_init returns True if this process did the init, False if waited + from_cache = not atomic_init( + master_copy, + create_master, + marker_name=".libvcs_master_initialized", + ) + + # All workers get a unique copy from master (exclude marker file) + copytree_reflink( + master_copy, + new_checkout_path, + ignore=shutil.ignore_patterns(".libvcs_master_initialized"), + ) + repo = AsyncSvnSync(url=remote_url, path=new_checkout_path) + yield RepoFixtureResult( + repo=repo, + path=new_checkout_path, + remote_url=remote_url, + master_copy_path=master_copy, + created_at=created_at, + from_cache=from_cache, + ) @pytest.fixture @@ -798,6 +1315,8 @@ def add_doctest_fixtures( if not isinstance(request._pyfuncitem, DoctestItem): # Only run on doctest items return + # Add asyncio for async doctests + doctest_namespace["asyncio"] = asyncio doctest_namespace["tmp_path"] = tmp_path if shutil.which("git"): doctest_namespace["create_git_remote_repo"] = functools.partial( diff --git a/src/libvcs/sync/_async/__init__.py b/src/libvcs/sync/_async/__init__.py new file mode 100644 index 000000000..79d8bd391 --- /dev/null +++ b/src/libvcs/sync/_async/__init__.py @@ -0,0 +1,21 @@ +"""Async repository synchronization classes. + +This module provides async equivalents of the sync classes +in :mod:`libvcs.sync`. + +Note +---- +This is an internal API not covered by versioning policy. +""" + +from __future__ import annotations + +from libvcs.sync._async.git import AsyncGitSync +from libvcs.sync._async.hg import AsyncHgSync +from libvcs.sync._async.svn import AsyncSvnSync + +__all__ = [ + "AsyncGitSync", + "AsyncHgSync", + "AsyncSvnSync", +] diff --git a/src/libvcs/sync/_async/base.py b/src/libvcs/sync/_async/base.py new file mode 100644 index 000000000..89365568f --- /dev/null +++ b/src/libvcs/sync/_async/base.py @@ -0,0 +1,190 @@ +"""Foundational tools for async VCS managers. + +Async equivalent of :mod:`libvcs.sync.base`. + +Note +---- +This is an internal API not covered by versioning policy. +""" + +from __future__ import annotations + +import logging +import pathlib +import typing as t +from urllib import parse as urlparse + +from libvcs._internal.async_run import ( + AsyncProgressCallbackProtocol, + async_run, +) +from libvcs._internal.run import CmdLoggingAdapter +from libvcs._internal.types import StrPath +from libvcs.sync.base import convert_pip_url + +logger = logging.getLogger(__name__) + + +class AsyncBaseSync: + """Base class for async repository synchronization. + + Async equivalent of :class:`~libvcs.sync.base.BaseSync`. + """ + + log_in_real_time: bool | None = None + """Log command output to buffer""" + + bin_name: str = "" + """VCS app name, e.g. 'git'""" + + schemes: tuple[str, ...] 
= () + """List of supported schemes to register in urlparse.uses_netloc""" + + def __init__( + self, + *, + url: str, + path: StrPath, + progress_callback: AsyncProgressCallbackProtocol | None = None, + **kwargs: t.Any, + ) -> None: + """Initialize async VCS synchronization object. + + Parameters + ---------- + url : str + URL of the repository + path : str | Path + Local path for the repository + progress_callback : AsyncProgressCallbackProtocol, optional + Async callback for progress updates + + Examples + -------- + >>> import asyncio + >>> class MyRepo(AsyncBaseSync): + ... bin_name = 'git' + ... async def obtain(self): + ... await self.run(['clone', self.url, str(self.path)]) + """ + self.url = url + + #: Async callback for run updates + self.progress_callback = progress_callback + + #: Directory to check out + self.path: pathlib.Path + if isinstance(path, pathlib.Path): + self.path = path + else: + self.path = pathlib.Path(path) + + if "rev" in kwargs: + self.rev = kwargs["rev"] + + # Register schemes with urlparse + if hasattr(self, "schemes"): + urlparse.uses_netloc.extend(self.schemes) + if getattr(urlparse, "uses_fragment", None): + urlparse.uses_fragment.extend(self.schemes) + + #: Logging attribute + self.log: CmdLoggingAdapter = CmdLoggingAdapter( + bin_name=self.bin_name, + keyword=self.repo_name, + logger=logger, + extra={}, + ) + + @property + def repo_name(self) -> str: + """Return the short name of a repo checkout.""" + return self.path.stem + + @classmethod + def from_pip_url(cls, pip_url: str, **kwargs: t.Any) -> AsyncBaseSync: + """Create async synchronization object from pip-style URL.""" + url, rev = convert_pip_url(pip_url) + return cls(url=url, rev=rev, **kwargs) + + async def run( + self, + cmd: StrPath | list[StrPath], + cwd: StrPath | None = None, + check_returncode: bool = True, + log_in_real_time: bool | None = None, + timeout: float | None = None, + **kwargs: t.Any, + ) -> str: + """Run a command asynchronously. + + This method will also prefix the VCS command bin_name. By default runs + using the cwd of the repo. + + Parameters + ---------- + cmd : str | list[str] + Command and arguments to run + cwd : str | Path, optional + Working directory, defaults to self.path + check_returncode : bool, default True + Raise on non-zero exit code + log_in_real_time : bool, optional + Stream output to callback + timeout : float, optional + Timeout in seconds + + Returns + ------- + str + Combined stdout/stderr output + """ + if cwd is None: + cwd = getattr(self, "path", None) + + if isinstance(cmd, list): + full_cmd = [self.bin_name, *[str(c) for c in cmd]] + else: + full_cmd = [self.bin_name, str(cmd)] + + should_log = log_in_real_time or self.log_in_real_time or False + + return await async_run( + full_cmd, + callback=self.progress_callback if should_log else None, + check_returncode=check_returncode, + cwd=cwd, + timeout=timeout, + **kwargs, + ) + + def ensure_dir(self, *args: t.Any, **kwargs: t.Any) -> bool: + """Assure destination path exists. If not, create directories. + + Note: This is synchronous as it's just filesystem operations. 
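+
+        Examples
+        --------
+        An illustrative doctest (``tmp_path`` comes from the doctest
+        namespace; the call is idempotent and returns True either way):
+
+        >>> repo = AsyncBaseSync(
+        ...     url='file:///example',
+        ...     path=tmp_path / 'checkouts' / 'repo',
+        ... )
+        >>> repo.ensure_dir()
+        True
+        >>> repo.path.is_dir()
+        True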
+ """ + if self.path.exists(): + return True + + if not self.path.parent.exists(): + self.path.parent.mkdir(parents=True) + + if not self.path.exists(): + self.log.debug( + f"Project directory for {self.repo_name} does not exist @ {self.path}", + ) + self.path.mkdir(parents=True) + + return True + + async def update_repo(self, *args: t.Any, **kwargs: t.Any) -> None: + """Pull latest changes from remote repository.""" + raise NotImplementedError + + async def obtain(self, *args: t.Any, **kwargs: t.Any) -> None: + """Checkout initial VCS repository from remote repository.""" + raise NotImplementedError + + def __repr__(self) -> str: + """Representation of async VCS management object.""" + return f"<{self.__class__.__name__} {self.repo_name}>" diff --git a/src/libvcs/sync/_async/git.py b/src/libvcs/sync/_async/git.py new file mode 100644 index 000000000..07cf4e6d0 --- /dev/null +++ b/src/libvcs/sync/_async/git.py @@ -0,0 +1,549 @@ +"""Async tool to manage a local git clone from an external git repository. + +Async equivalent of :mod:`libvcs.sync.git`. + +Note +---- +This is an internal API not covered by versioning policy. +""" + +from __future__ import annotations + +import pathlib +import re +import typing as t + +from libvcs import exc +from libvcs._internal.async_run import AsyncProgressCallbackProtocol +from libvcs._internal.types import StrPath +from libvcs.cmd._async.git import AsyncGit +from libvcs.sync._async.base import AsyncBaseSync +from libvcs.sync.base import VCSLocation, convert_pip_url as base_convert_pip_url +from libvcs.sync.git import ( + GitRemote, + GitRemoteOriginMissing, + GitRemoteRefNotFound, + GitRemotesArgs, + GitRemoteSetError, + GitStatus, + GitSyncRemoteDict, +) + + +def convert_pip_url(pip_url: str) -> VCSLocation: + """Convert pip-style URL to a VCSLocation. + + Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'. + """ + if "://" not in pip_url: + assert "file:" not in pip_url + pip_url = pip_url.replace("git+", "git+ssh://") + url, rev = base_convert_pip_url(pip_url) + url = url.replace("ssh://", "") + elif "github.com:" in pip_url: + msg = ( + "Repo {} is malformatted, please use the convention {} for " + "ssh / private GitHub repositories.".format( + pip_url, + "git+https://github.com/username/repo.git", + ) + ) + raise exc.LibVCSException(msg) + else: + url, rev = base_convert_pip_url(pip_url) + + return VCSLocation(url=url, rev=rev) + + +class AsyncGitSync(AsyncBaseSync): + """Async tool to manage a local git clone from an external git repository. + + Async equivalent of :class:`~libvcs.sync.git.GitSync`. + + Examples + -------- + >>> async def example(): + ... url = f'file://{create_git_remote_repo()}' + ... repo_path = tmp_path / 'git_sync_repo' + ... repo = AsyncGitSync(url=url, path=repo_path) + ... await repo.obtain() + ... return (repo_path / '.git').exists() + >>> asyncio.run(example()) + True + """ + + bin_name = "git" + schemes = ("git+http", "git+https", "git+file") + cmd: AsyncGit + _remotes: GitSyncRemoteDict + + def __init__( + self, + *, + url: str, + path: StrPath, + remotes: GitRemotesArgs = None, + progress_callback: AsyncProgressCallbackProtocol | None = None, + **kwargs: t.Any, + ) -> None: + """Initialize async git repository manager. 
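+
+        Example (an illustrative doctest; no clone happens at construction
+        time, and any ``git+`` scheme prefix is chomped from the origin URL):
+
+        >>> repo = AsyncGitSync(
+        ...     url='git+https://example.com/project.git',
+        ...     path=tmp_path / 'project',
+        ... )
+        >>> repo.url
+        'https://example.com/project.git'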
+ + Parameters + ---------- + url : str + URL of the repository + path : str | Path + Local path for the repository + remotes : dict, optional + Additional remotes to configure + progress_callback : AsyncProgressCallbackProtocol, optional + Async callback for progress updates + """ + self.git_shallow = kwargs.pop("git_shallow", False) + self.tls_verify = kwargs.pop("tls_verify", False) + + self._remotes: GitSyncRemoteDict + + if remotes is None: + self._remotes = { + "origin": GitRemote(name="origin", fetch_url=url, push_url=url), + } + elif isinstance(remotes, dict): + self._remotes = {} + for remote_name, remote_url in remotes.items(): + if isinstance(remote_url, str): + self._remotes[remote_name] = GitRemote( + name=remote_name, + fetch_url=remote_url, + push_url=remote_url, + ) + elif isinstance(remote_url, dict): + self._remotes[remote_name] = GitRemote( + fetch_url=remote_url["fetch_url"], + push_url=remote_url["push_url"], + name=remote_name, + ) + elif isinstance(remote_url, GitRemote): + self._remotes[remote_name] = remote_url + + if url and "origin" not in self._remotes: + self._remotes["origin"] = GitRemote( + name="origin", + fetch_url=url, + push_url=url, + ) + + super().__init__( + url=url, path=path, progress_callback=progress_callback, **kwargs + ) + + self.cmd = AsyncGit(path=path, progress_callback=self.progress_callback) + + origin = ( + self._remotes.get("origin") + if "origin" in self._remotes + else next(iter(self._remotes.items()))[1] + ) + if origin is None: + raise GitRemoteOriginMissing(remotes=list(self._remotes.keys())) + self.url = self.chomp_protocol(origin.fetch_url) + + @classmethod + def from_pip_url(cls, pip_url: str, **kwargs: t.Any) -> AsyncGitSync: + """Clone a git repository from a pip-style URL.""" + url, rev = convert_pip_url(pip_url) + return cls(url=url, rev=rev, **kwargs) + + @staticmethod + def chomp_protocol(url: str) -> str: + """Remove VCS protocol prefix from URL. + + Parameters + ---------- + url : str + URL possibly with git+ prefix + + Returns + ------- + str + URL without git+ prefix + """ + if url.startswith("git+"): + return url[4:] + return url + + async def get_revision(self) -> str: + """Return current revision. 
Initial repositories return 'initial'."""
+        try:
+            return await self.cmd.rev_parse(
+                verify=True, args="HEAD", check_returncode=True
+            )
+        except exc.CommandError:
+            return "initial"
+
+    async def obtain(self, *args: t.Any, **kwargs: t.Any) -> None:
+        """Retrieve the repository, clone if it doesn't exist."""
+        self.ensure_dir()
+
+        url = self.url
+
+        self.log.info("Cloning.")
+        await self.cmd.clone(
+            url=url,
+            progress=True,
+            depth=1 if self.git_shallow else None,
+            config={"http.sslVerify": False} if self.tls_verify else None,
+            log_in_real_time=True,
+        )
+
+        self.log.info("Initializing submodules.")
+        await self.cmd.submodule.init(log_in_real_time=True)
+        await self.cmd.submodule.update(
+            init=True,
+            recursive=True,
+            log_in_real_time=True,
+        )
+
+        await self.set_remotes(overwrite=True)
+
+    async def update_repo(
+        self,
+        set_remotes: bool = False,
+        *args: t.Any,
+        **kwargs: t.Any,
+    ) -> None:
+        """Pull latest changes from git remote."""
+        self.ensure_dir()
+
+        if not pathlib.Path(self.path / ".git").is_dir():
+            await self.obtain()
+            await self.update_repo(set_remotes=set_remotes)
+            return
+
+        if set_remotes:
+            await self.set_remotes(overwrite=True)
+
+        # Get requested revision or tag
+        url, git_tag = self.url, getattr(self, "rev", None)
+
+        if not git_tag:
+            self.log.debug("No git revision set, defaulting to origin/master")
+            symref = await self.cmd.symbolic_ref(name="HEAD", short=True)
+            git_tag = symref.rstrip() if symref else "origin/master"
+        self.log.debug("git_tag: %s", git_tag)
+
+        self.log.info("Updating to '%s'.", git_tag)
+
+        # Get head sha
+        try:
+            head_sha = await self.cmd.rev_list(
+                commit="HEAD",
+                max_count=1,
+                check_returncode=True,
+            )
+        except exc.CommandError:
+            self.log.exception("Failed to get the hash for HEAD")
+            return
+
+        self.log.debug("head_sha: %s", head_sha)
+
+        # Check if it's a remote ref
+        show_ref_output = await self.cmd.show_ref(
+            pattern=git_tag, check_returncode=False
+        )
+        self.log.debug("show_ref_output: %s", show_ref_output)
+        is_remote_ref = "remotes" in show_ref_output
+        self.log.debug("is_remote_ref: %s", is_remote_ref)
+
+        # Get remote name
+        git_remote_name = await self.get_current_remote_name()
+
+        if f"refs/remotes/{git_tag}" in show_ref_output:
+            m = re.match(
+                r"^[0-9a-f]{40} refs/remotes/"
+                r"(?P<git_remote_name>[^/]+)/"
+                r"(?P<git_tag>.+)$",
+                show_ref_output,
+                re.MULTILINE,
+            )
+            if m is None:
+                raise GitRemoteRefNotFound(git_tag=git_tag, ref_output=show_ref_output)
+            git_remote_name = m.group("git_remote_name")
+            git_tag = m.group("git_tag")
+        self.log.debug("git_remote_name: %s", git_remote_name)
+        self.log.debug("git_tag: %s", git_tag)
+
+        # Get tag sha
+        try:
+            error_code = 0
+            tag_sha = await self.cmd.rev_list(
+                commit=git_remote_name + "/" + git_tag if is_remote_ref else git_tag,
+                max_count=1,
+            )
+        except exc.CommandError as e:
+            error_code = e.returncode if e.returncode is not None else 0
+            tag_sha = ""
+        self.log.debug("tag_sha: %s", tag_sha)
+
+        # Is the hash checked out what we want?
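+        # Any truthy element (rev-list failure, a remote ref that must be
+        # synced, or a sha mismatch) means the checkout is not up to date.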
+ somethings_up = (error_code, is_remote_ref, tag_sha != head_sha) + if all(not x for x in somethings_up): + self.log.info("Already up-to-date.") + return + + try: + await self.cmd.fetch(log_in_real_time=True, check_returncode=True) + except exc.CommandError: + self.log.exception("Failed to fetch repository '%s'", url) + return + + if is_remote_ref: + # Check if stash is needed + try: + process = await self.cmd.status(porcelain=True, untracked_files="no") + except exc.CommandError: + self.log.exception("Failed to get the status") + return + need_stash = len(process) > 0 + + # Stash changes if needed + if need_stash: + git_stash_save_options = "--quiet" + try: + await self.cmd.stash.save(message=git_stash_save_options) + except exc.CommandError: + self.log.exception("Failed to stash changes") + + # Checkout the remote branch + try: + await self.cmd.checkout(branch=git_tag) + except exc.CommandError: + self.log.exception("Failed to checkout tag: '%s'", git_tag) + return + + # Rebase changes from the remote branch + try: + await self.cmd.rebase(upstream=git_remote_name + "/" + git_tag) + except exc.CommandError as e: + if any(msg in str(e) for msg in ["invalid_upstream", "Aborting"]): + self.log.exception("Invalid upstream remote. Rebase aborted.") + else: + # Rebase failed: Restore previous state + await self.cmd.rebase(abort=True) + if need_stash: + await self.cmd.stash.pop(index=True, quiet=True) + + self.log.exception( + f"\nFailed to rebase in: '{self.path}'.\n" + "You will have to resolve the conflicts manually", + ) + return + + if need_stash: + try: + await self.cmd.stash.pop(index=True, quiet=True) + except exc.CommandError: + # Stash pop --index failed: Try again dropping the index + await self.cmd.reset(hard=True, quiet=True) + try: + await self.cmd.stash.pop(quiet=True) + except exc.CommandError: + # Stash pop failed: Restore previous state + await self.cmd.reset(pathspec=head_sha, hard=True, quiet=True) + await self.cmd.stash.pop(index=True, quiet=True) + self.log.exception( + f"\nFailed to rebase in: '{self.path}'.\n" + "You will have to resolve the conflicts manually", + ) + return + + else: + try: + await self.cmd.checkout(branch=git_tag) + except exc.CommandError: + self.log.exception("Failed to checkout tag: '%s'", git_tag) + return + + await self.cmd.submodule.update( + recursive=True, init=True, log_in_real_time=True + ) + + async def set_remotes(self, overwrite: bool = False) -> None: + """Apply remotes in local repository to match configuration.""" + remotes = self._remotes + if isinstance(remotes, dict): + for remote_name, git_remote_repo in remotes.items(): + existing_remote = await self.remote(remote_name) + if isinstance(git_remote_repo, GitRemote): + if ( + not existing_remote + or existing_remote.fetch_url != git_remote_repo.fetch_url + ): + await self.set_remote( + name=remote_name, + url=git_remote_repo.fetch_url, + overwrite=overwrite, + ) + existing_remote = await self.remote(remote_name) + if git_remote_repo.push_url and ( + not existing_remote + or existing_remote.push_url != git_remote_repo.push_url + ): + await self.set_remote( + name=remote_name, + url=git_remote_repo.push_url, + push=True, + overwrite=overwrite, + ) + elif ( + not existing_remote + or existing_remote.fetch_url != git_remote_repo.fetch_url + ): + await self.set_remote( + name=remote_name, + url=git_remote_repo.fetch_url, + overwrite=overwrite, + ) + + async def remotes_get(self) -> GitSyncRemoteDict: + """Return remotes like git remote -v. 
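+
+        Fetch and push URLs are parsed from ``git remote show`` output.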
+ + Returns + ------- + dict + Dictionary of remote names to GitRemote objects + """ + remotes: GitSyncRemoteDict = {} + + ret = await self.cmd.remotes.ls() + for remote_name in ret: + remote_name = remote_name.strip() + if not remote_name: + continue + try: + remote_output = await self.cmd.remotes.show( + name=remote_name, + verbose=True, + ) + except exc.CommandError: + self.log.exception("Failed to get remote info for %s", remote_name) + continue + + # Parse remote output + fetch_url = "" + push_url = "" + for line in remote_output.splitlines(): + line = line.strip() + if "(fetch)" in line: + fetch_url = line.replace("(fetch)", "").strip() + elif "(push)" in line: + push_url = line.replace("(push)", "").strip() + + remotes[remote_name] = GitRemote( + name=remote_name, + fetch_url=fetch_url, + push_url=push_url, + ) + + return remotes + + async def remote(self, name: str) -> GitRemote | None: + """Get a specific remote by name. + + Parameters + ---------- + name : str + Remote name + + Returns + ------- + GitRemote | None + Remote info or None if not found + """ + remotes = await self.remotes_get() + return remotes.get(name) + + async def set_remote( + self, + *, + name: str, + url: str, + push: bool = False, + overwrite: bool = False, + ) -> None: + """Set or add a remote. + + Parameters + ---------- + name : str + Remote name + url : str + Remote URL + push : bool + Set push URL instead of fetch URL + overwrite : bool + Overwrite existing remote + """ + existing_remotes = await self.cmd.remotes.ls() + + if name in existing_remotes: + if push: + # Set push URL using git remote set-url --push + await self.cmd.run(["remote", "set-url", "--push", name, url]) + elif overwrite: + await self.cmd.run(["remote", "set-url", name, url]) + else: + await self.cmd.remotes.add(name=name, url=url) + + # Verify + remote = await self.remote(name) + if not remote: + raise GitRemoteSetError(remote_name=name) + + async def get_current_remote_name(self) -> str: + """Get the current remote name. + + Returns + ------- + str + Remote name (defaults to 'origin') + """ + try: + # Try to get the upstream remote + branch = await self.cmd.symbolic_ref(name="HEAD", short=True) + branch = branch.strip() + if branch: + # Get the remote for this branch + try: + remote = await self.cmd.run( + ["config", f"branch.{branch}.remote"], + check_returncode=False, + ) + if remote.strip(): + return remote.strip() + except exc.CommandError: + pass + except exc.CommandError: + pass + return "origin" + + async def get_git_version(self) -> str: + """Return git version. + + Returns + ------- + str + Git version string + """ + return await self.cmd.version() + + async def status(self) -> GitStatus: + """Return GitStatus with parsed git status information. + + Returns + ------- + GitStatus + Parsed git status information + """ + output = await self.cmd.status(short=True, branch=True, porcelain="2") + return GitStatus.from_stdout(output) diff --git a/src/libvcs/sync/_async/hg.py b/src/libvcs/sync/_async/hg.py new file mode 100644 index 000000000..9e049ac49 --- /dev/null +++ b/src/libvcs/sync/_async/hg.py @@ -0,0 +1,111 @@ +"""Async tool to manage a local hg (Mercurial) working copy from a repository. + +Async equivalent of :mod:`libvcs.sync.hg`. + +Note +---- +This is an internal API not covered by versioning policy. 
+""" + +from __future__ import annotations + +import pathlib +import typing as t + +from libvcs._internal.async_run import AsyncProgressCallbackProtocol +from libvcs._internal.types import StrPath +from libvcs.cmd._async.hg import AsyncHg +from libvcs.sync._async.base import AsyncBaseSync + + +class AsyncHgSync(AsyncBaseSync): + """Async tool to manage a local hg (Mercurial) repository cloned from a remote one. + + Async equivalent of :class:`~libvcs.sync.hg.HgSync`. + + Examples + -------- + >>> async def example(): + ... url = f'file://{create_hg_remote_repo()}' + ... repo_path = tmp_path / 'hg_sync_repo' + ... repo = AsyncHgSync(url=url, path=repo_path) + ... await repo.obtain() + ... return (repo_path / '.hg').exists() + >>> asyncio.run(example()) + True + """ + + bin_name = "hg" + schemes = ("hg", "hg+http", "hg+https", "hg+file") + cmd: AsyncHg + + def __init__( + self, + *, + url: str, + path: StrPath, + progress_callback: AsyncProgressCallbackProtocol | None = None, + **kwargs: t.Any, + ) -> None: + """Initialize async Mercurial repository manager. + + Parameters + ---------- + url : str + URL of the Mercurial repository + path : str | Path + Local path for the repository + progress_callback : AsyncProgressCallbackProtocol, optional + Async callback for progress updates + """ + super().__init__( + url=url, path=path, progress_callback=progress_callback, **kwargs + ) + + self.cmd = AsyncHg(path=path, progress_callback=self.progress_callback) + + async def obtain(self, *args: t.Any, **kwargs: t.Any) -> None: + """Clone and update a Mercurial repository to this location asynchronously. + + Async equivalent of :meth:`~libvcs.sync.hg.HgSync.obtain`. + """ + self.ensure_dir() + + self.log.info("Cloning.") + await self.cmd.clone( + no_update=True, + quiet=True, + url=self.url, + log_in_real_time=True, + ) + await self.cmd.update( + quiet=True, + check_returncode=True, + log_in_real_time=True, + ) + + async def get_revision(self) -> str: + """Get latest revision of this mercurial repository asynchronously. + + Async equivalent of :meth:`~libvcs.sync.hg.HgSync.get_revision`. + + Returns + ------- + str + Current revision number + """ + return await self.run(["parents", "--template={rev}"]) + + async def update_repo(self, *args: t.Any, **kwargs: t.Any) -> None: + """Pull changes from remote Mercurial repository asynchronously. + + Async equivalent of :meth:`~libvcs.sync.hg.HgSync.update_repo`. + """ + self.ensure_dir() + + if not pathlib.Path(self.path / ".hg").exists(): + await self.obtain() + await self.update_repo() + else: + await self.cmd.update() + await self.cmd.pull(update=True) diff --git a/src/libvcs/sync/_async/svn.py b/src/libvcs/sync/_async/svn.py new file mode 100644 index 000000000..35ce4801c --- /dev/null +++ b/src/libvcs/sync/_async/svn.py @@ -0,0 +1,269 @@ +"""Async tool to manage a local SVN (Subversion) working copy from a repository. + +Async equivalent of :mod:`libvcs.sync.svn`. + +Note +---- +This is an internal API not covered by versioning policy. 
+""" + +from __future__ import annotations + +import logging +import os +import pathlib +import re +import typing as t + +from libvcs._internal.async_run import AsyncProgressCallbackProtocol +from libvcs._internal.types import StrPath +from libvcs.cmd._async.svn import AsyncSvn +from libvcs.sync._async.base import AsyncBaseSync +from libvcs.sync.svn import SvnUrlRevFormattingError + +logger = logging.getLogger(__name__) + + +class AsyncSvnSync(AsyncBaseSync): + """Async tool to manage a local SVN working copy from a SVN repository. + + Async equivalent of :class:`~libvcs.sync.svn.SvnSync`. + + Examples + -------- + >>> async def example(): + ... url = f'file://{create_svn_remote_repo()}' + ... repo_path = tmp_path / 'svn_sync_repo' + ... repo = AsyncSvnSync(url=url, path=repo_path) + ... await repo.obtain() + ... return (repo_path / '.svn').exists() + >>> asyncio.run(example()) + True + """ + + bin_name = "svn" + schemes = ("svn", "svn+ssh", "svn+http", "svn+https", "svn+svn") + cmd: AsyncSvn + + def __init__( + self, + *, + url: str, + path: StrPath, + progress_callback: AsyncProgressCallbackProtocol | None = None, + **kwargs: t.Any, + ) -> None: + """Initialize async SVN working copy manager. + + Parameters + ---------- + url : str + URL of the SVN repository + path : str | Path + Local path for the working copy + progress_callback : AsyncProgressCallbackProtocol, optional + Async callback for progress updates + username : str, optional + Username for SVN authentication + password : str, optional + Password for SVN authentication + svn_trust_cert : bool, optional + Trust the SVN server certificate, default False + """ + self.svn_trust_cert = kwargs.pop("svn_trust_cert", False) + self.username = kwargs.get("username") + self.password = kwargs.get("password") + self.rev = kwargs.get("rev") + + super().__init__( + url=url, path=path, progress_callback=progress_callback, **kwargs + ) + + self.cmd = AsyncSvn(path=path, progress_callback=self.progress_callback) + + async def obtain( + self, quiet: bool | None = None, *args: t.Any, **kwargs: t.Any + ) -> None: + """Check out a working copy from a SVN repository asynchronously. + + Async equivalent of :meth:`~libvcs.sync.svn.SvnSync.obtain`. + + Parameters + ---------- + quiet : bool, optional + Suppress output + """ + url, rev = self.url, self.rev + + if rev is not None: + kwargs["revision"] = rev + if self.svn_trust_cert: + kwargs["trust_server_cert"] = True + + await self.cmd.checkout( + url=url, + username=self.username, + password=self.password, + non_interactive=True, + quiet=True, + check_returncode=True, + **kwargs, + ) + + async def get_revision_file(self, location: str) -> int: + """Return revision for a file asynchronously. + + Async equivalent of :meth:`~libvcs.sync.svn.SvnSync.get_revision_file`. + + Parameters + ---------- + location : str + Path to the file + + Returns + ------- + int + Revision number + """ + current_rev = await self.cmd.info(target=location) + + INI_RE = re.compile(r"^([^:]+):\s+(\S.*)$", re.MULTILINE) + + info_list = INI_RE.findall(current_rev) + return int(dict(info_list)["Revision"]) + + async def get_revision(self, location: str | None = None) -> int: + """Return maximum revision for all files under a given location asynchronously. + + Async equivalent of :meth:`~libvcs.sync.svn.SvnSync.get_revision`. 
+ + Parameters + ---------- + location : str, optional + Path to check, defaults to self.url + + Returns + ------- + int + Maximum revision number + """ + if not location: + location = self.url + + if pathlib.Path(location).exists() and not pathlib.Path(location).is_dir(): + return await self.get_revision_file(location) + + # Note: taken from setuptools.command.egg_info + revision = 0 + + for base, dirs, _files in os.walk(location): + if ".svn" not in dirs: + dirs[:] = [] + continue # no sense walking uncontrolled subdirs + dirs.remove(".svn") + entries_fn = pathlib.Path(base) / ".svn" / "entries" + if not entries_fn.exists(): + # FIXME: should we warn? + continue + + dirurl, localrev = await self._get_svn_url_rev(base) + + if base == location: + assert dirurl is not None + base = dirurl + "/" # save the root url + elif not dirurl or not dirurl.startswith(base): + dirs[:] = [] + continue # not part of the same svn tree, skip it + revision = max(revision, localrev) + return revision + + async def update_repo( + self, + dest: str | None = None, + *args: t.Any, + **kwargs: t.Any, + ) -> None: + """Fetch changes from SVN repository to local working copy asynchronously. + + Async equivalent of :meth:`~libvcs.sync.svn.SvnSync.update_repo`. + + Parameters + ---------- + dest : str, optional + Destination path (unused, for API compatibility) + """ + self.ensure_dir() + if pathlib.Path(self.path / ".svn").exists(): + await self.cmd.checkout( + url=self.url, + username=self.username, + password=self.password, + non_interactive=True, + quiet=True, + check_returncode=True, + **kwargs, + ) + else: + await self.obtain() + await self.update_repo() + + async def _get_svn_url_rev(self, location: str) -> tuple[str | None, int]: + """Get SVN URL and revision from a working copy location asynchronously. + + Async equivalent of :meth:`~libvcs.sync.svn.SvnSync._get_svn_url_rev`. 
+
+        Parameters
+        ----------
+        location : str
+            Path to the working copy
+
+        Returns
+        -------
+        tuple[str | None, int]
+            Repository URL and revision number
+        """
+        svn_xml_url_re = re.compile(r'url="([^"]+)"')
+        svn_rev_re = re.compile(r'committed-rev="(\d+)"')
+        svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
+        svn_info_xml_url_re = re.compile(r"<url>(.*)</url>")
+
+        entries_path = pathlib.Path(location) / ".svn" / "entries"
+        if entries_path.exists():
+            with entries_path.open() as f:
+                data = f.read()
+        else:  # subversion >= 1.7 does not have the 'entries' file
+            data = ""
+
+        url = None
+        if data.startswith(("8", "9", "10")):
+            entries = list(map(str.splitlines, data.split("\n\x0c\n")))
+            del entries[0][0]  # get rid of the '8'
+            url = entries[0][3]
+            revs = [int(d[9]) for d in entries if len(d) > 9 and d[9]] + [0]
+        elif data.startswith("<?xml"):
+            match = svn_xml_url_re.search(data)
+            if not match:
+                msg = f"Badly formatted data: {data!r}"
+                raise ValueError(msg)
+            url = match.group(1)  # get repository URL
+            revs = [int(m.group(1)) for m in svn_rev_re.finditer(data)] + [0]
+        else:
+            try:
+                # subversion >= 1.7
+                xml = await self.cmd.info(
+                    target=pathlib.Path(location).absolute(),
+                    xml=True,
+                )
+                match = svn_info_xml_url_re.search(xml)
+                assert match is not None
+                url = match.group(1)
+                revs = [int(m.group(1)) for m in svn_info_xml_rev_re.finditer(xml)]
+            except exc.CommandError:
+                url, revs = None, []
+
+        if revs:
+            rev = max(revs)
+        else:
+            rev = 0
+
+        return url, rev
+    @pytest.mark.parametrize(
+        list(RunFixture._fields),
+        RUN_FIXTURES,
+        ids=[f.test_id for f in RUN_FIXTURES],
+    )
+    @pytest.mark.asyncio
+    async def test_run(
+        self,
+        test_id: str,
+        args: list[str],
+        kwargs: dict[str, t.Any],
+        should_raise: bool,
+        expected_output: str | None,
+    ) -> None:
+        """Test async_run() with various commands."""
+        if should_raise:
+            with pytest.raises(CommandError):
+                await async_run(args, **kwargs)
+        else:
+            output = await async_run(args, **kwargs)
+            if expected_output is not None:
+                assert output == expected_output
+
+    @pytest.mark.asyncio
+    async def test_run_with_cwd(self, tmp_path: Path) -> None:
+        """Test async_run() uses specified working directory."""
+        output = await async_run(["pwd"], cwd=tmp_path)
+        assert output == str(tmp_path)
+
+    @pytest.mark.asyncio
+    async def test_run_with_timeout(self) -> None:
+        """Test async_run() respects timeout."""
+        with pytest.raises(CommandTimeoutError):
+            await async_run(["sleep", "10"], timeout=0.1)
+
+    @pytest.mark.asyncio
+    async def test_run_timeout_error_attributes(self) -> None:
+        """Test CommandTimeoutError has expected attributes."""
+        with pytest.raises(CommandTimeoutError) as exc_info:
+            await async_run(["sleep", "10"], timeout=0.1)
+
+        assert exc_info.value.returncode == -1
+        assert "timed out" in exc_info.value.output
+
+    @pytest.mark.asyncio
+    async def test_run_command_error_attributes(self) -> None:
+        """Test CommandError has expected attributes."""
+        with pytest.raises(CommandError) as exc_info:
+            await async_run(["false"], check_returncode=True)
+
+        assert exc_info.value.returncode == 1
+
+    @pytest.mark.asyncio
+    async def test_run_with_callback(self) -> None:
+        """Test async_run() calls progress callback."""
+        progress_output: list[str] = []
+        timestamps: list[datetime.datetime] = []
+
+        async def callback(output: str, timestamp: datetime.datetime) -> None:
+            progress_output.append(output)
+            timestamps.append(timestamp)
+
+        # Use a command that writes to stderr
+        await async_run(
+            ["sh", "-c", "echo stderr_line >&2"],
+            callback=callback,
+            check_returncode=True,
+        )
+
+        # Should have received stderr output + final \r
+        assert len(progress_output) >= 1
+        assert any("stderr_line" in p for p in progress_output)
+        # Final \r is sent
+        assert progress_output[-1] == "\r"
+
+    @pytest.mark.asyncio
+    async def test_run_callback_receives_timestamps(self) -> None:
+        """Test callback receives valid datetime timestamps."""
+        timestamps: list[datetime.datetime] = []
+
+        async def callback(output: str, timestamp: datetime.datetime) -> None:
+            timestamps.append(timestamp)
+
+        await async_run(
+            ["sh", "-c", "echo line >&2"],
+            callback=callback,
+        )
+
+        assert len(timestamps) >= 1
+        for ts in timestamps:
+            assert isinstance(ts, datetime.datetime)
+
+    @pytest.mark.asyncio
+    async def test_run_stderr_on_error(self) -> None:
+        """Test stderr content is returned on command error."""
+        output = await async_run(
+            ["sh", "-c", "echo error_msg >&2; exit 1"],
+
check_returncode=False, + ) + assert "error_msg" in output + + @pytest.mark.asyncio + async def test_run_concurrent(self) -> None: + """Test running multiple commands concurrently.""" + + async def run_echo(i: int) -> str: + return await async_run(["echo", str(i)]) + + results = await asyncio.gather(*[run_echo(i) for i in range(5)]) + + assert len(results) == 5 + for i, result in enumerate(results): + assert result == str(i) + + +class TestWrapSyncCallback: + """Tests for wrap_sync_callback helper.""" + + @pytest.mark.asyncio + async def test_wrap_sync_callback(self) -> None: + """Test wrap_sync_callback creates working async wrapper.""" + calls: list[tuple[str, datetime.datetime]] = [] + + def sync_cb(output: str, timestamp: datetime.datetime) -> None: + calls.append((output, timestamp)) + + async_cb = wrap_sync_callback(sync_cb) + + # Verify it's a valid async callback + now = datetime.datetime.now() + await async_cb("test", now) + + assert len(calls) == 1 + assert calls[0] == ("test", now) + + @pytest.mark.asyncio + async def test_wrap_sync_callback_type(self) -> None: + """Test wrapped callback conforms to protocol.""" + + def sync_cb(output: str, timestamp: datetime.datetime) -> None: + pass + + async_cb = wrap_sync_callback(sync_cb) + + # Type check: should be usable where AsyncProgressCallbackProtocol expected + callback: AsyncProgressCallbackProtocol = async_cb + await callback("test", datetime.datetime.now()) + + +class TestAsyncProgressCallbackProtocol: + """Tests for AsyncProgressCallbackProtocol.""" + + @pytest.mark.asyncio + async def test_protocol_implementation(self) -> None: + """Test that a function can implement the protocol.""" + received: list[str] = [] + + async def my_callback(output: str, timestamp: datetime.datetime) -> None: + received.append(output) + + # Use as protocol type + cb: AsyncProgressCallbackProtocol = my_callback + await cb("hello", datetime.datetime.now()) + + assert received == ["hello"] + + +class TestArgsToList: + """Tests for _args_to_list helper.""" + + @pytest.mark.asyncio + async def test_string_arg(self) -> None: + """Test single string argument.""" + output = await async_run("echo") + assert output == "" + + @pytest.mark.asyncio + async def test_path_arg(self, tmp_path: Path) -> None: + """Test Path argument.""" + test_script = tmp_path / "test.sh" + test_script.write_text("#!/bin/sh\necho working") + test_script.chmod(0o755) + + output = await async_run(test_script) + assert output == "working" + + @pytest.mark.asyncio + async def test_bytes_arg(self) -> None: + """Test bytes argument.""" + output = await async_run([b"echo", b"bytes_test"]) + assert output == "bytes_test" diff --git a/tests/_internal/test_async_subprocess.py b/tests/_internal/test_async_subprocess.py new file mode 100644 index 000000000..0da733045 --- /dev/null +++ b/tests/_internal/test_async_subprocess.py @@ -0,0 +1,234 @@ +"""Tests for libvcs._internal.async_subprocess.""" + +from __future__ import annotations + +import asyncio +import subprocess +import typing as t +from pathlib import Path + +import pytest + +from libvcs._internal.async_subprocess import ( + AsyncCompletedProcess, + AsyncSubprocessCommand, +) + + +class RunFixture(t.NamedTuple): + """Test fixture for AsyncSubprocessCommand.run().""" + + test_id: str + args: list[str] + kwargs: dict[str, t.Any] + expected_stdout: str | None + expected_returncode: int + + +RUN_FIXTURES = [ + RunFixture( + test_id="echo_text", + args=["echo", "hello"], + kwargs={"text": True}, + expected_stdout="hello\n", + 
expected_returncode=0, + ), + RunFixture( + test_id="echo_bytes", + args=["echo", "hello"], + kwargs={"text": False}, + expected_stdout=None, # bytes comparison handled separately + expected_returncode=0, + ), + RunFixture( + test_id="true_command", + args=["true"], + kwargs={}, + expected_stdout=None, + expected_returncode=0, + ), + RunFixture( + test_id="false_command", + args=["false"], + kwargs={"check": False}, + expected_stdout=None, + expected_returncode=1, + ), +] + + +class TestAsyncCompletedProcess: + """Tests for AsyncCompletedProcess dataclass.""" + + def test_init(self) -> None: + """Test basic initialization.""" + result = AsyncCompletedProcess( + args=["echo", "test"], + returncode=0, + stdout="test\n", + stderr="", + ) + assert result.args == ["echo", "test"] + assert result.returncode == 0 + assert result.stdout == "test\n" + assert result.stderr == "" + + def test_check_returncode_success(self) -> None: + """Test check_returncode with zero exit code.""" + result = AsyncCompletedProcess( + args=["true"], + returncode=0, + ) + # Should not raise + result.check_returncode() + + def test_check_returncode_failure(self) -> None: + """Test check_returncode with non-zero exit code.""" + result = AsyncCompletedProcess( + args=["false"], + returncode=1, + stdout="", + stderr="error", + ) + with pytest.raises(subprocess.CalledProcessError) as exc_info: + result.check_returncode() + assert exc_info.value.returncode == 1 + assert exc_info.value.cmd == ["false"] + + +class TestAsyncSubprocessCommand: + """Tests for AsyncSubprocessCommand.""" + + def test_init(self) -> None: + """Test basic initialization.""" + cmd = AsyncSubprocessCommand(args=["echo", "hello"]) + assert cmd.args == ["echo", "hello"] + assert cmd.cwd is None + assert cmd.env is None + + def test_init_with_cwd(self, tmp_path: Path) -> None: + """Test initialization with working directory.""" + cmd = AsyncSubprocessCommand(args=["pwd"], cwd=tmp_path) + assert cmd.cwd == tmp_path + + def test_args_as_list_sequence(self) -> None: + """Test _args_as_list with sequence of strings.""" + cmd = AsyncSubprocessCommand(args=["echo", "hello", "world"]) + assert cmd._args_as_list() == ["echo", "hello", "world"] + + def test_args_as_list_single_string(self) -> None: + """Test _args_as_list with single string.""" + cmd = AsyncSubprocessCommand(args="echo") + assert cmd._args_as_list() == ["echo"] + + def test_args_as_list_path(self, tmp_path: Path) -> None: + """Test _args_as_list with Path object.""" + cmd = AsyncSubprocessCommand(args=tmp_path) + assert cmd._args_as_list() == [str(tmp_path)] + + @pytest.mark.parametrize( + list(RunFixture._fields), + RUN_FIXTURES, + ids=[f.test_id for f in RUN_FIXTURES], + ) + @pytest.mark.asyncio + async def test_run( + self, + test_id: str, + args: list[str], + kwargs: dict[str, t.Any], + expected_stdout: str | None, + expected_returncode: int, + ) -> None: + """Test run() with various commands.""" + cmd = AsyncSubprocessCommand(args=args) + result = await cmd.run(**kwargs) + + assert result.returncode == expected_returncode + if expected_stdout is not None: + assert result.stdout == expected_stdout + + @pytest.mark.asyncio + async def test_run_bytes_output(self) -> None: + """Test run() returns bytes when text=False.""" + cmd = AsyncSubprocessCommand(args=["echo", "hello"]) + result = await cmd.run(text=False) + + assert isinstance(result.stdout, bytes) + assert result.stdout == b"hello\n" + + @pytest.mark.asyncio + async def test_run_with_input(self) -> None: + """Test run() with stdin 
input.""" + cmd = AsyncSubprocessCommand(args=["cat"]) + result = await cmd.run(input="hello", text=True) + + assert result.stdout == "hello" + assert result.returncode == 0 + + @pytest.mark.asyncio + async def test_run_with_check_raises(self) -> None: + """Test run() with check=True raises on non-zero exit.""" + cmd = AsyncSubprocessCommand(args=["false"]) + with pytest.raises(subprocess.CalledProcessError) as exc_info: + await cmd.run(check=True) + assert exc_info.value.returncode == 1 + + @pytest.mark.asyncio + async def test_run_with_timeout(self) -> None: + """Test run() respects timeout.""" + cmd = AsyncSubprocessCommand(args=["sleep", "10"]) + with pytest.raises(asyncio.TimeoutError): + await cmd.run(timeout=0.1) + + @pytest.mark.asyncio + async def test_run_with_cwd(self, tmp_path: Path) -> None: + """Test run() uses specified working directory.""" + cmd = AsyncSubprocessCommand(args=["pwd"], cwd=tmp_path) + result = await cmd.run(text=True) + + assert result.stdout is not None + assert result.stdout.strip() == str(tmp_path) + + @pytest.mark.asyncio + async def test_check_output(self) -> None: + """Test check_output() returns stdout.""" + cmd = AsyncSubprocessCommand(args=["echo", "hello"]) + output = await cmd.check_output(text=True) + + assert output == "hello\n" + + @pytest.mark.asyncio + async def test_check_output_raises_on_error(self) -> None: + """Test check_output() raises on non-zero exit.""" + cmd = AsyncSubprocessCommand(args=["false"]) + with pytest.raises(subprocess.CalledProcessError): + await cmd.check_output() + + @pytest.mark.asyncio + async def test_wait(self) -> None: + """Test wait() returns exit code.""" + cmd = AsyncSubprocessCommand(args=["true"]) + returncode = await cmd.wait() + + assert returncode == 0 + + @pytest.mark.asyncio + async def test_wait_with_timeout(self) -> None: + """Test wait() respects timeout.""" + cmd = AsyncSubprocessCommand(args=["sleep", "10"]) + with pytest.raises(asyncio.TimeoutError): + await cmd.wait(timeout=0.1) + + @pytest.mark.asyncio + async def test_concurrent_commands(self) -> None: + """Test running multiple commands concurrently.""" + commands = [AsyncSubprocessCommand(args=["echo", str(i)]) for i in range(5)] + + results = await asyncio.gather(*[cmd.run(text=True) for cmd in commands]) + + assert len(results) == 5 + for i, result in enumerate(results): + assert result.stdout is not None + assert result.stdout.strip() == str(i) + assert result.returncode == 0 diff --git a/tests/_internal/test_copy.py b/tests/_internal/test_copy.py new file mode 100644 index 000000000..ee8b94a03 --- /dev/null +++ b/tests/_internal/test_copy.py @@ -0,0 +1,389 @@ +"""Tests for libvcs._internal.copy.""" + +from __future__ import annotations + +import shutil +import subprocess +import typing as t +from pathlib import Path +from unittest.mock import MagicMock, patch + +import pytest + +from libvcs._internal.copy import _apply_ignore_patterns, copytree_reflink + +# ============================================================================= +# copytree_reflink Tests +# ============================================================================= + + +class CopyFixture(t.NamedTuple): + """Test fixture for copytree_reflink scenarios.""" + + test_id: str + setup_files: dict[str, str] # filename -> content + ignore_pattern: str | None + expected_files: list[str] + description: str + + +COPY_FIXTURES = [ + CopyFixture( + test_id="simple_copy", + setup_files={"file.txt": "hello", "subdir/nested.txt": "world"}, + ignore_pattern=None, + 
expected_files=["file.txt", "subdir/nested.txt"], + description="Copy all files without ignore patterns", + ), + CopyFixture( + test_id="ignore_pyc", + setup_files={ + "keep.py": "code", + "skip.pyc": "compiled", + "subdir/keep2.py": "more code", + "subdir/skip2.pyc": "also compiled", + }, + ignore_pattern="*.pyc", + expected_files=["keep.py", "subdir/keep2.py"], + description="Ignore .pyc files", + ), + CopyFixture( + test_id="ignore_directory", + setup_files={ + "keep.txt": "keep", + "__pycache__/cached.pyc": "cache", + "src/code.py": "code", + }, + ignore_pattern="__pycache__", + expected_files=["keep.txt", "src/code.py"], + description="Ignore __pycache__ directory", + ), + CopyFixture( + test_id="empty_directory", + setup_files={}, + ignore_pattern=None, + expected_files=[], + description="Copy empty directory", + ), +] + + +class TestCopytreeReflink: + """Tests for copytree_reflink function.""" + + @pytest.mark.parametrize( + list(CopyFixture._fields), + COPY_FIXTURES, + ids=[f.test_id for f in COPY_FIXTURES], + ) + def test_copy_scenarios( + self, + tmp_path: Path, + test_id: str, + setup_files: dict[str, str], + ignore_pattern: str | None, + expected_files: list[str], + description: str, + ) -> None: + """Test various copy scenarios.""" + # Setup source directory + src = tmp_path / "source" + src.mkdir() + for filename, content in setup_files.items(): + file_path = src / filename + file_path.parent.mkdir(parents=True, exist_ok=True) + file_path.write_text(content) + + dst = tmp_path / "dest" + ignore = shutil.ignore_patterns(ignore_pattern) if ignore_pattern else None + + # Perform copy + result = copytree_reflink(src, dst, ignore=ignore) + + # Verify result + assert result == dst + assert dst.exists() + + # Verify expected files exist + for expected_file in expected_files: + expected_path = dst / expected_file + assert expected_path.exists(), f"Expected {expected_file} to exist" + + # Verify ignored files don't exist (if pattern was provided) + if ignore_pattern and setup_files: + for filename in setup_files: + if filename.endswith(ignore_pattern.replace("*", "")): + file_exists = (dst / filename).exists() + assert not file_exists, f"{filename} should be ignored" + + def test_preserves_content(self, tmp_path: Path) -> None: + """Test that file contents are preserved.""" + src = tmp_path / "source" + src.mkdir() + (src / "file.txt").write_text("original content") + + dst = tmp_path / "dest" + copytree_reflink(src, dst) + + assert (dst / "file.txt").read_text() == "original content" + + def test_creates_parent_directories(self, tmp_path: Path) -> None: + """Test that parent directories are created if needed.""" + src = tmp_path / "source" + src.mkdir() + (src / "file.txt").write_text("content") + + dst = tmp_path / "deep" / "nested" / "dest" + copytree_reflink(src, dst) + + assert dst.exists() + assert (dst / "file.txt").exists() + + def test_fallback_on_cp_failure(self, tmp_path: Path) -> None: + """Test fallback to shutil.copytree when cp fails.""" + src = tmp_path / "source" + src.mkdir() + (src / "file.txt").write_text("content") + + dst = tmp_path / "dest" + + # Mock subprocess.run to simulate cp failure + with patch("subprocess.run") as mock_run: + mock_run.side_effect = subprocess.CalledProcessError(1, "cp") + result = copytree_reflink(src, dst) + + assert result == dst + assert (dst / "file.txt").exists() + + def test_fallback_on_cp_not_found(self, tmp_path: Path) -> None: + """Test fallback when cp command is not found (e.g., Windows).""" + src = tmp_path / "source" + 
src.mkdir() + (src / "file.txt").write_text("content") + + dst = tmp_path / "dest" + + # Mock subprocess.run to simulate FileNotFoundError + with patch("subprocess.run") as mock_run: + mock_run.side_effect = FileNotFoundError("cp not found") + result = copytree_reflink(src, dst) + + assert result == dst + assert (dst / "file.txt").exists() + + def test_fallback_on_os_error(self, tmp_path: Path) -> None: + """Test fallback on OSError.""" + src = tmp_path / "source" + src.mkdir() + (src / "file.txt").write_text("content") + + dst = tmp_path / "dest" + + # Mock subprocess.run to simulate OSError + with patch("subprocess.run") as mock_run: + mock_run.side_effect = OSError("Unexpected error") + result = copytree_reflink(src, dst) + + assert result == dst + assert (dst / "file.txt").exists() + + def test_uses_cp_reflink_auto(self, tmp_path: Path) -> None: + """Test that cp --reflink=auto is attempted first.""" + src = tmp_path / "source" + src.mkdir() + (src / "file.txt").write_text("content") + + dst = tmp_path / "dest" + + with patch("subprocess.run") as mock_run: + # Simulate successful cp + mock_run.return_value = MagicMock(returncode=0) + copytree_reflink(src, dst) + + # Verify cp was called with correct arguments + mock_run.assert_called_once() + call_args = mock_run.call_args + assert call_args[0][0] == ["cp", "-a", "--reflink=auto", str(src), str(dst)] + + def test_returns_pathlib_path(self, tmp_path: Path) -> None: + """Test that result is always a pathlib.Path.""" + src = tmp_path / "source" + src.mkdir() + + dst = tmp_path / "dest" + result = copytree_reflink(src, dst) + + assert isinstance(result, Path) + + +# ============================================================================= +# _apply_ignore_patterns Tests +# ============================================================================= + + +class IgnorePatternFixture(t.NamedTuple): + """Test fixture for ignore pattern scenarios.""" + + test_id: str + setup_files: list[str] + ignore_pattern: str + expected_remaining: list[str] + description: str + + +IGNORE_PATTERN_FIXTURES = [ + IgnorePatternFixture( + test_id="ignore_pyc", + setup_files=["keep.py", "skip.pyc"], + ignore_pattern="*.pyc", + expected_remaining=["keep.py"], + description="Remove .pyc files", + ), + IgnorePatternFixture( + test_id="ignore_directory", + setup_files=["keep.txt", "__pycache__/file.pyc"], + ignore_pattern="__pycache__", + expected_remaining=["keep.txt"], + description="Remove __pycache__ directory", + ), + IgnorePatternFixture( + test_id="nested_pattern", + setup_files=["a/keep.txt", "a/b/skip.tmp", "a/c/keep2.txt"], + ignore_pattern="*.tmp", + expected_remaining=["a/keep.txt", "a/c/keep2.txt"], + description="Remove nested .tmp files", + ), +] + + +class TestApplyIgnorePatterns: + """Tests for _apply_ignore_patterns function.""" + + @pytest.mark.parametrize( + list(IgnorePatternFixture._fields), + IGNORE_PATTERN_FIXTURES, + ids=[f.test_id for f in IGNORE_PATTERN_FIXTURES], + ) + def test_ignore_pattern_scenarios( + self, + tmp_path: Path, + test_id: str, + setup_files: list[str], + ignore_pattern: str, + expected_remaining: list[str], + description: str, + ) -> None: + """Test various ignore pattern scenarios.""" + # Setup directory with files + for filepath in setup_files: + file_path = tmp_path / filepath + file_path.parent.mkdir(parents=True, exist_ok=True) + file_path.write_text("content") + + # Apply ignore patterns + ignore = shutil.ignore_patterns(ignore_pattern) + _apply_ignore_patterns(tmp_path, ignore) + + # Verify expected files 
remain + for expected in expected_remaining: + assert (tmp_path / expected).exists(), f"Expected {expected} to remain" + + # Verify ignored files are removed + for filepath in setup_files: + if filepath not in expected_remaining: + # Check if file or any parent directory was removed + full_path = tmp_path / filepath + assert not full_path.exists(), f"Expected {filepath} to be removed" + + def test_empty_directory(self, tmp_path: Path) -> None: + """Test ignore patterns on empty directory.""" + ignore = shutil.ignore_patterns("*.pyc") + # Should not raise + _apply_ignore_patterns(tmp_path, ignore) + + def test_no_matches(self, tmp_path: Path) -> None: + """Test when no files match ignore pattern.""" + (tmp_path / "file.txt").write_text("content") + (tmp_path / "other.py").write_text("code") + + ignore = shutil.ignore_patterns("*.pyc") + _apply_ignore_patterns(tmp_path, ignore) + + # All files should remain + assert (tmp_path / "file.txt").exists() + assert (tmp_path / "other.py").exists() + + +# ============================================================================= +# Integration Tests +# ============================================================================= + + +class TestCopyIntegration: + """Integration tests for copy operations.""" + + def test_copy_git_like_structure(self, tmp_path: Path) -> None: + """Test copying a git-like directory structure.""" + src = tmp_path / "repo" + src.mkdir() + + # Create git-like structure + (src / ".git" / "HEAD").parent.mkdir(parents=True) + (src / ".git" / "HEAD").write_text("ref: refs/heads/main") + (src / ".git" / "config").write_text("[core]\nrepositoryformatversion = 0") + (src / "README.md").write_text("# Project") + (src / "src" / "main.py").parent.mkdir(parents=True) + (src / "src" / "main.py").write_text("print('hello')") + + dst = tmp_path / "clone" + copytree_reflink(src, dst) + + # Verify structure + assert (dst / ".git" / "HEAD").exists() + assert (dst / ".git" / "config").exists() + assert (dst / "README.md").exists() + assert (dst / "src" / "main.py").exists() + + # Verify content + assert (dst / ".git" / "HEAD").read_text() == "ref: refs/heads/main" + assert (dst / "README.md").read_text() == "# Project" + + def test_copy_with_marker_file_ignore(self, tmp_path: Path) -> None: + """Test ignoring marker files like fixtures do.""" + src = tmp_path / "master" + src.mkdir() + + (src / ".libvcs_master_initialized").write_text("") + (src / ".git" / "HEAD").parent.mkdir(parents=True) + (src / ".git" / "HEAD").write_text("ref: refs/heads/main") + (src / "file.txt").write_text("content") + + dst = tmp_path / "workspace" + copytree_reflink( + src, + dst, + ignore=shutil.ignore_patterns(".libvcs_master_initialized"), + ) + + # Marker file should be ignored + assert not (dst / ".libvcs_master_initialized").exists() + + # Other files should be copied + assert (dst / ".git" / "HEAD").exists() + assert (dst / "file.txt").exists() + + def test_workspace_is_writable(self, tmp_path: Path) -> None: + """Test that copied files are writable (important for test fixtures).""" + src = tmp_path / "source" + src.mkdir() + (src / "file.txt").write_text("original") + + dst = tmp_path / "dest" + copytree_reflink(src, dst) + + # Modify copied file (tests should be able to do this) + (dst / "file.txt").write_text("modified") + assert (dst / "file.txt").read_text() == "modified" + + # Original should be unchanged + assert (src / "file.txt").read_text() == "original" diff --git a/tests/_internal/test_file_lock.py b/tests/_internal/test_file_lock.py new file 
mode 100644 index 000000000..dc96204b7 --- /dev/null +++ b/tests/_internal/test_file_lock.py @@ -0,0 +1,693 @@ +"""Tests for libvcs._internal.file_lock.""" + +from __future__ import annotations + +import asyncio +import os +import pickle # Used to test exception picklability for multiprocessing support +import threading +import time +import typing as t +from pathlib import Path + +import pytest + +from libvcs._internal.file_lock import ( + AcquireReturnProxy, + AsyncAcquireReturnProxy, + AsyncFileLock, + FileLock, + FileLockContext, + FileLockError, + FileLockStale, + FileLockTimeout, + async_atomic_init, + atomic_init, +) + +# ============================================================================= +# FileLock Sync Tests +# ============================================================================= + + +class LockAcquireFixture(t.NamedTuple): + """Test fixture for FileLock acquisition scenarios.""" + + test_id: str + timeout: float + should_acquire: bool + description: str + + +LOCK_ACQUIRE_FIXTURES = [ + LockAcquireFixture( + test_id="default_timeout", + timeout=-1.0, + should_acquire=True, + description="Acquire with infinite timeout", + ), + LockAcquireFixture( + test_id="explicit_timeout", + timeout=5.0, + should_acquire=True, + description="Acquire with 5s timeout", + ), + LockAcquireFixture( + test_id="zero_timeout", + timeout=0.0, + should_acquire=True, + description="Non-blocking acquire on free lock", + ), +] + + +class TestFileLock: + """Tests for FileLock synchronous operations.""" + + @pytest.mark.parametrize( + list(LockAcquireFixture._fields), + LOCK_ACQUIRE_FIXTURES, + ids=[f.test_id for f in LOCK_ACQUIRE_FIXTURES], + ) + def test_acquire_scenarios( + self, + tmp_path: Path, + test_id: str, + timeout: float, + should_acquire: bool, + description: str, + ) -> None: + """Test various lock acquisition scenarios.""" + lock_path = tmp_path / "test.lock" + lock = FileLock(lock_path, timeout=timeout) + + if should_acquire: + with lock: + assert lock.is_locked + assert lock.lock_counter == 1 + assert not lock.is_locked + + def test_context_manager(self, tmp_path: Path) -> None: + """Test FileLock as context manager.""" + lock_path = tmp_path / "test.lock" + lock = FileLock(lock_path) + + assert not lock.is_locked + with lock: + assert lock.is_locked + assert lock_path.exists() + assert not lock.is_locked + assert not lock_path.exists() + + def test_explicit_acquire_release(self, tmp_path: Path) -> None: + """Test explicit acquire() and release().""" + lock_path = tmp_path / "test.lock" + lock = FileLock(lock_path) + + proxy = lock.acquire() + assert isinstance(proxy, AcquireReturnProxy) + assert lock.is_locked + assert lock.lock_counter == 1 + + lock.release() + assert not lock.is_locked + assert lock.lock_counter == 0 + + def test_reentrant_locking(self, tmp_path: Path) -> None: + """Test reentrant lock acquisition.""" + lock_path = tmp_path / "test.lock" + lock = FileLock(lock_path) + + lock.acquire() + assert lock.lock_counter == 1 + + lock.acquire() + assert lock.lock_counter == 2 + + lock.acquire() + assert lock.lock_counter == 3 + + lock.release() + assert lock.lock_counter == 2 + assert lock.is_locked + + lock.release() + assert lock.lock_counter == 1 + assert lock.is_locked + + lock.release() + assert lock.lock_counter == 0 + assert not lock.is_locked + + def test_force_release(self, tmp_path: Path) -> None: + """Test force=True releases regardless of counter.""" + lock_path = tmp_path / "test.lock" + lock = FileLock(lock_path) + + lock.acquire() + lock.acquire() + 
lock.acquire() + assert lock.lock_counter == 3 + + lock.release(force=True) + assert lock.lock_counter == 0 + assert not lock.is_locked + + def test_non_blocking_acquire(self, tmp_path: Path) -> None: + """Test non-blocking acquire with blocking=False.""" + lock_path = tmp_path / "test.lock" + + # First lock acquires + lock1 = FileLock(lock_path) + lock1.acquire() + + # Second lock should fail immediately + lock2 = FileLock(lock_path) + with pytest.raises(FileLockTimeout): + lock2.acquire(blocking=False) + + lock1.release() + + def test_timeout_on_held_lock(self, tmp_path: Path) -> None: + """Test timeout when lock is held by another process.""" + lock_path = tmp_path / "test.lock" + + # Hold the lock + lock1 = FileLock(lock_path) + lock1.acquire() + + # Try to acquire with short timeout + lock2 = FileLock(lock_path, timeout=0.1) + with pytest.raises(FileLockTimeout) as exc_info: + lock2.acquire() + + assert exc_info.value.lock_file == str(lock_path) + assert exc_info.value.timeout == 0.1 + + lock1.release() + + def test_pid_written_to_lock_file(self, tmp_path: Path) -> None: + """Test PID is written to lock file for debugging.""" + lock_path = tmp_path / "test.lock" + lock = FileLock(lock_path) + + with lock: + content = lock_path.read_text() + assert content == str(os.getpid()) + + def test_acquire_return_proxy_context(self, tmp_path: Path) -> None: + """Test AcquireReturnProxy as context manager.""" + lock_path = tmp_path / "test.lock" + lock = FileLock(lock_path) + + with lock.acquire() as acquired_lock: + assert acquired_lock is lock + assert lock.is_locked + + assert not lock.is_locked + + def test_lock_file_property(self, tmp_path: Path) -> None: + """Test lock_file property returns path.""" + lock_path = tmp_path / "test.lock" + lock = FileLock(lock_path) + + assert lock.lock_file == str(lock_path) + + def test_repr(self, tmp_path: Path) -> None: + """Test __repr__ shows lock state.""" + lock_path = tmp_path / "test.lock" + lock = FileLock(lock_path) + + assert "unlocked" in repr(lock) + with lock: + assert "locked" in repr(lock) + + +class StaleLockFixture(t.NamedTuple): + """Test fixture for stale lock scenarios.""" + + test_id: str + stale_timeout: float + sleep_time: float + should_acquire: bool + + +STALE_LOCK_FIXTURES = [ + StaleLockFixture( + test_id="fresh_lock_blocks", + stale_timeout=1.0, + sleep_time=0.0, + should_acquire=False, + ), + StaleLockFixture( + test_id="stale_lock_acquired", + stale_timeout=0.1, + sleep_time=0.2, + should_acquire=True, + ), +] + + +class TestFileLockStaleDetection: + """Tests for stale lock detection and removal.""" + + @pytest.mark.parametrize( + list(StaleLockFixture._fields), + STALE_LOCK_FIXTURES, + ids=[f.test_id for f in STALE_LOCK_FIXTURES], + ) + def test_stale_detection( + self, + tmp_path: Path, + test_id: str, + stale_timeout: float, + sleep_time: float, + should_acquire: bool, + ) -> None: + """Test stale lock detection scenarios.""" + lock_path = tmp_path / "test.lock" + + # Create a lock file manually (simulating orphaned lock) + lock_path.parent.mkdir(parents=True, exist_ok=True) + lock_path.write_text(str(os.getpid())) + + if sleep_time > 0: + time.sleep(sleep_time) + + lock = FileLock(lock_path, timeout=0.05, stale_timeout=stale_timeout) + + if should_acquire: + with lock: + assert lock.is_locked + else: + with pytest.raises(FileLockTimeout): + lock.acquire() + + +# ============================================================================= +# AsyncFileLock Tests +# 
============================================================================= + + +class TestAsyncFileLock: + """Tests for AsyncFileLock asynchronous operations.""" + + @pytest.mark.asyncio + async def test_async_context_manager(self, tmp_path: Path) -> None: + """Test AsyncFileLock as async context manager.""" + lock_path = tmp_path / "test.lock" + lock = AsyncFileLock(lock_path) + + assert not lock.is_locked + async with lock: + assert lock.is_locked + assert lock_path.exists() + assert not lock.is_locked + + @pytest.mark.asyncio + async def test_async_explicit_acquire_release(self, tmp_path: Path) -> None: + """Test explicit acquire() and release() for async lock.""" + lock_path = tmp_path / "test.lock" + lock = AsyncFileLock(lock_path) + + proxy = await lock.acquire() + assert isinstance(proxy, AsyncAcquireReturnProxy) + assert lock.is_locked + + await lock.release() + assert not lock.is_locked + + @pytest.mark.asyncio + async def test_async_reentrant(self, tmp_path: Path) -> None: + """Test async reentrant locking.""" + lock_path = tmp_path / "test.lock" + lock = AsyncFileLock(lock_path) + + await lock.acquire() + assert lock.lock_counter == 1 + + await lock.acquire() + assert lock.lock_counter == 2 + + await lock.release() + assert lock.lock_counter == 1 + + await lock.release() + assert lock.lock_counter == 0 + + @pytest.mark.asyncio + async def test_async_timeout(self, tmp_path: Path) -> None: + """Test async lock timeout.""" + lock_path = tmp_path / "test.lock" + + lock1 = AsyncFileLock(lock_path) + await lock1.acquire() + + lock2 = AsyncFileLock(lock_path, timeout=0.1) + with pytest.raises(FileLockTimeout): + await lock2.acquire() + + await lock1.release() + + @pytest.mark.asyncio + async def test_async_non_blocking(self, tmp_path: Path) -> None: + """Test async non-blocking acquire.""" + lock_path = tmp_path / "test.lock" + + lock1 = AsyncFileLock(lock_path) + await lock1.acquire() + + lock2 = AsyncFileLock(lock_path) + with pytest.raises(FileLockTimeout): + await lock2.acquire(blocking=False) + + await lock1.release() + + @pytest.mark.asyncio + async def test_async_acquire_proxy_context(self, tmp_path: Path) -> None: + """Test AsyncAcquireReturnProxy as async context manager.""" + lock_path = tmp_path / "test.lock" + lock = AsyncFileLock(lock_path) + + proxy = await lock.acquire() + async with proxy as acquired_lock: + assert acquired_lock is lock + assert lock.is_locked + + assert not lock.is_locked + + @pytest.mark.asyncio + async def test_async_concurrent_acquisition(self, tmp_path: Path) -> None: + """Test concurrent async lock acquisition.""" + lock_path = tmp_path / "test.lock" + results: list[int] = [] + + async def worker(lock: AsyncFileLock, worker_id: int) -> None: + async with lock: + results.append(worker_id) + await asyncio.sleep(0.01) + + lock = AsyncFileLock(lock_path) + await asyncio.gather(*[worker(lock, i) for i in range(3)]) + + # All workers should have completed + assert len(results) == 3 + # Results should be sequential (one at a time) + assert sorted(results) == list(range(3)) + + @pytest.mark.asyncio + async def test_async_repr(self, tmp_path: Path) -> None: + """Test __repr__ for async lock.""" + lock_path = tmp_path / "test.lock" + lock = AsyncFileLock(lock_path) + + assert "unlocked" in repr(lock) + async with lock: + assert "locked" in repr(lock) + + +# ============================================================================= +# FileLockContext Tests +# ============================================================================= + + +class 
TestFileLockContext: + """Tests for FileLockContext dataclass.""" + + def test_default_values(self) -> None: + """Test default values are set correctly.""" + ctx = FileLockContext("/tmp/test.lock") + + assert ctx.lock_file == "/tmp/test.lock" + assert ctx.timeout == -1.0 + assert ctx.poll_interval == 0.05 + assert ctx.stale_timeout == 300.0 + assert ctx.mode == 0o644 + assert ctx.lock_file_fd is None + assert ctx.lock_counter == 0 + + def test_is_locked_property(self) -> None: + """Test is_locked property.""" + ctx = FileLockContext("/tmp/test.lock") + + assert not ctx.is_locked + + ctx.lock_file_fd = 5 + assert ctx.is_locked + + +# ============================================================================= +# Exception Tests +# ============================================================================= + + +class TestExceptions: + """Tests for exception classes.""" + + def test_file_lock_timeout_message(self) -> None: + """Test FileLockTimeout message format.""" + exc = FileLockTimeout("/tmp/test.lock", 30.0) + + assert str(exc) == "Timeout (30.0s) waiting for lock: /tmp/test.lock" + assert exc.lock_file == "/tmp/test.lock" + assert exc.timeout == 30.0 + + def test_file_lock_timeout_inheritance(self) -> None: + """Test FileLockTimeout inherits from TimeoutError.""" + exc = FileLockTimeout("/tmp/test.lock", 30.0) + + assert isinstance(exc, FileLockError) + assert isinstance(exc, TimeoutError) + + def test_file_lock_timeout_picklable(self) -> None: + """Test FileLockTimeout is picklable for multiprocessing support.""" + exc = FileLockTimeout("/tmp/test.lock", 30.0) + pickled = pickle.dumps(exc) + restored = pickle.loads(pickled) + + assert restored.lock_file == exc.lock_file + assert restored.timeout == exc.timeout + + def test_file_lock_stale_message(self) -> None: + """Test FileLockStale message format.""" + exc = FileLockStale("/tmp/test.lock", 3600.0) + + assert str(exc) == "Stale lock (3600.0s old): /tmp/test.lock" + assert exc.lock_file == "/tmp/test.lock" + assert exc.age_seconds == 3600.0 + + def test_file_lock_stale_picklable(self) -> None: + """Test FileLockStale is picklable for multiprocessing support.""" + exc = FileLockStale("/tmp/test.lock", 3600.0) + pickled = pickle.dumps(exc) + restored = pickle.loads(pickled) + + assert restored.lock_file == exc.lock_file + assert restored.age_seconds == exc.age_seconds + + +# ============================================================================= +# atomic_init Tests +# ============================================================================= + + +class AtomicInitFixture(t.NamedTuple): + """Test fixture for atomic_init scenarios.""" + + test_id: str + pre_initialized: bool + expected_result: bool + description: str + + +ATOMIC_INIT_FIXTURES = [ + AtomicInitFixture( + test_id="first_init", + pre_initialized=False, + expected_result=True, + description="First call performs initialization", + ), + AtomicInitFixture( + test_id="already_initialized", + pre_initialized=True, + expected_result=False, + description="Already initialized returns False", + ), +] + + +class TestAtomicInit: + """Tests for atomic_init function.""" + + @pytest.mark.parametrize( + list(AtomicInitFixture._fields), + ATOMIC_INIT_FIXTURES, + ids=[f.test_id for f in ATOMIC_INIT_FIXTURES], + ) + def test_atomic_init_scenarios( + self, + tmp_path: Path, + test_id: str, + pre_initialized: bool, + expected_result: bool, + description: str, + ) -> None: + """Test atomic_init return values.""" + resource_path = tmp_path / "resource" + resource_path.mkdir() + marker 
= resource_path / ".initialized" + + if pre_initialized: + marker.touch() + + init_called = [] + + def init_fn() -> None: + init_called.append(True) + + result = atomic_init(resource_path, init_fn) + + assert result == expected_result + if expected_result: + assert len(init_called) == 1 + else: + assert len(init_called) == 0 + assert marker.exists() + + def test_atomic_init_creates_marker(self, tmp_path: Path) -> None: + """Test atomic_init creates marker file.""" + resource_path = tmp_path / "resource" + resource_path.mkdir() + + atomic_init(resource_path, lambda: None) + + assert (resource_path / ".initialized").exists() + + def test_atomic_init_custom_marker(self, tmp_path: Path) -> None: + """Test atomic_init with custom marker name.""" + resource_path = tmp_path / "resource" + resource_path.mkdir() + + atomic_init(resource_path, lambda: None, marker_name=".custom_marker") + + assert (resource_path / ".custom_marker").exists() + assert not (resource_path / ".initialized").exists() + + def test_atomic_init_cleans_partial_state(self, tmp_path: Path) -> None: + """Test atomic_init cleans partial state before init.""" + resource_path = tmp_path / "resource" + resource_path.mkdir() + # Create partial state (no marker) + (resource_path / "partial_file.txt").write_text("partial") + + def init_fn() -> None: + (resource_path / "complete_file.txt").write_text("complete") + + atomic_init(resource_path, init_fn) + + # Partial file should be gone, complete file should exist + assert not (resource_path / "partial_file.txt").exists() + assert (resource_path / "complete_file.txt").exists() + + def test_atomic_init_concurrent(self, tmp_path: Path) -> None: + """Test atomic_init handles concurrent calls.""" + resource_path = tmp_path / "resource" + resource_path.mkdir() + init_count = {"count": 0} + lock = threading.Lock() + + def init_fn() -> None: + with lock: + init_count["count"] += 1 + time.sleep(0.1) # Simulate slow init + + threads = [] + for _ in range(5): + t = threading.Thread(target=atomic_init, args=(resource_path, init_fn)) + threads.append(t) + + for t in threads: + t.start() + for t in threads: + t.join() + + # Only one thread should have initialized + assert init_count["count"] == 1 + + +# ============================================================================= +# async_atomic_init Tests +# ============================================================================= + + +class TestAsyncAtomicInit: + """Tests for async_atomic_init function.""" + + @pytest.mark.asyncio + async def test_async_atomic_init_first(self, tmp_path: Path) -> None: + """Test first async_atomic_init performs initialization.""" + resource_path = tmp_path / "resource" + resource_path.mkdir() + init_called = [] + + async def async_init_fn() -> None: + init_called.append(True) + await asyncio.sleep(0) + + result = await async_atomic_init(resource_path, async_init_fn) + + assert result is True + assert len(init_called) == 1 + assert (resource_path / ".initialized").exists() + + @pytest.mark.asyncio + async def test_async_atomic_init_already_done(self, tmp_path: Path) -> None: + """Test async_atomic_init skips when already initialized.""" + resource_path = tmp_path / "resource" + resource_path.mkdir() + (resource_path / ".initialized").touch() + + init_called = [] + + async def async_init_fn() -> None: + init_called.append(True) + + result = await async_atomic_init(resource_path, async_init_fn) + + assert result is False + assert len(init_called) == 0 + + @pytest.mark.asyncio + async def 
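+
+    # Illustrative only, not part of the suite: the call shape these tests
+    # exercise, with a hypothetical ``expensive_init`` callable.
+    #
+    #     done = await async_atomic_init(resource_path, expensive_init)
+    #     # True for the one caller that ran expensive_init, False otherwise;
+    #     # completion is recorded via the ".initialized" marker file.
+
+    @pytest.mark.asyncio
+    async def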
test_async_atomic_init_sync_fn(self, tmp_path: Path) -> None: + """Test async_atomic_init works with sync init function.""" + resource_path = tmp_path / "resource" + resource_path.mkdir() + init_called = [] + + def sync_init_fn() -> None: + init_called.append(True) + + result = await async_atomic_init(resource_path, sync_init_fn) + + assert result is True + assert len(init_called) == 1 + + @pytest.mark.asyncio + async def test_async_atomic_init_concurrent(self, tmp_path: Path) -> None: + """Test async_atomic_init handles concurrent calls.""" + resource_path = tmp_path / "resource" + resource_path.mkdir() + init_count = {"count": 0} + + async def init_fn() -> None: + init_count["count"] += 1 + await asyncio.sleep(0.1) # Simulate slow init + + results = await asyncio.gather( + *[async_atomic_init(resource_path, init_fn) for _ in range(5)] + ) + + # Only one should have returned True + assert sum(results) == 1 + # Only one init should have run + assert init_count["count"] == 1 diff --git a/tests/cmd/_async/__init__.py b/tests/cmd/_async/__init__.py new file mode 100644 index 000000000..5414db4b9 --- /dev/null +++ b/tests/cmd/_async/__init__.py @@ -0,0 +1,3 @@ +"""Tests for async command classes.""" + +from __future__ import annotations diff --git a/tests/cmd/_async/test_git.py b/tests/cmd/_async/test_git.py new file mode 100644 index 000000000..636c00c71 --- /dev/null +++ b/tests/cmd/_async/test_git.py @@ -0,0 +1,645 @@ +"""Tests for libvcs.cmd._async.git.""" + +from __future__ import annotations + +import asyncio +import datetime +import typing as t +from pathlib import Path + +import pytest + +from libvcs.cmd._async.git import ( + AsyncGit, +) +from libvcs.pytest_plugin import CreateRepoPytestFixtureFn +from libvcs.sync.git import GitSync + + +class RunFixture(t.NamedTuple): + """Test fixture for AsyncGit.run().""" + + test_id: str + args: list[str] + kwargs: dict[str, t.Any] + expected_in_output: str | None + + +RUN_FIXTURES = [ + RunFixture( + test_id="version", + args=["version"], + kwargs={}, + expected_in_output="git version", + ), + RunFixture( + test_id="help_short", + args=["--help"], + kwargs={}, + expected_in_output="usage: git", + ), +] + + +class TestAsyncGit: + """Tests for AsyncGit class.""" + + def test_init(self, tmp_path: Path) -> None: + """Test AsyncGit initialization.""" + git = AsyncGit(path=tmp_path) + assert git.path == tmp_path + assert git.progress_callback is None + + def test_repr(self, tmp_path: Path) -> None: + """Test AsyncGit repr.""" + git = AsyncGit(path=tmp_path) + assert "AsyncGit" in repr(git) + assert str(tmp_path) in repr(git) + + @pytest.mark.parametrize( + list(RunFixture._fields), + RUN_FIXTURES, + ids=[f.test_id for f in RUN_FIXTURES], + ) + @pytest.mark.asyncio + async def test_run( + self, + test_id: str, + args: list[str], + kwargs: dict[str, t.Any], + expected_in_output: str | None, + tmp_path: Path, + ) -> None: + """Test AsyncGit.run() with various commands.""" + git = AsyncGit(path=tmp_path) + output = await git.run(args, **kwargs) + if expected_in_output is not None: + assert expected_in_output in output + + @pytest.mark.asyncio + async def test_run_with_config( + self, + git_repo: GitSync, + ) -> None: + """Test AsyncGit.run() with config options.""" + git = AsyncGit(path=git_repo.path) + output = await git.run( + ["config", "user.name"], + config={"user.name": "Test User"}, + ) + # Config is passed, command runs + assert output is not None + + @pytest.mark.asyncio + async def test_version(self, tmp_path: Path) -> None: + """Test 
AsyncGit.version().""" + git = AsyncGit(path=tmp_path) + version = await git.version() + assert "git version" in version + + +class TestAsyncGitClone: + """Tests for AsyncGit.clone().""" + + @pytest.mark.asyncio + async def test_clone_basic( + self, + tmp_path: Path, + create_git_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test basic clone operation.""" + remote_repo = create_git_remote_repo() + clone_path = tmp_path / "cloned_repo" + git = AsyncGit(path=clone_path) + + await git.clone(url=f"file://{remote_repo}") + + assert clone_path.exists() + assert (clone_path / ".git").exists() + + @pytest.mark.asyncio + async def test_clone_with_depth( + self, + tmp_path: Path, + create_git_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test shallow clone with depth parameter.""" + remote_repo = create_git_remote_repo() + clone_path = tmp_path / "shallow_repo" + git = AsyncGit(path=clone_path) + + await git.clone(url=f"file://{remote_repo}", depth=1) + + assert clone_path.exists() + assert (clone_path / ".git").exists() + # Note: .git/shallow file only exists if there's more history to truncate + # For a single-commit repo, depth=1 still works but no shallow file is created + + @pytest.mark.asyncio + async def test_clone_make_parents( + self, + tmp_path: Path, + create_git_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test clone creates parent directories.""" + remote_repo = create_git_remote_repo() + clone_path = tmp_path / "deep" / "nested" / "repo" + git = AsyncGit(path=clone_path) + + await git.clone(url=f"file://{remote_repo}", make_parents=True) + + assert clone_path.exists() + + +class TestAsyncGitFetch: + """Tests for AsyncGit.fetch().""" + + @pytest.mark.asyncio + async def test_fetch_basic( + self, + git_repo: GitSync, + ) -> None: + """Test basic fetch.""" + git = AsyncGit(path=git_repo.path) + # Fetch from origin + output = await git.fetch() + # Should succeed (might be empty if nothing to fetch) + assert output is not None + + +class TestAsyncGitStatus: + """Tests for AsyncGit.status().""" + + @pytest.mark.asyncio + async def test_status_clean( + self, + git_repo: GitSync, + ) -> None: + """Test status on clean repo.""" + git = AsyncGit(path=git_repo.path) + status = await git.status() + # Clean repo status + assert status is not None + + @pytest.mark.asyncio + async def test_status_porcelain( + self, + git_repo: GitSync, + ) -> None: + """Test porcelain status format.""" + git = AsyncGit(path=git_repo.path) + status = await git.status(porcelain=True) + # Clean repo should have empty porcelain output + assert status == "" + + @pytest.mark.asyncio + async def test_status_with_changes( + self, + git_repo: GitSync, + ) -> None: + """Test status with uncommitted changes.""" + # Create a new file + new_file = git_repo.path / "new_file.txt" + new_file.write_text("test content") + + git = AsyncGit(path=git_repo.path) + status = await git.status(porcelain=True) + # Should show untracked file + assert "new_file.txt" in status + + +class TestAsyncGitCheckout: + """Tests for AsyncGit.checkout().""" + + @pytest.mark.asyncio + async def test_checkout_branch( + self, + git_repo: GitSync, + ) -> None: + """Test checkout existing branch.""" + git = AsyncGit(path=git_repo.path) + # Get current branch first + current = await git.symbolic_ref(name="HEAD", short=True) + # Checkout same branch (should succeed) + await git.checkout(branch=current.strip()) + + +class TestAsyncGitRevParse: + """Tests for AsyncGit.rev_parse().""" + + @pytest.mark.asyncio + async def 
test_rev_parse_head( + self, + git_repo: GitSync, + ) -> None: + """Test rev-parse HEAD.""" + git = AsyncGit(path=git_repo.path) + sha = await git.rev_parse(args="HEAD", verify=True) + assert sha.strip() + assert len(sha.strip()) == 40 # Full SHA-1 + + @pytest.mark.asyncio + async def test_rev_parse_show_toplevel( + self, + git_repo: GitSync, + ) -> None: + """Test rev-parse --show-toplevel.""" + git = AsyncGit(path=git_repo.path) + toplevel = await git.rev_parse(show_toplevel=True) + assert toplevel.strip() == str(git_repo.path) + + +class TestAsyncGitSymbolicRef: + """Tests for AsyncGit.symbolic_ref().""" + + @pytest.mark.asyncio + async def test_symbolic_ref_head( + self, + git_repo: GitSync, + ) -> None: + """Test symbolic-ref HEAD.""" + git = AsyncGit(path=git_repo.path) + ref = await git.symbolic_ref(name="HEAD") + assert "refs/heads/" in ref + + +class TestAsyncGitRemoteManager: + """Tests for AsyncGitRemoteManager.""" + + @pytest.mark.asyncio + async def test_ls_remotes( + self, + git_repo: GitSync, + ) -> None: + """Test listing remotes.""" + git = AsyncGit(path=git_repo.path) + remotes = await git.remotes.ls() + assert "origin" in remotes + + @pytest.mark.asyncio + async def test_add_and_remove_remote( + self, + git_repo: GitSync, + ) -> None: + """Test adding and removing a remote.""" + git = AsyncGit(path=git_repo.path) + + # Add a new remote + await git.remotes.add(name="test_remote", url="file:///dev/null") + + # Verify it's in the list + remotes = await git.remotes.ls() + assert "test_remote" in remotes + + # Remove it + await git.remotes.remove(name="test_remote") + + # Verify it's gone + remotes = await git.remotes.ls() + assert "test_remote" not in remotes + + @pytest.mark.asyncio + async def test_get_url( + self, + git_repo: GitSync, + ) -> None: + """Test getting remote URL.""" + git = AsyncGit(path=git_repo.path) + url = await git.remotes.get_url(name="origin") + assert url.strip() # Should have some URL + + +class TestAsyncGitStashCmd: + """Tests for AsyncGitStashCmd.""" + + @pytest.mark.asyncio + async def test_stash_ls_empty( + self, + git_repo: GitSync, + ) -> None: + """Test listing empty stash.""" + git = AsyncGit(path=git_repo.path) + stashes = await git.stash.ls() + # Empty stash list + assert stashes == "" + + @pytest.mark.asyncio + async def test_stash_save_no_changes( + self, + git_repo: GitSync, + ) -> None: + """Test stash save with no changes.""" + git = AsyncGit(path=git_repo.path) + # No changes to stash - command runs but reports nothing to save + output = await git.stash.save(message="Test stash") + assert "No local changes to save" in output + + +class TestAsyncGitReset: + """Tests for AsyncGit.reset().""" + + @pytest.mark.asyncio + async def test_reset_soft( + self, + git_repo: GitSync, + ) -> None: + """Test soft reset.""" + git = AsyncGit(path=git_repo.path) + # Create a file and commit it + test_file = git_repo.path / "reset_test.txt" + test_file.write_text("initial content") + await git.run(["add", "reset_test.txt"]) + await git.run(["commit", "-m", "test commit"]) + + # Soft reset to HEAD~1 + output = await git.reset(pathspec="HEAD~1", soft=True) + # Soft reset should succeed + assert output is not None + + @pytest.mark.asyncio + async def test_reset_mixed( + self, + git_repo: GitSync, + ) -> None: + """Test mixed reset (default mode).""" + git = AsyncGit(path=git_repo.path) + # Create and stage a file + test_file = git_repo.path / "staged_file.txt" + test_file.write_text("staged content") + await git.run(["add", "staged_file.txt"]) + + # Mixed 
reset to unstage + output = await git.reset(mixed=True) + assert output is not None + + # Verify file is unstaged + status = await git.status(porcelain=True) + assert "??" in status # Untracked marker + + @pytest.mark.asyncio + async def test_reset_hard( + self, + git_repo: GitSync, + ) -> None: + """Test hard reset discards changes.""" + git = AsyncGit(path=git_repo.path) + # Modify a tracked file + test_file = git_repo.path / "hard_reset.txt" + test_file.write_text("original") + await git.run(["add", "hard_reset.txt"]) + await git.run(["commit", "-m", "add file"]) + + # Modify the file + test_file.write_text("modified") + + # Hard reset should discard changes + await git.reset(pathspec="HEAD", hard=True) + + # File should be back to original + assert test_file.read_text() == "original" + + @pytest.mark.asyncio + async def test_reset_quiet( + self, + git_repo: GitSync, + ) -> None: + """Test reset with quiet flag.""" + git = AsyncGit(path=git_repo.path) + output = await git.reset(quiet=True) + # Quiet mode should suppress output + assert output == "" + + @pytest.mark.asyncio + async def test_reset_pathspec_list( + self, + git_repo: GitSync, + ) -> None: + """Test reset with pathspec as list.""" + git = AsyncGit(path=git_repo.path) + # Create and stage multiple files + file1 = git_repo.path / "file1.txt" + file2 = git_repo.path / "file2.txt" + file1.write_text("content1") + file2.write_text("content2") + await git.run(["add", "file1.txt", "file2.txt"]) + + # Reset specific files using list + output = await git.reset(pathspec=["file1.txt", "file2.txt"]) + assert output is not None + + +class TestAsyncGitRebase: + """Tests for AsyncGit.rebase().""" + + @pytest.mark.asyncio + async def test_rebase_upstream( + self, + git_repo: GitSync, + ) -> None: + """Test rebase onto upstream branch.""" + git = AsyncGit(path=git_repo.path) + # Get current branch + current = await git.symbolic_ref(name="HEAD", short=True) + current_branch = current.strip() + + # Create a feature branch + await git.run(["checkout", "-b", "feature"]) + test_file = git_repo.path / "feature_file.txt" + test_file.write_text("feature content") + await git.run(["add", "feature_file.txt"]) + await git.run(["commit", "-m", "feature commit"]) + + # Rebase onto original branch (trivial rebase - already up to date) + output = await git.rebase(upstream=current_branch) + assert output is not None + + @pytest.mark.asyncio + async def test_rebase_onto( + self, + git_repo: GitSync, + ) -> None: + """Test rebase with onto option.""" + git = AsyncGit(path=git_repo.path) + # Get initial commit + initial = await git.rev_parse(args="HEAD", verify=True) + + # Create branch and commit + await git.run(["checkout", "-b", "onto_test"]) + test_file = git_repo.path / "onto_file.txt" + test_file.write_text("onto content") + await git.run(["add", "onto_file.txt"]) + await git.run(["commit", "-m", "onto commit"]) + + # Rebase onto initial commit (trivial case) + output = await git.rebase(onto=initial.strip(), upstream=initial.strip()) + assert output is not None + + @pytest.mark.asyncio + async def test_rebase_abort_no_rebase( + self, + git_repo: GitSync, + ) -> None: + """Test rebase abort when no rebase in progress.""" + git = AsyncGit(path=git_repo.path) + # Abort with no rebase in progress should fail + output = await git.rebase(abort=True, check_returncode=False) + # Returns error message or empty depending on git version + assert output is not None + + @pytest.mark.asyncio + async def test_rebase_quiet( + self, + tmp_path: Path, + ) -> None: + 
"""Test rebase with quiet flag.""" + # Create a fresh git repo with initial commit + repo_path = tmp_path / "rebase_quiet_repo" + repo_path.mkdir() + git = AsyncGit(path=repo_path) + await git.run(["init"]) + await git.run(["config", "user.email", "test@test.com"]) + await git.run(["config", "user.name", "Test User"]) + + # Create initial commit + test_file = repo_path / "initial.txt" + test_file.write_text("initial content") + await git.run(["add", "initial.txt"]) + await git.run(["commit", "-m", "initial"]) + + # Rebase onto HEAD (trivial - no changes) + head = await git.rev_parse(args="HEAD", verify=True) + output = await git.rebase(upstream=head.strip(), quiet=True) + # Quiet mode reduces output + assert output is not None + + +class TestAsyncGitSubmoduleCmd: + """Tests for AsyncGitSubmoduleCmd.""" + + @pytest.mark.asyncio + async def test_submodule_init_no_submodules( + self, + git_repo: GitSync, + ) -> None: + """Test submodule init on repo without submodules.""" + git = AsyncGit(path=git_repo.path) + # Should succeed even without submodules + output = await git.submodule.init() + assert output == "" + + @pytest.mark.asyncio + async def test_submodule_update_no_submodules( + self, + git_repo: GitSync, + ) -> None: + """Test submodule update on repo without submodules.""" + git = AsyncGit(path=git_repo.path) + # Should succeed even without submodules + output = await git.submodule.update() + assert output == "" + + @pytest.mark.asyncio + async def test_submodule_update_with_init( + self, + git_repo: GitSync, + ) -> None: + """Test submodule update with init flag.""" + git = AsyncGit(path=git_repo.path) + output = await git.submodule.update(init=True) + assert output == "" + + @pytest.mark.asyncio + async def test_submodule_update_recursive( + self, + git_repo: GitSync, + ) -> None: + """Test submodule update with recursive flag.""" + git = AsyncGit(path=git_repo.path) + output = await git.submodule.update(recursive=True) + assert output == "" + + @pytest.mark.asyncio + async def test_submodule_update_force( + self, + git_repo: GitSync, + ) -> None: + """Test submodule update with force flag.""" + git = AsyncGit(path=git_repo.path) + output = await git.submodule.update(force=True) + assert output == "" + + @pytest.mark.asyncio + async def test_submodule_update_combined_flags( + self, + git_repo: GitSync, + ) -> None: + """Test submodule update with combined flags.""" + git = AsyncGit(path=git_repo.path) + output = await git.submodule.update(init=True, recursive=True, force=True) + assert output == "" + + +class TestAsyncGitConcurrency: + """Tests for concurrent AsyncGit operations.""" + + @pytest.mark.asyncio + async def test_concurrent_status_calls( + self, + git_repo: GitSync, + ) -> None: + """Test multiple concurrent status calls.""" + git = AsyncGit(path=git_repo.path) + + async def get_status() -> str: + return await git.status(porcelain=True) + + results = await asyncio.gather(*[get_status() for _ in range(5)]) + + assert len(results) == 5 + for result in results: + assert result == "" # Clean repo + + @pytest.mark.asyncio + async def test_concurrent_rev_parse( + self, + git_repo: GitSync, + ) -> None: + """Test multiple concurrent rev-parse calls.""" + git = AsyncGit(path=git_repo.path) + + async def get_head() -> str: + return await git.rev_parse(args="HEAD", verify=True) + + results = await asyncio.gather(*[get_head() for _ in range(5)]) + + assert len(results) == 5 + # All should return the same SHA + first_sha = results[0].strip() + for result in results[1:]: + assert 
result.strip() == first_sha + + +class TestAsyncGitWithCallback: + """Tests for AsyncGit with progress callbacks.""" + + @pytest.mark.asyncio + async def test_clone_with_callback( + self, + tmp_path: Path, + create_git_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test clone with progress callback.""" + progress_output: list[str] = [] + + async def progress_cb(output: str, timestamp: datetime.datetime) -> None: + progress_output.append(output) + + remote_repo = create_git_remote_repo() + clone_path = tmp_path / "callback_repo" + git = AsyncGit(path=clone_path, progress_callback=progress_cb) + + await git.clone(url=f"file://{remote_repo}", log_in_real_time=True) + + assert clone_path.exists() + # Progress callback should have been called + # (may be empty for local clone, but mechanism works) diff --git a/tests/cmd/_async/test_hg.py b/tests/cmd/_async/test_hg.py new file mode 100644 index 000000000..3b0e1b07a --- /dev/null +++ b/tests/cmd/_async/test_hg.py @@ -0,0 +1,179 @@ +"""Tests for libvcs.cmd._async.hg.""" + +from __future__ import annotations + +import typing as t +from pathlib import Path + +import pytest + +from libvcs.cmd._async.hg import AsyncHg +from libvcs.pytest_plugin import CreateRepoPytestFixtureFn + + +class RunFixture(t.NamedTuple): + """Test fixture for AsyncHg.run().""" + + test_id: str + args: list[str] + kwargs: dict[str, t.Any] + expected_in_output: str | None + + +RUN_FIXTURES = [ + RunFixture( + test_id="version", + args=["version"], + kwargs={}, + expected_in_output="Mercurial", + ), + RunFixture( + test_id="help_short", + args=["--help"], + kwargs={}, + expected_in_output="Mercurial", + ), +] + + +class TestAsyncHg: + """Tests for AsyncHg class.""" + + def test_init(self, tmp_path: Path) -> None: + """Test AsyncHg initialization.""" + hg = AsyncHg(path=tmp_path) + assert hg.path == tmp_path + assert hg.progress_callback is None + + def test_repr(self, tmp_path: Path) -> None: + """Test AsyncHg repr.""" + hg = AsyncHg(path=tmp_path) + assert "AsyncHg" in repr(hg) + assert str(tmp_path) in repr(hg) + + @pytest.mark.parametrize( + list(RunFixture._fields), + RUN_FIXTURES, + ids=[f.test_id for f in RUN_FIXTURES], + ) + @pytest.mark.asyncio + async def test_run( + self, + test_id: str, + args: list[str], + kwargs: dict[str, t.Any], + expected_in_output: str | None, + tmp_path: Path, + ) -> None: + """Test AsyncHg.run() with various commands.""" + hg = AsyncHg(path=tmp_path) + output = await hg.run(args, **kwargs) + if expected_in_output is not None: + assert expected_in_output in output + + +class TestAsyncHgClone: + """Tests for AsyncHg.clone().""" + + @pytest.mark.asyncio + async def test_clone_basic( + self, + tmp_path: Path, + create_hg_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test basic clone operation.""" + remote_repo = create_hg_remote_repo() + repo_path = tmp_path / "cloned_repo" + + hg = AsyncHg(path=repo_path) + await hg.clone(url=f"file://{remote_repo}") + + assert repo_path.exists() + assert (repo_path / ".hg").exists() + + @pytest.mark.asyncio + async def test_clone_quiet( + self, + tmp_path: Path, + create_hg_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test clone with quiet flag.""" + remote_repo = create_hg_remote_repo() + repo_path = tmp_path / "quiet_repo" + + hg = AsyncHg(path=repo_path) + await hg.clone(url=f"file://{remote_repo}", quiet=True) + + assert repo_path.exists() + + @pytest.mark.asyncio + async def test_clone_no_update( + self, + tmp_path: Path, + create_hg_remote_repo: 
CreateRepoPytestFixtureFn, + ) -> None: + """Test clone with no_update flag.""" + remote_repo = create_hg_remote_repo() + repo_path = tmp_path / "noupdate_repo" + + hg = AsyncHg(path=repo_path) + await hg.clone(url=f"file://{remote_repo}", no_update=True) + + assert repo_path.exists() + assert (repo_path / ".hg").exists() + + +class TestAsyncHgUpdate: + """Tests for AsyncHg.update().""" + + @pytest.mark.asyncio + async def test_update_basic( + self, + tmp_path: Path, + create_hg_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test basic update operation.""" + remote_repo = create_hg_remote_repo() + repo_path = tmp_path / "update_repo" + + hg = AsyncHg(path=repo_path) + await hg.clone(url=f"file://{remote_repo}") + output = await hg.update() + + assert "files" in output or "0 files" in output + + +class TestAsyncHgPull: + """Tests for AsyncHg.pull().""" + + @pytest.mark.asyncio + async def test_pull_basic( + self, + tmp_path: Path, + create_hg_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test basic pull operation.""" + remote_repo = create_hg_remote_repo() + repo_path = tmp_path / "pull_repo" + + hg = AsyncHg(path=repo_path) + await hg.clone(url=f"file://{remote_repo}") + output = await hg.pull() + + assert "no changes found" in output or "pulling from" in output + + @pytest.mark.asyncio + async def test_pull_with_update( + self, + tmp_path: Path, + create_hg_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test pull with update flag.""" + remote_repo = create_hg_remote_repo() + repo_path = tmp_path / "pull_update_repo" + + hg = AsyncHg(path=repo_path) + await hg.clone(url=f"file://{remote_repo}") + output = await hg.pull(update=True) + + assert output is not None diff --git a/tests/cmd/_async/test_svn.py b/tests/cmd/_async/test_svn.py new file mode 100644 index 000000000..105fc1433 --- /dev/null +++ b/tests/cmd/_async/test_svn.py @@ -0,0 +1,168 @@ +"""Tests for libvcs.cmd._async.svn.""" + +from __future__ import annotations + +import shutil +import typing as t +from pathlib import Path + +import pytest + +from libvcs.cmd._async.svn import AsyncSvn +from libvcs.pytest_plugin import CreateRepoPytestFixtureFn + +if not shutil.which("svn"): + pytestmark = pytest.mark.skip(reason="svn is not available") + + +class RunFixture(t.NamedTuple): + """Test fixture for AsyncSvn.run().""" + + test_id: str + args: list[str] + kwargs: dict[str, t.Any] + expected_in_output: str | None + + +RUN_FIXTURES = [ + RunFixture( + test_id="version", + args=["--version"], + kwargs={}, + expected_in_output="svn", + ), + RunFixture( + test_id="help_short", + args=["help"], + kwargs={}, + expected_in_output="usage", + ), +] + + +class TestAsyncSvn: + """Tests for AsyncSvn class.""" + + def test_init(self, tmp_path: Path) -> None: + """Test AsyncSvn initialization.""" + svn = AsyncSvn(path=tmp_path) + assert svn.path == tmp_path + assert svn.progress_callback is None + + def test_repr(self, tmp_path: Path) -> None: + """Test AsyncSvn repr.""" + svn = AsyncSvn(path=tmp_path) + assert "AsyncSvn" in repr(svn) + assert str(tmp_path) in repr(svn) + + @pytest.mark.parametrize( + list(RunFixture._fields), + RUN_FIXTURES, + ids=[f.test_id for f in RUN_FIXTURES], + ) + @pytest.mark.asyncio + async def test_run( + self, + test_id: str, + args: list[str], + kwargs: dict[str, t.Any], + expected_in_output: str | None, + tmp_path: Path, + ) -> None: + """Test AsyncSvn.run() with various commands.""" + svn = AsyncSvn(path=tmp_path) + output = await svn.run(args, **kwargs) + if expected_in_output is not 
None:
+            assert expected_in_output in output.lower()
+
+
+class TestAsyncSvnCheckout:
+    """Tests for AsyncSvn.checkout()."""
+
+    @pytest.mark.asyncio
+    async def test_checkout_basic(
+        self,
+        tmp_path: Path,
+        create_svn_remote_repo: CreateRepoPytestFixtureFn,
+    ) -> None:
+        """Test basic checkout operation."""
+        remote_repo = create_svn_remote_repo()
+        repo_path = tmp_path / "checked_out_repo"
+
+        svn = AsyncSvn(path=repo_path)
+        await svn.checkout(url=f"file://{remote_repo}")
+
+        assert repo_path.exists()
+        assert (repo_path / ".svn").exists()
+
+    @pytest.mark.asyncio
+    async def test_checkout_quiet(
+        self,
+        tmp_path: Path,
+        create_svn_remote_repo: CreateRepoPytestFixtureFn,
+    ) -> None:
+        """Test checkout with quiet flag."""
+        remote_repo = create_svn_remote_repo()
+        repo_path = tmp_path / "quiet_repo"
+
+        svn = AsyncSvn(path=repo_path)
+        await svn.checkout(url=f"file://{remote_repo}", quiet=True)
+
+        assert repo_path.exists()
+
+
+class TestAsyncSvnUpdate:
+    """Tests for AsyncSvn.update()."""
+
+    @pytest.mark.asyncio
+    async def test_update_basic(
+        self,
+        tmp_path: Path,
+        create_svn_remote_repo: CreateRepoPytestFixtureFn,
+    ) -> None:
+        """Test basic update operation."""
+        remote_repo = create_svn_remote_repo()
+        repo_path = tmp_path / "update_repo"
+
+        svn = AsyncSvn(path=repo_path)
+        await svn.checkout(url=f"file://{remote_repo}")
+        output = await svn.update()
+
+        # e.g. "Updating '.': ... At revision 0."
+        assert "revision" in output.lower()
+
+
+class TestAsyncSvnInfo:
+    """Tests for AsyncSvn.info()."""
+
+    @pytest.mark.asyncio
+    async def test_info_basic(
+        self,
+        tmp_path: Path,
+        create_svn_remote_repo: CreateRepoPytestFixtureFn,
+    ) -> None:
+        """Test basic info operation."""
+        remote_repo = create_svn_remote_repo()
+        repo_path = tmp_path / "info_repo"
+
+        svn = AsyncSvn(path=repo_path)
+        await svn.checkout(url=f"file://{remote_repo}")
+        output = await svn.info()
+
+        assert "url" in output.lower()
+
+    @pytest.mark.asyncio
+    async def test_info_xml(
+        self,
+        tmp_path: Path,
+        create_svn_remote_repo: CreateRepoPytestFixtureFn,
+    ) -> None:
+        """Test info with XML output."""
+        remote_repo = create_svn_remote_repo()
+        repo_path = tmp_path / "info_xml_repo"
+
+        svn = AsyncSvn(path=repo_path)
+        await svn.checkout(url=f"file://{remote_repo}")
+        output = await svn.info(xml=True)
+
+        # --xml output is an XML document
+        assert "<?xml" in output
diff --git a/tests/sync/_async/__init__.py b/tests/sync/_async/__init__.py
new file mode 100644
index 000000000..b028415d1
--- /dev/null
+++ b/tests/sync/_async/__init__.py
@@ -0,0 +1,3 @@
+"""Tests for async sync classes."""
+
+from __future__ import annotations
diff --git a/tests/sync/_async/test_git.py b/tests/sync/_async/test_git.py
new file mode 100644
index 000000000..c1bb72daf
--- /dev/null
+++ b/tests/sync/_async/test_git.py
@@ -0,0 +1,580 @@
+"""Tests for libvcs.sync._async.git."""
+
+from __future__ import annotations
+
+import asyncio
+from pathlib import Path
+
+import pytest
+
+from libvcs.pytest_plugin import CreateRepoPytestFixtureFn
+from libvcs.sync._async.git import AsyncGitSync
+from libvcs.sync.git import GitRemote
+
+
+class TestAsyncGitSync:
+    """Tests for AsyncGitSync class."""
+
+    def test_init(self, tmp_path: Path) -> None:
+        """Test AsyncGitSync initialization."""
+        repo = AsyncGitSync(
+            url="https://github.com/test/repo",
+            path=tmp_path / "repo",
+        )
+        assert repo.url == "https://github.com/test/repo"
+        assert repo.path == tmp_path / "repo"
+        assert "origin" in repo._remotes
+
+    def test_init_with_remotes(self, tmp_path: Path) -> None:
"""Test AsyncGitSync initialization with additional remotes.""" + repo = AsyncGitSync( + url="https://github.com/test/repo", + path=tmp_path / "repo", + remotes={ + "upstream": "https://github.com/upstream/repo", + }, + ) + assert "origin" in repo._remotes + assert "upstream" in repo._remotes + + def test_repr(self, tmp_path: Path) -> None: + """Test AsyncGitSync repr.""" + repo = AsyncGitSync( + url="https://github.com/test/repo", + path=tmp_path / "myrepo", + ) + assert "AsyncGitSync" in repr(repo) + assert "myrepo" in repr(repo) + + def test_chomp_protocol(self) -> None: + """Test chomp_protocol removes git+ prefix.""" + assert ( + AsyncGitSync.chomp_protocol("git+https://example.com") + == "https://example.com" + ) + assert ( + AsyncGitSync.chomp_protocol("https://example.com") == "https://example.com" + ) + + +class TestAsyncGitSyncObtain: + """Tests for AsyncGitSync.obtain().""" + + @pytest.mark.asyncio + async def test_obtain_basic( + self, + tmp_path: Path, + create_git_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test basic obtain operation.""" + remote_repo = create_git_remote_repo() + repo_path = tmp_path / "obtained_repo" + + repo = AsyncGitSync( + url=f"file://{remote_repo}", + path=repo_path, + ) + await repo.obtain() + + assert repo_path.exists() + assert (repo_path / ".git").exists() + + @pytest.mark.asyncio + async def test_obtain_shallow( + self, + tmp_path: Path, + create_git_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test shallow clone via obtain.""" + remote_repo = create_git_remote_repo() + repo_path = tmp_path / "shallow_repo" + + repo = AsyncGitSync( + url=f"file://{remote_repo}", + path=repo_path, + git_shallow=True, + ) + await repo.obtain() + + assert repo_path.exists() + + +class TestAsyncGitSyncUpdateRepo: + """Tests for AsyncGitSync.update_repo().""" + + @pytest.mark.asyncio + async def test_update_repo_basic( + self, + tmp_path: Path, + create_git_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test basic update_repo operation.""" + remote_repo = create_git_remote_repo() + repo_path = tmp_path / "update_repo" + + repo = AsyncGitSync( + url=f"file://{remote_repo}", + path=repo_path, + ) + # First obtain + await repo.obtain() + + # Then update + await repo.update_repo() + + assert repo_path.exists() + + @pytest.mark.asyncio + async def test_update_repo_obtains_if_missing( + self, + tmp_path: Path, + create_git_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test update_repo clones if repo doesn't exist.""" + remote_repo = create_git_remote_repo() + repo_path = tmp_path / "new_repo" + + repo = AsyncGitSync( + url=f"file://{remote_repo}", + path=repo_path, + ) + # Just update_repo without obtain first + await repo.update_repo() + + # Should have cloned + assert repo_path.exists() + assert (repo_path / ".git").exists() + + +class TestAsyncGitSyncGetRevision: + """Tests for AsyncGitSync.get_revision().""" + + @pytest.mark.asyncio + async def test_get_revision( + self, + tmp_path: Path, + create_git_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test get_revision returns current HEAD or 'initial'.""" + remote_repo = create_git_remote_repo() + repo_path = tmp_path / "rev_repo" + + repo = AsyncGitSync( + url=f"file://{remote_repo}", + path=repo_path, + ) + await repo.obtain() + + revision = await repo.get_revision() + assert revision + # Either a 40-char SHA or "initial" for empty repos + stripped = revision.strip() + assert len(stripped) == 40 or stripped == "initial" + + +class TestAsyncGitSyncRemotes: + 
"""Tests for AsyncGitSync remote management.""" + + @pytest.mark.asyncio + async def test_remotes_get( + self, + tmp_path: Path, + create_git_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test remotes_get returns remotes.""" + remote_repo = create_git_remote_repo() + repo_path = tmp_path / "remotes_repo" + + repo = AsyncGitSync( + url=f"file://{remote_repo}", + path=repo_path, + ) + await repo.obtain() + + remotes = await repo.remotes_get() + assert "origin" in remotes + assert isinstance(remotes["origin"], GitRemote) + + @pytest.mark.asyncio + async def test_remote_get_single( + self, + tmp_path: Path, + create_git_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test remote() returns single remote.""" + remote_repo = create_git_remote_repo() + repo_path = tmp_path / "single_remote_repo" + + repo = AsyncGitSync( + url=f"file://{remote_repo}", + path=repo_path, + ) + await repo.obtain() + + origin = await repo.remote("origin") + assert origin is not None + assert origin.name == "origin" + + @pytest.mark.asyncio + async def test_set_remote( + self, + tmp_path: Path, + create_git_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test set_remote adds a new remote.""" + remote_repo = create_git_remote_repo() + # Create a second remote repo to use as upstream + upstream_repo = create_git_remote_repo() + repo_path = tmp_path / "set_remote_repo" + + repo = AsyncGitSync( + url=f"file://{remote_repo}", + path=repo_path, + ) + await repo.obtain() + + # Add a new remote pointing to another valid repo + await repo.set_remote(name="upstream", url=f"file://{upstream_repo}") + + # Verify it exists + upstream = await repo.remote("upstream") + assert upstream is not None + assert upstream.name == "upstream" + + +class TestAsyncGitSyncStatus: + """Tests for AsyncGitSync.status().""" + + @pytest.mark.asyncio + async def test_status( + self, + tmp_path: Path, + create_git_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test status() returns GitStatus without error.""" + remote_repo = create_git_remote_repo() + repo_path = tmp_path / "status_repo" + + repo = AsyncGitSync( + url=f"file://{remote_repo}", + path=repo_path, + ) + await repo.obtain() + + # Just verify status() runs without error + # The GitStatus parser may not find data for all repos + status = await repo.status() + from libvcs.sync.git import GitStatus + + assert isinstance(status, GitStatus) + + +class TestAsyncGitRepoFixture: + """Tests for the async_git_repo pytest fixture.""" + + @pytest.mark.asyncio + async def test_async_git_repo_fixture( + self, + async_git_repo: AsyncGitSync, + ) -> None: + """Test that async_git_repo fixture provides a working repository.""" + # Verify the repo exists and is initialized + assert async_git_repo.path.exists() + assert (async_git_repo.path / ".git").exists() + + # Verify we can perform async operations + revision = await async_git_repo.get_revision() + assert revision + assert len(revision.strip()) == 40 # Full SHA + + @pytest.mark.asyncio + async def test_async_git_repo_status( + self, + async_git_repo: AsyncGitSync, + ) -> None: + """Test that status() works on fixture-provided repo.""" + from libvcs.sync.git import GitStatus + + status = await async_git_repo.status() + assert isinstance(status, GitStatus) + + @pytest.mark.asyncio + async def test_async_git_repo_remotes( + self, + async_git_repo: AsyncGitSync, + ) -> None: + """Test that remotes are properly configured on fixture-provided repo.""" + remotes = await async_git_repo.remotes_get() + assert "origin" in 
remotes + + +class TestAsyncGitSyncFromPipUrl: + """Tests for AsyncGitSync.from_pip_url().""" + + def test_from_pip_url_https(self, tmp_path: Path) -> None: + """Test from_pip_url with git+https URL.""" + repo = AsyncGitSync.from_pip_url( + pip_url="git+https://github.com/test/repo.git", + path=tmp_path / "pip_repo", + ) + assert repo.url == "https://github.com/test/repo.git" + assert repo.path == tmp_path / "pip_repo" + + def test_from_pip_url_with_revision(self, tmp_path: Path) -> None: + """Test from_pip_url with revision specifier.""" + repo = AsyncGitSync.from_pip_url( + pip_url="git+https://github.com/test/repo.git@v1.0.0", + path=tmp_path / "pip_repo", + ) + assert repo.url == "https://github.com/test/repo.git" + assert repo.rev == "v1.0.0" + + def test_from_pip_url_ssh(self, tmp_path: Path) -> None: + """Test from_pip_url with git+ssh URL.""" + repo = AsyncGitSync.from_pip_url( + pip_url="git+ssh://git@github.com/test/repo.git", + path=tmp_path / "pip_repo", + ) + assert repo.url == "ssh://git@github.com/test/repo.git" + + +class TestAsyncGitSyncGetGitVersion: + """Tests for AsyncGitSync.get_git_version().""" + + @pytest.mark.asyncio + async def test_get_git_version( + self, + async_git_repo: AsyncGitSync, + ) -> None: + """Test get_git_version returns version string.""" + version = await async_git_repo.get_git_version() + assert "git version" in version + + @pytest.mark.asyncio + async def test_get_git_version_format( + self, + async_git_repo: AsyncGitSync, + ) -> None: + """Test get_git_version returns expected format.""" + version = await async_git_repo.get_git_version() + # Version string should contain numbers + import re + + assert re.search(r"\d+\.\d+", version) + + +class TestAsyncGitSyncUpdateRepoStash: + """Tests for AsyncGitSync.update_repo() stash handling.""" + + @pytest.mark.asyncio + async def test_update_repo_with_uncommitted_changes( + self, + tmp_path: Path, + create_git_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test update_repo with uncommitted changes triggers stash.""" + from libvcs.pytest_plugin import git_remote_repo_single_commit_post_init + + # Create remote with a commit + remote_repo = create_git_remote_repo( + remote_repo_post_init=git_remote_repo_single_commit_post_init + ) + repo_path = tmp_path / "stash_test_repo" + + repo = AsyncGitSync( + url=f"file://{remote_repo}", + path=repo_path, + ) + await repo.obtain() + + # Create uncommitted changes + test_file = repo_path / "local_change.txt" + test_file.write_text("local uncommitted content") + await repo.cmd.run(["add", "local_change.txt"]) + + # Update should handle the uncommitted changes (may stash if needed) + # This tests the stash logic path + await repo.update_repo() + + # Repo should still exist and be valid + assert repo_path.exists() + revision = await repo.get_revision() + assert revision + + @pytest.mark.asyncio + async def test_update_repo_clean_working_tree( + self, + tmp_path: Path, + create_git_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test update_repo with clean working tree skips stash.""" + from libvcs.pytest_plugin import git_remote_repo_single_commit_post_init + + remote_repo = create_git_remote_repo( + remote_repo_post_init=git_remote_repo_single_commit_post_init + ) + repo_path = tmp_path / "clean_repo" + + repo = AsyncGitSync( + url=f"file://{remote_repo}", + path=repo_path, + ) + await repo.obtain() + + # No changes - update should succeed without stash + await repo.update_repo() + + assert repo_path.exists() + + +class TestAsyncGitSyncSetRemotes: 
+ """Tests for AsyncGitSync.set_remotes().""" + + @pytest.mark.asyncio + async def test_set_remotes_overwrite_false( + self, + tmp_path: Path, + create_git_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test set_remotes with overwrite=False preserves existing remotes.""" + remote_repo = create_git_remote_repo() + repo_path = tmp_path / "set_remotes_repo" + + repo = AsyncGitSync( + url=f"file://{remote_repo}", + path=repo_path, + ) + await repo.obtain() + + # Get original origin URL + original_remotes = await repo.remotes_get() + original_origin_url = original_remotes["origin"].fetch_url + + # Try to set remotes with overwrite=False + await repo.set_remotes(overwrite=False) + + # Origin should still have same URL + updated_remotes = await repo.remotes_get() + assert updated_remotes["origin"].fetch_url == original_origin_url + + @pytest.mark.asyncio + async def test_set_remotes_adds_new_remote( + self, + tmp_path: Path, + create_git_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test set_remotes adds new remotes from configuration.""" + remote_repo = create_git_remote_repo() + upstream_repo = create_git_remote_repo() + repo_path = tmp_path / "add_remote_repo" + + repo = AsyncGitSync( + url=f"file://{remote_repo}", + path=repo_path, + remotes={"upstream": f"file://{upstream_repo}"}, + ) + await repo.obtain() + + # Set remotes should add upstream + await repo.set_remotes(overwrite=False) + + remotes = await repo.remotes_get() + assert "origin" in remotes + assert "upstream" in remotes + + +class TestAsyncGitSyncGetCurrentRemoteName: + """Tests for AsyncGitSync.get_current_remote_name().""" + + @pytest.mark.asyncio + async def test_get_current_remote_name_default( + self, + async_git_repo: AsyncGitSync, + ) -> None: + """Test get_current_remote_name returns origin by default.""" + remote_name = await async_git_repo.get_current_remote_name() + assert remote_name == "origin" + + @pytest.mark.asyncio + async def test_get_current_remote_name_on_detached_head( + self, + tmp_path: Path, + create_git_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test get_current_remote_name on detached HEAD falls back to origin.""" + from libvcs.pytest_plugin import git_remote_repo_single_commit_post_init + + remote_repo = create_git_remote_repo( + remote_repo_post_init=git_remote_repo_single_commit_post_init + ) + repo_path = tmp_path / "detached_head_repo" + + repo = AsyncGitSync( + url=f"file://{remote_repo}", + path=repo_path, + ) + await repo.obtain() + + # Detach HEAD + head = await repo.cmd.rev_parse(args="HEAD", verify=True) + await repo.cmd.checkout(branch=head.strip(), detach=True) + + # Should fall back to 'origin' + remote_name = await repo.get_current_remote_name() + assert remote_name == "origin" + + +class TestAsyncGitSyncConcurrency: + """Tests for concurrent AsyncGitSync operations.""" + + @pytest.mark.asyncio + async def test_concurrent_obtain( + self, + tmp_path: Path, + create_git_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test multiple concurrent obtain operations.""" + remote_repo = create_git_remote_repo() + + async def clone_repo(i: int) -> AsyncGitSync: + repo_path = tmp_path / f"concurrent_repo_{i}" + repo = AsyncGitSync( + url=f"file://{remote_repo}", + path=repo_path, + ) + await repo.obtain() + return repo + + repos = await asyncio.gather(*[clone_repo(i) for i in range(3)]) + + assert len(repos) == 3 + for repo in repos: + assert repo.path.exists() + assert (repo.path / ".git").exists() + + @pytest.mark.asyncio + async def 
test_concurrent_status_calls( + self, + tmp_path: Path, + create_git_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test multiple concurrent status calls on same repo.""" + remote_repo = create_git_remote_repo() + repo_path = tmp_path / "concurrent_status_repo" + + repo = AsyncGitSync( + url=f"file://{remote_repo}", + path=repo_path, + ) + await repo.obtain() + + async def get_revision() -> str: + return await repo.get_revision() + + results = await asyncio.gather(*[get_revision() for _ in range(5)]) + + assert len(results) == 5 + # All should return the same SHA + first_sha = results[0].strip() + for result in results[1:]: + assert result.strip() == first_sha diff --git a/tests/sync/_async/test_hg.py b/tests/sync/_async/test_hg.py new file mode 100644 index 000000000..d255ee723 --- /dev/null +++ b/tests/sync/_async/test_hg.py @@ -0,0 +1,148 @@ +"""Tests for libvcs.sync._async.hg.""" + +from __future__ import annotations + +from pathlib import Path + +import pytest + +from libvcs.pytest_plugin import CreateRepoPytestFixtureFn +from libvcs.sync._async.hg import AsyncHgSync + + +class TestAsyncHgSync: + """Tests for AsyncHgSync class.""" + + def test_init(self, tmp_path: Path) -> None: + """Test AsyncHgSync initialization.""" + repo = AsyncHgSync( + url="https://hg.example.com/repo", + path=tmp_path / "repo", + ) + assert repo.url == "https://hg.example.com/repo" + assert repo.path == tmp_path / "repo" + + def test_repr(self, tmp_path: Path) -> None: + """Test AsyncHgSync repr.""" + repo = AsyncHgSync( + url="https://hg.example.com/repo", + path=tmp_path / "myrepo", + ) + assert "AsyncHgSync" in repr(repo) + assert "myrepo" in repr(repo) + + +class TestAsyncHgSyncObtain: + """Tests for AsyncHgSync.obtain().""" + + @pytest.mark.asyncio + async def test_obtain_basic( + self, + tmp_path: Path, + create_hg_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test basic obtain operation.""" + remote_repo = create_hg_remote_repo() + repo_path = tmp_path / "obtained_repo" + + repo = AsyncHgSync( + url=f"file://{remote_repo}", + path=repo_path, + ) + await repo.obtain() + + assert repo_path.exists() + assert (repo_path / ".hg").exists() + + +class TestAsyncHgSyncUpdateRepo: + """Tests for AsyncHgSync.update_repo().""" + + @pytest.mark.asyncio + async def test_update_repo_basic( + self, + tmp_path: Path, + create_hg_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test basic update_repo operation.""" + remote_repo = create_hg_remote_repo() + repo_path = tmp_path / "update_repo" + + repo = AsyncHgSync( + url=f"file://{remote_repo}", + path=repo_path, + ) + # First obtain + await repo.obtain() + + # Then update + await repo.update_repo() + + assert repo_path.exists() + + @pytest.mark.asyncio + async def test_update_repo_obtains_if_missing( + self, + tmp_path: Path, + create_hg_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test update_repo clones if repo doesn't exist.""" + remote_repo = create_hg_remote_repo() + repo_path = tmp_path / "new_repo" + + repo = AsyncHgSync( + url=f"file://{remote_repo}", + path=repo_path, + ) + # Just update_repo without obtain first + await repo.update_repo() + + # Should have cloned + assert repo_path.exists() + assert (repo_path / ".hg").exists() + + +class TestAsyncHgSyncGetRevision: + """Tests for AsyncHgSync.get_revision().""" + + @pytest.mark.asyncio + async def test_get_revision( + self, + tmp_path: Path, + create_hg_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test get_revision returns current revision.""" + 
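# Local revision numbers are clone-specific in Mercurial; the changeset
+        # hash is the portable identifier across clones.
+        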
remote_repo = create_hg_remote_repo()
+        repo_path = tmp_path / "rev_repo"
+
+        repo = AsyncHgSync(
+            url=f"file://{remote_repo}",
+            path=repo_path,
+        )
+        await repo.obtain()
+
+        revision = await repo.get_revision()
+        # Mercurial revisions are numeric (0, 1, 2, ...)
+        assert revision.strip().isdigit() or revision.strip() == ""
+
+
+class TestAsyncHgRepoFixture:
+    """Tests for the async_hg_repo pytest fixture."""
+
+    @pytest.mark.asyncio
+    async def test_async_hg_repo_fixture(
+        self,
+        async_hg_repo: AsyncHgSync,
+    ) -> None:
+        """Test that async_hg_repo fixture provides a working repository."""
+        assert async_hg_repo.path.exists()
+        assert (async_hg_repo.path / ".hg").exists()
+
+    @pytest.mark.asyncio
+    async def test_async_hg_repo_revision(
+        self,
+        async_hg_repo: AsyncHgSync,
+    ) -> None:
+        """Test async_hg_repo fixture can get revision."""
+        revision = await async_hg_repo.get_revision()
+        assert revision.strip().isdigit() or revision.strip() == ""
diff --git a/tests/sync/_async/test_svn.py b/tests/sync/_async/test_svn.py
new file mode 100644
index 000000000..c1d367660
--- /dev/null
+++ b/tests/sync/_async/test_svn.py
@@ -0,0 +1,271 @@
+"""Tests for libvcs.sync._async.svn."""
+
+from __future__ import annotations
+
+import shutil
+from pathlib import Path
+
+import pytest
+
+from libvcs.pytest_plugin import CreateRepoPytestFixtureFn
+from libvcs.sync._async.svn import AsyncSvnSync
+
+if not shutil.which("svn"):
+    pytestmark = pytest.mark.skip(reason="svn is not available")
+
+
+class TestAsyncSvnSyncGetSvnUrlRev:
+    """Tests for AsyncSvnSync._get_svn_url_rev()."""
+
+    @pytest.mark.asyncio
+    async def test_get_svn_url_rev_from_working_copy(
+        self,
+        tmp_path: Path,
+        create_svn_remote_repo: CreateRepoPytestFixtureFn,
+    ) -> None:
+        """Test _get_svn_url_rev from actual working copy."""
+        remote_repo = create_svn_remote_repo()
+        repo_path = tmp_path / "url_rev_repo"
+
+        repo = AsyncSvnSync(
+            url=f"file://{remote_repo}",
+            path=repo_path,
+        )
+        await repo.obtain()
+
+        # Get URL and revision from the working copy
+        url, rev = await repo._get_svn_url_rev(str(repo_path))
+
+        # URL should match the remote
+        assert url is not None
+        assert str(remote_repo) in url
+        # Revision should be 0 for empty repo
+        assert rev == 0
+
+    @pytest.mark.asyncio
+    async def test_get_svn_url_rev_nonexistent_location(
+        self,
+        tmp_path: Path,
+        create_svn_remote_repo: CreateRepoPytestFixtureFn,
+    ) -> None:
+        """Test _get_svn_url_rev with non-existent location."""
+        remote_repo = create_svn_remote_repo()
+        repo_path = tmp_path / "url_rev_repo"
+
+        repo = AsyncSvnSync(
+            url=f"file://{remote_repo}",
+            path=repo_path,
+        )
+        await repo.obtain()
+
+        # Get URL and revision from non-existent path
+        url, rev = await repo._get_svn_url_rev(str(tmp_path / "nonexistent"))
+
+        # Should return None, 0 for non-existent path
+        assert url is None
+        assert rev == 0
+
+    @pytest.mark.asyncio
+    async def test_get_svn_url_rev_xml_format(
+        self,
+        tmp_path: Path,
+        create_svn_remote_repo: CreateRepoPytestFixtureFn,
+    ) -> None:
+        """Test _get_svn_url_rev with XML format entries file."""
+        remote_repo = create_svn_remote_repo()
+        repo_path = tmp_path / "xml_repo"
+
+        repo = AsyncSvnSync(
+            url=f"file://{remote_repo}",
+            path=repo_path,
+        )
+        await repo.obtain()
+
+        # Create a mock XML entries file. The attribute names assume the
+        # legacy XML ``entries`` layout the parser understands:
+        # ``url="..."`` plus one ``committed-rev="..."`` per entry.
+        entries_path = repo_path / ".svn" / "entries"
+        xml_data = f"""<?xml version="1.0" encoding="utf-8"?>
+<wc-entries xmlns="svn:">
+<entry name="" url="file://{remote_repo}" committed-rev="42"/>
+<entry name="trunk" committed-rev="10"/>
+</wc-entries>
+"""
+        entries_path.write_text(xml_data)
+
+        # Parse the mock entries file
+        url, rev = await repo._get_svn_url_rev(str(repo_path))
+
+        # Should parse URL and max
revision from XML + assert url is not None + assert str(remote_repo) in url + assert rev == 42 # max of 42 and 10 + + +class TestAsyncSvnSync: + """Tests for AsyncSvnSync class.""" + + def test_init(self, tmp_path: Path) -> None: + """Test AsyncSvnSync initialization.""" + repo = AsyncSvnSync( + url="file:///path/to/repo", + path=tmp_path / "repo", + ) + assert repo.url == "file:///path/to/repo" + assert repo.path == tmp_path / "repo" + + def test_repr(self, tmp_path: Path) -> None: + """Test AsyncSvnSync repr.""" + repo = AsyncSvnSync( + url="file:///path/to/repo", + path=tmp_path / "myrepo", + ) + assert "AsyncSvnSync" in repr(repo) + assert "myrepo" in repr(repo) + + def test_init_with_auth(self, tmp_path: Path) -> None: + """Test AsyncSvnSync initialization with auth credentials.""" + repo = AsyncSvnSync( + url="svn://svn.example.com/repo", + path=tmp_path / "repo", + username="user", + password="pass", + svn_trust_cert=True, + ) + assert repo.username == "user" + assert repo.password == "pass" + assert repo.svn_trust_cert is True + + +class TestAsyncSvnSyncObtain: + """Tests for AsyncSvnSync.obtain().""" + + @pytest.mark.asyncio + async def test_obtain_basic( + self, + tmp_path: Path, + create_svn_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test basic obtain operation.""" + remote_repo = create_svn_remote_repo() + repo_path = tmp_path / "obtained_repo" + + repo = AsyncSvnSync( + url=f"file://{remote_repo}", + path=repo_path, + ) + await repo.obtain() + + assert repo_path.exists() + assert (repo_path / ".svn").exists() + + +class TestAsyncSvnSyncUpdateRepo: + """Tests for AsyncSvnSync.update_repo().""" + + @pytest.mark.asyncio + async def test_update_repo_basic( + self, + tmp_path: Path, + create_svn_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test basic update_repo operation.""" + remote_repo = create_svn_remote_repo() + repo_path = tmp_path / "update_repo" + + repo = AsyncSvnSync( + url=f"file://{remote_repo}", + path=repo_path, + ) + # First obtain + await repo.obtain() + + # Then update + await repo.update_repo() + + assert repo_path.exists() + + @pytest.mark.asyncio + async def test_update_repo_obtains_if_missing( + self, + tmp_path: Path, + create_svn_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test update_repo checks out if repo doesn't exist.""" + remote_repo = create_svn_remote_repo() + repo_path = tmp_path / "new_repo" + + repo = AsyncSvnSync( + url=f"file://{remote_repo}", + path=repo_path, + ) + # Just update_repo without obtain first + await repo.update_repo() + + # Should have checked out + assert repo_path.exists() + assert (repo_path / ".svn").exists() + + +class TestAsyncSvnSyncGetRevision: + """Tests for AsyncSvnSync.get_revision().""" + + @pytest.mark.asyncio + async def test_get_revision( + self, + tmp_path: Path, + create_svn_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test get_revision returns current revision.""" + remote_repo = create_svn_remote_repo() + repo_path = tmp_path / "rev_repo" + + repo = AsyncSvnSync( + url=f"file://{remote_repo}", + path=repo_path, + ) + await repo.obtain() + + revision = await repo.get_revision() + # SVN revisions start at 0 for empty repos + assert revision == 0 + + @pytest.mark.asyncio + async def test_get_revision_file( + self, + tmp_path: Path, + create_svn_remote_repo: CreateRepoPytestFixtureFn, + ) -> None: + """Test get_revision_file returns file revision.""" + remote_repo = create_svn_remote_repo() + repo_path = tmp_path / "rev_file_repo" + + repo = AsyncSvnSync( + 
url=f"file://{remote_repo}", + path=repo_path, + ) + await repo.obtain() + + revision = await repo.get_revision_file("./") + # SVN revisions start at 0 for empty repos + assert revision == 0 + + +class TestAsyncSvnRepoFixture: + """Tests for the async_svn_repo pytest fixture.""" + + @pytest.mark.asyncio + async def test_async_svn_repo_fixture( + self, + async_svn_repo: AsyncSvnSync, + ) -> None: + """Test that async_svn_repo fixture provides a working repository.""" + assert async_svn_repo.path.exists() + assert (async_svn_repo.path / ".svn").exists() + + @pytest.mark.asyncio + async def test_async_svn_repo_revision( + self, + async_svn_repo: AsyncSvnSync, + ) -> None: + """Test async_svn_repo fixture can get revision.""" + revision = await async_svn_repo.get_revision() + # SVN revisions start at 0 for empty repos + assert revision == 0 diff --git a/tests/test_fixture_performance.py b/tests/test_fixture_performance.py new file mode 100644 index 000000000..8841231a9 --- /dev/null +++ b/tests/test_fixture_performance.py @@ -0,0 +1,721 @@ +"""Performance tests for libvcs pytest fixtures. + +These tests verify that fixture caching works correctly and measures +fixture setup times. They are skipped by default - run with --run-performance. +""" + +from __future__ import annotations + +import pathlib +import typing as t + +import pytest + +from libvcs.sync.git import GitSync +from libvcs.sync.hg import HgSync +from libvcs.sync.svn import SvnSync + +if t.TYPE_CHECKING: + from libvcs.pytest_plugin import RepoFixtureResult + from libvcs.sync._async.git import AsyncGitSync + from libvcs.sync._async.hg import AsyncHgSync + from libvcs.sync._async.svn import AsyncSvnSync + + +# ============================================================================= +# Git Fixture Performance Tests +# ============================================================================= + + +@pytest.mark.performance +def test_git_repo_fixture_uses_cache( + git_repo: GitSync, + remote_repos_path: pathlib.Path, +) -> None: + """Verify git_repo fixture uses master_copy caching.""" + master_copy = remote_repos_path / "git_repo_master" + assert master_copy.exists(), "master_copy should exist after fixture setup" + assert (master_copy / ".git").exists(), "master_copy should be a valid git repo" + + +@pytest.mark.performance +def test_git_remote_repo_uses_persistent_cache( + git_remote_repo: pathlib.Path, + libvcs_persistent_cache: pathlib.Path, +) -> None: + """Verify git_remote_repo uses XDG persistent cache.""" + assert str(libvcs_persistent_cache) in str(git_remote_repo) + # Non-bare repo has .git directory + assert (git_remote_repo / ".git").exists(), "remote should have .git directory" + + +@pytest.mark.performance +def test_git_repo_fixture_setup_time( + git_repo: GitSync, +) -> None: + """Verify git_repo fixture setup is fast (uses cached copy).""" + # The fixture is already set up - this test just verifies it works + # The actual timing is captured by the fixture profiling hooks + assert git_repo.path.exists() + + +@pytest.mark.performance +def test_git_repo_fixture_provides_working_repo( + git_repo: GitSync, +) -> None: + """Verify git_repo fixture provides a functional repository.""" + # Should have .git directory + git_dir = pathlib.Path(git_repo.path) / ".git" + assert git_dir.exists(), "git_repo should have .git directory" + + # Should be able to get revision + revision = git_repo.get_revision() + assert revision, "git_repo should have a revision" + + +# 
+# =============================================================================
+# Mercurial Fixture Performance Tests
+# =============================================================================
+
+
+@pytest.mark.performance
+def test_hg_repo_fixture_uses_cache(
+    hg_repo: HgSync,
+    remote_repos_path: pathlib.Path,
+) -> None:
+    """Verify hg_repo fixture uses master_copy caching."""
+    master_copy = remote_repos_path / "hg_repo_master"
+    assert master_copy.exists(), "master_copy should exist after fixture setup"
+    assert (master_copy / ".hg").exists(), "master_copy should be a valid hg repo"
+
+
+@pytest.mark.performance
+def test_hg_remote_repo_uses_persistent_cache(
+    hg_remote_repo: pathlib.Path,
+    libvcs_persistent_cache: pathlib.Path,
+) -> None:
+    """Verify hg_remote_repo uses XDG persistent cache."""
+    assert str(libvcs_persistent_cache) in str(hg_remote_repo)
+    assert (hg_remote_repo / ".hg").exists(), "remote should have .hg"
+
+
+@pytest.mark.performance
+def test_hg_repo_fixture_provides_working_repo(
+    hg_repo: HgSync,
+) -> None:
+    """Verify hg_repo fixture provides a functional repository."""
+    hg_dir = pathlib.Path(hg_repo.path) / ".hg"
+    assert hg_dir.exists(), "hg_repo should have .hg directory"
+
+
+@pytest.mark.performance
+def test_hg_remote_repo_has_marker_file(
+    hg_remote_repo: pathlib.Path,
+) -> None:
+    """Verify hg_remote_repo uses marker file for initialization tracking."""
+    marker = hg_remote_repo / ".libvcs_initialized"
+    assert marker.exists(), "hg_remote_repo should have .libvcs_initialized marker"
+
+
+@pytest.mark.performance
+def test_hg_repo_warm_cache_is_fast(
+    hg_repo: RepoFixtureResult[HgSync],
+) -> None:
+    """Verify hg_repo warm cache uses copytree (should be <50ms).
+
+    Mercurial is inherently slow (~100ms for hg --version alone),
+    so we verify that cached runs avoid hg commands entirely.
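+
+    The assertion below is intentionally conditional: from_cache is only
+    expected on warm (cached) runs, so a cold first run that builds the
+    master copy is not held to the <50ms target.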
+ """ + # If from_cache is True, this was a copytree operation + if hg_repo.from_cache: + # created_at is relative perf_counter, but we verify it's fast + assert hg_repo.master_copy_path.exists() + + +# ============================================================================= +# SVN Fixture Performance Tests +# ============================================================================= + + +@pytest.mark.performance +def test_svn_repo_fixture_uses_cache( + svn_repo: SvnSync, + remote_repos_path: pathlib.Path, +) -> None: + """Verify svn_repo fixture uses master_copy caching.""" + master_copy = remote_repos_path / "svn_repo_master" + assert master_copy.exists(), "master_copy should exist after fixture setup" + assert (master_copy / ".svn").exists(), "master_copy should be a valid svn checkout" + + +@pytest.mark.performance +def test_svn_remote_repo_uses_persistent_cache( + svn_remote_repo: pathlib.Path, + libvcs_persistent_cache: pathlib.Path, +) -> None: + """Verify svn_remote_repo uses XDG persistent cache.""" + assert str(libvcs_persistent_cache) in str(svn_remote_repo) + assert (svn_remote_repo / "format").exists(), "remote should have format file" + + +@pytest.mark.performance +def test_svn_repo_fixture_provides_working_repo( + svn_repo: SvnSync, +) -> None: + """Verify svn_repo fixture provides a functional repository.""" + # Should have .svn directory + svn_dir = pathlib.Path(svn_repo.path) / ".svn" + assert svn_dir.exists(), "svn_repo should have .svn directory" + + # Should be able to get revision (0 is valid for initial checkout) + revision = svn_repo.get_revision() + assert revision is not None, "svn_repo should return a revision" + + +@pytest.mark.performance +def test_svn_remote_repo_has_marker_file( + svn_remote_repo: pathlib.Path, +) -> None: + """Verify svn_remote_repo uses marker file for initialization tracking.""" + marker = svn_remote_repo / ".libvcs_initialized" + assert marker.exists(), "svn_remote_repo should have .libvcs_initialized marker" + + +@pytest.mark.performance +def test_svn_repo_warm_cache_is_fast( + svn_repo: RepoFixtureResult[SvnSync], +) -> None: + """Verify svn_repo warm cache uses copytree (should be <50ms). + + SVN checkout is slow (~500ms for svn co alone), + so we verify that cached runs avoid svn commands entirely. 
+ """ + # If from_cache is True, this was a copytree operation + if svn_repo.from_cache: + # created_at is relative perf_counter, but we verify it's fast + assert svn_repo.master_copy_path.exists() + + +# ============================================================================= +# Async Fixture Performance Tests +# ============================================================================= + + +@pytest.mark.performance +@pytest.mark.asyncio +async def test_async_git_repo_fixture_uses_shared_cache( + async_git_repo: AsyncGitSync, + remote_repos_path: pathlib.Path, +) -> None: + """Verify async_git_repo shares master_copy with sync git_repo.""" + # Both sync and async should use the same master copy + master_copy = remote_repos_path / "git_repo_master" + assert master_copy.exists(), "shared master_copy should exist" + + +@pytest.mark.performance +@pytest.mark.asyncio +async def test_async_hg_repo_fixture_uses_shared_cache( + async_hg_repo: AsyncHgSync, + remote_repos_path: pathlib.Path, +) -> None: + """Verify async_hg_repo shares master_copy with sync hg_repo.""" + master_copy = remote_repos_path / "hg_repo_master" + assert master_copy.exists(), "shared master_copy should exist" + + +@pytest.mark.performance +@pytest.mark.asyncio +async def test_async_svn_repo_fixture_uses_shared_cache( + async_svn_repo: AsyncSvnSync, + remote_repos_path: pathlib.Path, +) -> None: + """Verify async_svn_repo shares master_copy with sync svn_repo.""" + master_copy = remote_repos_path / "svn_repo_master" + assert master_copy.exists(), "shared master_copy should exist" + + +# ============================================================================= +# Cache Invalidation Tests +# ============================================================================= + + +@pytest.mark.performance +def test_persistent_cache_has_version_key( + libvcs_persistent_cache: pathlib.Path, +) -> None: + """Verify persistent cache uses version-based directory.""" + # The cache path should end with a 12-char hex hash + cache_name = libvcs_persistent_cache.name + assert len(cache_name) == 12, f"cache key should be 12 chars, got: {cache_name}" + # Should be valid hex + try: + int(cache_name, 16) + except ValueError: + pytest.fail(f"cache key should be hex, got: {cache_name}") + + +@pytest.mark.performance +def test_persistent_cache_location_follows_xdg( + libvcs_persistent_cache: pathlib.Path, +) -> None: + """Verify persistent cache is in XDG cache structure.""" + # The cache should be at /libvcs-test// + # We verify the structure rather than exact path (since HOME is monkeypatched) + assert libvcs_persistent_cache.parent.name == "libvcs-test" + # Cache key should be 12-char hex + assert len(libvcs_persistent_cache.name) == 12 + + +@pytest.mark.performance +def test_version_key_is_cached_to_disk( + libvcs_persistent_cache: pathlib.Path, +) -> None: + """Verify VCS version detection is cached to disk. + + This optimization avoids running slow `hg --version` (102ms) on every + pytest session by caching the computed cache key to disk with 24h TTL. 
+ """ + cache_key_file = libvcs_persistent_cache.parent / ".cache_key" + assert cache_key_file.exists(), ".cache_key file should exist" + # Should contain the same 12-char hex key as the cache directory name + cached_key = cache_key_file.read_text().strip() + assert cached_key == libvcs_persistent_cache.name + + +# ============================================================================= +# Multiple Fixture Usage Tests +# ============================================================================= + + +@pytest.mark.performance +def test_multiple_git_repo_instances_are_isolated( + git_repo: GitSync, + remote_repos_path: pathlib.Path, +) -> None: + """Verify each test gets its own copy of git_repo.""" + # Create a file in this repo + test_file = pathlib.Path(git_repo.path) / "test_isolation.txt" + test_file.write_text("test isolation") + + # The master copy should NOT have this file + master_copy = remote_repos_path / "git_repo_master" + master_test_file = master_copy / "test_isolation.txt" + assert not master_test_file.exists(), "test changes should not affect master_copy" + + +@pytest.mark.performance +def test_fixture_timing_baseline( + git_repo: GitSync, + hg_repo: HgSync, + svn_repo: SvnSync, +) -> None: + """Baseline test that uses all three repo fixtures. + + This test helps establish timing baselines when running with + --fixture-durations. + """ + assert pathlib.Path(git_repo.path).exists() + assert pathlib.Path(hg_repo.path).exists() + assert pathlib.Path(svn_repo.path).exists() + + +# ============================================================================= +# Copy Method Benchmarks +# ============================================================================= +# These benchmarks compare native VCS copy commands against shutil.copytree +# to determine which method is faster for each VCS type. + + +class CopyBenchmarkResult(t.NamedTuple): + """Result from a copy benchmark iteration.""" + + method: str + duration_ms: float + + +def _benchmark_copy( + src: pathlib.Path, + dst_base: pathlib.Path, + copy_fn: t.Callable[[pathlib.Path, pathlib.Path], None], + iterations: int = 5, +) -> list[float]: + """Run copy benchmark for multiple iterations, return durations in ms.""" + import shutil + import time + + durations: list[float] = [] + for i in range(iterations): + dst = dst_base / f"iter_{i}" + if dst.exists(): + shutil.rmtree(dst) + + start = time.perf_counter() + copy_fn(src, dst) + duration_ms = (time.perf_counter() - start) * 1000 + durations.append(duration_ms) + + # Cleanup for next iteration + if dst.exists(): + shutil.rmtree(dst) + + return durations + + +@pytest.mark.performance +@pytest.mark.benchmark +def test_benchmark_svn_copy_methods( + empty_svn_repo: pathlib.Path, + tmp_path: pathlib.Path, +) -> None: + """Benchmark svnadmin hotcopy vs shutil.copytree for SVN repos. + + This test determines if svnadmin hotcopy is faster than shutil.copytree. + Results are printed to help decide which method to use in fixtures. 
+ """ + import shutil + import subprocess + + def copytree_copy(src: pathlib.Path, dst: pathlib.Path) -> None: + shutil.copytree(src, dst) + + def hotcopy_copy(src: pathlib.Path, dst: pathlib.Path) -> None: + dst.parent.mkdir(parents=True, exist_ok=True) + subprocess.run( + ["svnadmin", "hotcopy", str(src), str(dst)], + check=True, + capture_output=True, + timeout=30, + ) + + # Benchmark both methods + copytree_times = _benchmark_copy( + empty_svn_repo, tmp_path / "copytree", copytree_copy + ) + hotcopy_times = _benchmark_copy(empty_svn_repo, tmp_path / "hotcopy", hotcopy_copy) + + # Calculate statistics + copytree_avg = sum(copytree_times) / len(copytree_times) + copytree_min = min(copytree_times) + hotcopy_avg = sum(hotcopy_times) / len(hotcopy_times) + hotcopy_min = min(hotcopy_times) + + # Report results + print("\n" + "=" * 60) + print("SVN Copy Method Benchmark Results") + print("=" * 60) + print(f"shutil.copytree: avg={copytree_avg:.2f}ms, min={copytree_min:.2f}ms") + print(f"svnadmin hotcopy: avg={hotcopy_avg:.2f}ms, min={hotcopy_min:.2f}ms") + print(f"Speedup: {copytree_avg / hotcopy_avg:.2f}x") + print(f"Winner: {'hotcopy' if hotcopy_avg < copytree_avg else 'copytree'}") + print("=" * 60) + + # Store results for analysis (test always passes - it's informational) + # The assertion is informational - we want to see results regardless + assert True, ( + f"SVN benchmark: copytree={copytree_avg:.2f}ms, hotcopy={hotcopy_avg:.2f}ms" + ) + + +@pytest.mark.performance +@pytest.mark.benchmark +def test_benchmark_git_copy_methods( + empty_git_repo: pathlib.Path, + tmp_path: pathlib.Path, +) -> None: + """Benchmark git clone --local vs shutil.copytree for Git repos. + + Git's --local flag uses hardlinks when possible, which can be faster. + """ + import shutil + import subprocess + + def copytree_copy(src: pathlib.Path, dst: pathlib.Path) -> None: + shutil.copytree(src, dst) + + def git_clone_local(src: pathlib.Path, dst: pathlib.Path) -> None: + dst.parent.mkdir(parents=True, exist_ok=True) + subprocess.run( + ["git", "clone", "--local", str(src), str(dst)], + check=True, + capture_output=True, + timeout=30, + ) + + # Benchmark both methods + copytree_times = _benchmark_copy( + empty_git_repo, tmp_path / "copytree", copytree_copy + ) + clone_times = _benchmark_copy(empty_git_repo, tmp_path / "clone", git_clone_local) + + # Calculate statistics + copytree_avg = sum(copytree_times) / len(copytree_times) + copytree_min = min(copytree_times) + clone_avg = sum(clone_times) / len(clone_times) + clone_min = min(clone_times) + + # Report results + print("\n" + "=" * 60) + print("Git Copy Method Benchmark Results") + print("=" * 60) + print(f"shutil.copytree: avg={copytree_avg:.2f}ms, min={copytree_min:.2f}ms") + print(f"git clone --local: avg={clone_avg:.2f}ms, min={clone_min:.2f}ms") + print(f"Speedup: {copytree_avg / clone_avg:.2f}x") + print(f"Winner: {'clone' if clone_avg < copytree_avg else 'copytree'}") + print("=" * 60) + + assert True, ( + f"Git benchmark: copytree={copytree_avg:.2f}ms, clone={clone_avg:.2f}ms" + ) + + +@pytest.mark.performance +@pytest.mark.benchmark +def test_benchmark_hg_copy_methods( + empty_hg_repo: pathlib.Path, + tmp_path: pathlib.Path, + hgconfig: pathlib.Path, +) -> None: + """Benchmark hg clone vs shutil.copytree for Mercurial repos. + + Mercurial's clone can use hardlinks with --pull, but hg is inherently slow. 
+ """ + import os + import shutil + import subprocess + + env = {**os.environ, "HGRCPATH": str(hgconfig)} + + def copytree_copy(src: pathlib.Path, dst: pathlib.Path) -> None: + shutil.copytree(src, dst) + + def hg_clone(src: pathlib.Path, dst: pathlib.Path) -> None: + dst.parent.mkdir(parents=True, exist_ok=True) + subprocess.run( + ["hg", "clone", str(src), str(dst)], + check=True, + capture_output=True, + timeout=60, + env=env, + ) + + # Benchmark both methods + copytree_times = _benchmark_copy( + empty_hg_repo, tmp_path / "copytree", copytree_copy + ) + clone_times = _benchmark_copy(empty_hg_repo, tmp_path / "clone", hg_clone) + + # Calculate statistics + copytree_avg = sum(copytree_times) / len(copytree_times) + copytree_min = min(copytree_times) + clone_avg = sum(clone_times) / len(clone_times) + clone_min = min(clone_times) + + # Report results + print("\n" + "=" * 60) + print("Mercurial Copy Method Benchmark Results") + print("=" * 60) + print(f"shutil.copytree: avg={copytree_avg:.2f}ms, min={copytree_min:.2f}ms") + print(f"hg clone: avg={clone_avg:.2f}ms, min={clone_min:.2f}ms") + print(f"Speedup: {copytree_avg / clone_avg:.2f}x") + print(f"Winner: {'clone' if clone_avg < copytree_avg else 'copytree'}") + print("=" * 60) + + assert True, f"Hg benchmark: copytree={copytree_avg:.2f}ms, clone={clone_avg:.2f}ms" + + +@pytest.mark.performance +@pytest.mark.benchmark +def test_benchmark_summary( + empty_git_repo: pathlib.Path, + empty_svn_repo: pathlib.Path, + empty_hg_repo: pathlib.Path, + tmp_path: pathlib.Path, + hgconfig: pathlib.Path, +) -> None: + """Comprehensive benchmark summary comparing all VCS copy methods. + + This test provides a single-run summary of all copy methods for quick + comparison. Run with: pytest -v -s -m benchmark --run-performance + """ + import os + import shutil + import subprocess + import time + + env = {**os.environ, "HGRCPATH": str(hgconfig)} + + def measure_once( + name: str, + src: pathlib.Path, + dst: pathlib.Path, + copy_fn: t.Callable[[], t.Any], + ) -> float: + if dst.exists(): + shutil.rmtree(dst) + start = time.perf_counter() + copy_fn() + duration = (time.perf_counter() - start) * 1000 + if dst.exists(): + shutil.rmtree(dst) + return duration + + results: dict[str, dict[str, float]] = {} + + # SVN benchmarks + svn_copytree_dst = tmp_path / "svn_copytree" + svn_hotcopy_dst = tmp_path / "svn_hotcopy" + results["SVN"] = { + "copytree": measure_once( + "svn_copytree", + empty_svn_repo, + svn_copytree_dst, + lambda: shutil.copytree(empty_svn_repo, svn_copytree_dst), + ), + "native": measure_once( + "svn_hotcopy", + empty_svn_repo, + svn_hotcopy_dst, + lambda: subprocess.run( + ["svnadmin", "hotcopy", str(empty_svn_repo), str(svn_hotcopy_dst)], + check=True, + capture_output=True, + ), + ), + } + + # Git benchmarks + git_copytree_dst = tmp_path / "git_copytree" + git_clone_dst = tmp_path / "git_clone" + results["Git"] = { + "copytree": measure_once( + "git_copytree", + empty_git_repo, + git_copytree_dst, + lambda: shutil.copytree(empty_git_repo, git_copytree_dst), + ), + "native": measure_once( + "git_clone", + empty_git_repo, + git_clone_dst, + lambda: subprocess.run( + ["git", "clone", "--local", str(empty_git_repo), str(git_clone_dst)], + check=True, + capture_output=True, + ), + ), + } + + # Hg benchmarks + hg_copytree_dst = tmp_path / "hg_copytree" + hg_clone_dst = tmp_path / "hg_clone" + results["Hg"] = { + "copytree": measure_once( + "hg_copytree", + empty_hg_repo, + hg_copytree_dst, + lambda: shutil.copytree(empty_hg_repo, hg_copytree_dst), + 
+        ),
+        "native": measure_once(
+            "hg_clone",
+            empty_hg_repo,
+            hg_clone_dst,
+            lambda: subprocess.run(
+                ["hg", "clone", str(empty_hg_repo), str(hg_clone_dst)],
+                check=True,
+                capture_output=True,
+                env=env,
+            ),
+        ),
+    }
+
+    # Print summary
+    print("\n" + "=" * 70)
+    print("VCS Copy Method Benchmark Summary")
+    print("=" * 70)
+    print(
+        f"{'VCS':<6} {'copytree (ms)':<15} {'native (ms)':<15} "
+        f"{'speedup':<10} {'winner'}"
+    )
+    print("-" * 70)
+    for vcs, times in results.items():
+        speedup = times["copytree"] / times["native"]
+        winner = "native" if times["native"] < times["copytree"] else "copytree"
+        print(
+            f"{vcs:<6} {times['copytree']:<15.2f} {times['native']:<15.2f} "
+            f"{speedup:<10.2f}x {winner}"
+        )
+    print("=" * 70)
+    print("\nRecommendations:")
+    for vcs, times in results.items():
+        if times["native"] < times["copytree"]:
+            print(
+                f"  - {vcs}: Use native copy (svnadmin hotcopy / git clone / hg clone)"
+            )
+        else:
+            print(f"  - {vcs}: Use shutil.copytree")
+    print("=" * 70)
+
+
+@pytest.mark.performance
+@pytest.mark.benchmark
+def test_benchmark_reflink_vs_copytree(
+    empty_git_repo: pathlib.Path,
+    tmp_path: pathlib.Path,
+) -> None:
+    """Benchmark cp --reflink=auto vs shutil.copytree.
+
+    On Btrfs/XFS: reflink should be significantly faster (10-100x).
+    On ext4: both should be similar (reflink falls back to regular copy).
+
+    This benchmark validates the copytree_reflink() optimization choice:
+    - If reflink is faster, we get a performance win on CoW filesystems
+    - If similar, we have no regression on traditional filesystems
+    """
+    import shutil
+    import subprocess
+
+    def copytree_copy(src: pathlib.Path, dst: pathlib.Path) -> None:
+        shutil.copytree(src, dst)
+
+    def reflink_copy(src: pathlib.Path, dst: pathlib.Path) -> None:
+        dst.parent.mkdir(parents=True, exist_ok=True)
+        subprocess.run(
+            ["cp", "-a", "--reflink=auto", str(src), str(dst)],
+            check=True,
+            capture_output=True,
+            timeout=60,
+        )
+
+    # Benchmark both methods
+    copytree_times = _benchmark_copy(
+        empty_git_repo, tmp_path / "copytree", copytree_copy
+    )
+    reflink_times = _benchmark_copy(empty_git_repo, tmp_path / "reflink", reflink_copy)
+
+    # Calculate statistics
+    copytree_avg = sum(copytree_times) / len(copytree_times)
+    copytree_min = min(copytree_times)
+    reflink_avg = sum(reflink_times) / len(reflink_times)
+    reflink_min = min(reflink_times)
+
+    # Determine filesystem type hint
+    speedup = copytree_avg / reflink_avg
+    fs_hint = (
+        "CoW filesystem (Btrfs/XFS)" if speedup > 2 else "Traditional filesystem (ext4)"
+    )
+
+    # Report results
+    print("\n" + "=" * 60)
+    print("Reflink vs Copytree Benchmark Results")
+    print("=" * 60)
+    print(f"shutil.copytree: avg={copytree_avg:.2f}ms, min={copytree_min:.2f}ms")
+    print(f"cp --reflink=auto: avg={reflink_avg:.2f}ms, min={reflink_min:.2f}ms")
+    print(f"Speedup: {speedup:.2f}x")
+    print(f"Likely filesystem: {fs_hint}")
+    print(f"Winner: {'reflink' if reflink_avg < copytree_avg else 'copytree'}")
+    print("=" * 60)
+
+    # Informational - the test always passes once both methods complete successfully
+    assert copytree_avg > 0 and reflink_avg > 0
diff --git a/uv.lock b/uv.lock
index 20d79f4ee..2e45cd963 100644
--- a/uv.lock
+++ b/uv.lock
@@ -50,6 +50,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" },
 ]
 
+[[package]]
+name = "backports-asyncio-runner"
+version = "1.2.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162", size = 69893, upload-time = "2025-07-02T02:27:15.685Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313, upload-time = "2025-07-02T02:27:14.263Z" },
+]
+
 [[package]]
 name = "beautifulsoup4"
 version = "4.14.3"
@@ -494,6 +503,7 @@ dev = [
     { name = "mypy" },
     { name = "myst-parser" },
     { name = "pytest" },
+    { name = "pytest-asyncio" },
     { name = "pytest-cov" },
     { name = "pytest-mock" },
     { name = "pytest-rerunfailures" },
@@ -533,6 +543,7 @@ lint = [
 testing = [
     { name = "gp-libs" },
     { name = "pytest" },
+    { name = "pytest-asyncio" },
     { name = "pytest-mock" },
     { name = "pytest-rerunfailures" },
     { name = "pytest-watcher" },
@@ -556,6 +567,7 @@ dev = [
     { name = "mypy" },
     { name = "myst-parser" },
     { name = "pytest" },
+    { name = "pytest-asyncio" },
     { name = "pytest-cov" },
     { name = "pytest-mock" },
     { name = "pytest-rerunfailures" },
@@ -589,6 +601,7 @@ lint = [
 testing = [
     { name = "gp-libs" },
     { name = "pytest" },
+    { name = "pytest-asyncio" },
     { name = "pytest-mock" },
     { name = "pytest-rerunfailures" },
     { name = "pytest-watcher" },
@@ -851,6 +864,20 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" },
 ]
 
+[[package]]
+name = "pytest-asyncio"
+version = "1.3.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "backports-asyncio-runner", marker = "python_full_version < '3.11'" },
+    { name = "pytest" },
+    { name = "typing-extensions", marker = "python_full_version < '3.13'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" },
+]
+
 [[package]]
 name = "pytest-cov"
 version = "7.0.0"