Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 29 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,32 @@ core
build/
dist/
build.sh
*.job
*.job

# Testing
.pytest_cache/
.coverage
htmlcov/
coverage.xml
*.py[cod]
*$py.class

# Claude settings
.claude/*

# Virtual environments
venv/
env/
ENV/
.venv/

# IDE files
.idea/
*.swp
*.swo
.DS_Store

# Build artifacts
*.so
.eggs/
.Python
2,125 changes: 2,125 additions & 0 deletions poetry.lock

Large diffs are not rendered by default.

105 changes: 105 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,105 @@
[tool.poetry]
name = "hiera-transformer"
version = "0.1.4"
description = "A fast, powerful, and simple hierarchical vision transformer"
authors = ["Chaitanya Ryali", "Daniel Bolya"]
license = "Apache-2.0"
readme = "README.md"
homepage = "https://github.com/facebookresearch/hiera"
repository = "https://github.com/facebookresearch/hiera"
keywords = ["vision", "transformer", "hierarchical", "deep-learning", "pytorch"]
classifiers = [
"Intended Audience :: Developers",
"Intended Audience :: Education",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
]
packages = [{include = "hiera"}]

[tool.poetry.dependencies]
python = "^3.8"
torch = ">=1.8.1"
timm = ">=0.4.12"
tqdm = "*"
packaging = "*"

[tool.poetry.group.dev.dependencies]
pytest = "^7.4.0"
pytest-cov = "^4.1.0"
pytest-mock = "^3.11.0"

[tool.poetry.scripts]
test = "pytest:main"
tests = "pytest:main"

[tool.pytest.ini_options]
minversion = "7.0"
testpaths = ["tests"]
python_files = ["test_*.py", "*_test.py"]
python_classes = ["Test*", "*Tests"]
python_functions = ["test_*"]
addopts = [
"-ra",
"--strict-markers",
"--cov=hiera",
"--cov-report=term-missing",
"--cov-report=html:htmlcov",
"--cov-report=xml:coverage.xml",
"--cov-fail-under=80",
"-vv",
]
markers = [
"unit: marks tests as unit tests (fast, isolated)",
"integration: marks tests as integration tests (may be slower)",
"slow: marks tests as slow (deselect with '-m \"not slow\"')",
]
filterwarnings = [
"error",
"ignore::UserWarning",
"ignore::DeprecationWarning",
]

[tool.coverage.run]
source = ["hiera"]
branch = true
omit = [
"*/tests/*",
"*/test_*",
"*/__init__.py",
"*/setup.py",
"*/examples/*",
]

[tool.coverage.report]
exclude_lines = [
"pragma: no cover",
"def __repr__",
"if self.debug:",
"if __name__ == .__main__.:",
"raise AssertionError",
"raise NotImplementedError",
"if 0:",
"if False:",
"@abstractmethod",
]
precision = 2
show_missing = true
skip_covered = false

[tool.coverage.html]
directory = "htmlcov"

[tool.coverage.xml]
output = "coverage.xml"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
Empty file added tests/__init__.py
Empty file.
110 changes: 110 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,110 @@
"""Shared pytest fixtures and configuration for all tests."""

import sys
import tempfile
from pathlib import Path
from typing import Generator

import pytest
import torch


@pytest.fixture
def temp_dir() -> Generator[Path, None, None]:
    """Yield a throwaway directory (as a ``Path``), removed after the test."""
    tmpdir = tempfile.TemporaryDirectory()
    try:
        yield Path(tmpdir.name)
    finally:
        # Explicit cleanup mirrors what the context-manager form does on exit.
        tmpdir.cleanup()


@pytest.fixture
def mock_model_config():
    """Provide a mock model configuration dictionary.

    Values mirror a small hierarchical-transformer setup; only the keys and
    values matter to the tests, not how the dict is spelled.
    """
    return dict(
        embed_dim=96,
        num_heads=1,
        stages=[2, 3, 16, 3],
        q_stride=[2, 2, 2, 1],
        mask_unit_size=[8, 4, 2, 1],
        patch_kernel=[7, 7],
        patch_stride=[4, 4],
        patch_padding=[3, 3],
        mlp_ratio=4.0,
        drop_path_rate=0.0,
        norm_layer="LayerNorm",
        head_drop_rate=0.0,
        head_init_scale=0.001,
        sep_pos_embed=False,
        abs_win_pos_embed=False,
        return_intermediates=False,
        checkpoint_blocks=[],
        head_pre_norm=False,
    )


@pytest.fixture
def device():
    """Pick CUDA when available, otherwise fall back to the CPU."""
    if torch.cuda.is_available():
        return torch.device("cuda")
    return torch.device("cpu")


@pytest.fixture
def sample_image_tensor(device):
    """Return a random image batch on the test device.

    Shape is (batch, channels, height, width) = (2, 3, 224, 224).
    """
    shape = (2, 3, 224, 224)
    return torch.randn(*shape, device=device)


@pytest.fixture
def mock_checkpoint_path(temp_dir):
    """Write a minimal checkpoint dict to disk and return its path."""
    path = temp_dir / "test_checkpoint.pth"
    # Only plain Python values — enough structure for tests to inspect keys.
    state = {"model": {}, "optimizer": {}, "epoch": 1, "best_acc": 0.95}
    torch.save(state, path)
    return path


@pytest.fixture(autouse=True)
def reset_random_state():
    """Seed every RNG (stdlib, numpy, torch, CUDA) before each test.

    Autouse so every test starts from the same reproducible state.
    """
    import random
    import numpy as np

    seed = 42
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed(seed)
        torch.cuda.manual_seed_all(seed)


@pytest.fixture
def capture_stdout(monkeypatch):
    """Redirect ``sys.stdout`` into a ``StringIO`` buffer for the test body."""
    import io

    stream = io.StringIO()
    monkeypatch.setattr(sys, 'stdout', stream)
    yield stream
    # monkeypatch would restore stdout at teardown anyway; the explicit undo
    # restores it as soon as the fixture finishes, matching the original.
    monkeypatch.undo()


@pytest.fixture
def mock_timm_model(monkeypatch):
    """Patch ``timm.create_model`` so tests receive a trivial ``nn.Identity``.

    ``raising=False`` keeps the patch harmless when timm is not installed.
    """
    import torch.nn as nn

    def _fake_create_model(*args, **kwargs):
        return nn.Identity()

    monkeypatch.setattr("timm.create_model", _fake_create_model, raising=False)


# Make the project root importable so tests can `import hiera` without the
# package being installed. Guarded so repeated imports of this conftest (e.g.
# under forked/parallel runners) do not stack duplicate sys.path entries.
PROJECT_ROOT = str(Path(__file__).parent.parent)
if PROJECT_ROOT not in sys.path:
    sys.path.insert(0, PROJECT_ROOT)
Empty file added tests/integration/__init__.py
Empty file.
132 changes: 132 additions & 0 deletions tests/test_setup_validation.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,132 @@
"""Validation tests to ensure the testing infrastructure is properly configured."""

import sys
from pathlib import Path

import pytest


class TestInfrastructureSetup:
    """Validate that the pytest/coverage testing infrastructure is wired up."""

    @pytest.mark.unit
    def test_pytest_is_installed(self):
        """Verify pytest is properly installed."""
        # find_spec is clearer than the old `"pytest" in sys.modules or
        # __import__("pytest")` idiom and has no import side effects.
        import importlib.util

        assert importlib.util.find_spec("pytest") is not None

    @pytest.mark.unit
    def test_coverage_is_installed(self):
        """Verify pytest-cov is properly installed."""
        try:
            import pytest_cov
            assert pytest_cov is not None
        except ImportError:
            pytest.skip("pytest-cov not yet installed")

    @pytest.mark.unit
    def test_mock_is_installed(self):
        """Verify pytest-mock is properly installed."""
        try:
            import pytest_mock
            assert pytest_mock is not None
        except ImportError:
            pytest.skip("pytest-mock not yet installed")

    @pytest.mark.unit
    def test_fixtures_are_available(self, temp_dir, mock_model_config, device):
        """Test that custom fixtures from conftest.py are available."""
        assert isinstance(temp_dir, Path)
        assert temp_dir.exists()

        assert isinstance(mock_model_config, dict)
        assert "embed_dim" in mock_model_config

        assert device is not None
        assert str(device) in ["cpu", "cuda", "cuda:0"]

    @pytest.mark.unit
    def test_temp_dir_fixture(self, temp_dir):
        """Test the temporary directory fixture works correctly."""
        test_file = temp_dir / "test.txt"
        test_file.write_text("Hello, testing!")

        assert test_file.exists()
        assert test_file.read_text() == "Hello, testing!"

    @pytest.mark.unit
    def test_markers_are_defined(self):
        """Test that custom markers are properly defined."""
        expected_markers = ["unit", "integration", "slow"]
        for marker in expected_markers:
            # getattr on pytest.mark succeeds for any name; with
            # --strict-markers, undeclared markers fail at collection instead.
            marker_obj = getattr(pytest.mark, marker)
            assert marker_obj is not None

    @pytest.mark.integration
    def test_integration_marker(self):
        """Test that the integration marker is usable."""
        assert True

    @pytest.mark.slow
    def test_slow_marker(self):
        """Test that the slow marker is usable."""
        assert True

    @pytest.mark.unit
    def test_project_structure(self):
        """Verify the project structure is set up correctly."""
        project_root = Path(__file__).parent.parent

        # Essential test directories
        assert (project_root / "tests").exists()
        assert (project_root / "tests" / "unit").exists()
        assert (project_root / "tests" / "integration").exists()

        # Package markers so pytest/rootdir imports resolve
        assert (project_root / "tests" / "__init__.py").exists()
        assert (project_root / "tests" / "unit" / "__init__.py").exists()
        assert (project_root / "tests" / "integration" / "__init__.py").exists()

        # Project configuration
        assert (project_root / "pyproject.toml").exists()

    @pytest.mark.unit
    def test_coverage_configuration(self):
        """Test that coverage is properly configured in pyproject.toml."""
        project_root = Path(__file__).parent.parent
        pyproject_path = project_root / "pyproject.toml"

        assert pyproject_path.exists()

        content = pyproject_path.read_text()
        assert "[tool.coverage.run]" in content
        assert "[tool.coverage.report]" in content
        assert "source = [\"hiera\"]" in content

    @pytest.mark.unit
    def test_sample_image_tensor_fixture(self, sample_image_tensor):
        """Test the sample image tensor fixture."""
        # Local import: do not depend on the module-level `import torch`
        # that sits at the *bottom* of this file.
        import torch

        assert sample_image_tensor.shape == (2, 3, 224, 224)
        assert sample_image_tensor.dtype == torch.float32

    @pytest.mark.unit
    def test_mock_checkpoint_fixture(self, mock_checkpoint_path):
        """Test the mock checkpoint fixture."""
        import torch

        assert mock_checkpoint_path.exists()
        assert mock_checkpoint_path.suffix == ".pth"

        checkpoint = torch.load(mock_checkpoint_path)
        assert "model" in checkpoint
        assert "optimizer" in checkpoint
        assert checkpoint["epoch"] == 1


def test_basic_assertion():
    """Smoke test proving the pytest runner executes at all."""
    assert 2 == 1 + 1
    assert not (True is False)
    assert list() == []


import torch # Import at the end to test if it's available
Empty file added tests/unit/__init__.py
Empty file.
Loading