248 changes: 246 additions & 2 deletions backends/pixi-build-ros/pixi.lock

Large diffs are not rendered by default.

14 changes: 12 additions & 2 deletions backends/pixi-build-ros/pixi.toml
@@ -7,13 +7,22 @@ platforms = ["osx-arm64", "win-64", "linux-64"]
preview = ["pixi-build"]

[dependencies]
pytest = "*"
pydantic = ">=2.8.2,<3"
ruff = "*"
pixi-build-ros = { path = "." }

[tasks]
[environments]
default = { features = [], solve-group = "default" }
test = { features = ["test"], solve-group = "default" }

[feature.test.tasks]
test = "pytest tests"

[feature.test.dependencies]
pixi-pycharm = ">=0.0.8,<0.1"
pytest = "*"
py-pixi-build-backend = { path = "../../py-pixi-build-backend" }

[package.build.backend]
name = "pixi-build-python"
version = "*"
@@ -27,6 +36,7 @@ hatchling = "*"

[package.run-dependencies]
rosdistro = "*"
pydantic = ">=2.8.2,<3"
catkin_pkg = "*"
pyyaml = "*"
pixi-build-api-version = ">=2,<3"
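The new test feature adds pytest, pixi-pycharm, and the local py-pixi-build-backend path dependency, and the test environment (sharing the default solve group) exposes a test task that runs "pytest tests". A minimal sketch of the kind of test this enables, a hypothetical test file that is not part of this diff, exercising the manifest-root-relative path handling that ros_generator.py introduces below:

# test_backend_config.py: hypothetical; only ROSBackendConfig and its aliases come from this diff.
from pathlib import Path

from pixi_build_ros.ros_generator import ROSBackendConfig


def test_relative_paths_resolve_against_manifest_root(tmp_path: Path) -> None:
    config = ROSBackendConfig.model_validate(
        {"debug-dir": "debug", "extra-package-mappings": ["maps/internal.yaml"]},
        context={"manifest_root": tmp_path},
    )
    # Both validators route through _parse_str_as_abs_path, so relative values
    # are resolved against the manifest root supplied in the validation context.
    assert config.debug_dir == (tmp_path / "debug").resolve()
    assert config.extra_package_mappings == [(tmp_path / "maps/internal.yaml").resolve()]
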
1 change: 1 addition & 0 deletions backends/pixi-build-ros/pyproject.toml
@@ -5,6 +5,7 @@ dependencies = [
"pytest",
"toml",
"pyyaml",
"pydantic",
"py-pixi-build-backend",
]
name = "pixi-build-ros"
2 changes: 1 addition & 1 deletion backends/pixi-build-ros/robostack.yaml
@@ -75,7 +75,7 @@ draco:
eigen:
robostack: [eigen]
emacs:
# Wait until https://github.com/conda-forge/gtk3-feedstock/pull/74 is resolved
# Wait until https://github.com/robostack-forge/gtk3-feedstock/pull/74 is resolved
# and emacs is rebuilt for harfbuzz=10
# robostack: [emacs]
robostack: []
129 changes: 93 additions & 36 deletions backends/pixi-build-ros/src/pixi_build_ros/ros_generator.py
@@ -2,8 +2,11 @@
Python generator implementation using Python bindings.
"""

from dataclasses import dataclass
from pathlib import Path
import os
import pydantic
from importlib.resources import files

from typing import Dict, Optional, List, Any
from pixi_build_backend.types.generated_recipe import (
GenerateRecipeProtocol,
@@ -19,40 +22,74 @@

from .build_script import BuildScriptContext, BuildPlatform
from .distro import Distro
from .utils import get_build_input_globs, package_xml_to_conda_requirements, convert_package_xml_to_catkin_package, \
get_package_xml_content
from .utils import (
get_build_input_globs,
package_xml_to_conda_requirements,
convert_package_xml_to_catkin_package,
get_package_xml_content,
load_package_map_data,
)


def _parse_str_as_abs_path(value: str | Path, manifest_root: Path) -> Path:
"""Parse a string as a Path."""
# Ensure the debug directory is a Path object
if isinstance(value, str):
value = Path(value)
# Ensure it's an absolute path
if not value.is_absolute():
# Convert to absolute path relative to manifest root
return (manifest_root / value).resolve()
return value


@dataclass
class ROSBackendConfig:
class ROSBackendConfig(pydantic.BaseModel, extra="forbid"):
"""ROS backend configuration."""

noarch: Optional[bool] = None
# Environment variables to set during the build
env: Optional[Dict[str, str]] = None
# Directory for debug files of this script
debug_dir: Optional[Path] = None
debug_dir: Optional[Path] = pydantic.Field(default=None, alias="debug-dir")
# Extra input globs to include in the build hash
extra_input_globs: Optional[List[str]] = None
extra_input_globs: Optional[List[str]] = pydantic.Field(default=None, alias="extra-input-globs")
# ROS distribution to use, e.g., "foxy", "galactic", "humble"
# TODO: This should be figured out in some other way, not from the config.
distro: Optional[str] = None

# Extra package mappings to use in the build
extra_package_mappings: List[Path] = pydantic.Field(default_factory=list, alias="extra-package-mappings")

def is_noarch(self) -> bool:
"""Whether to build a noarch package or a platform-specific package."""
return self.noarch is None or self.noarch

def get_debug_dir(self) -> Optional[Path]:
"""Get debug directory if set."""
if self.debug_dir is not None:
# Ensure the debug directory is a Path object
if isinstance(self.debug_dir, str):
self.debug_dir = Path(self.debug_dir)
# Ensure it's an absolute path
if not self.debug_dir.is_absolute():
# Convert to absolute path relative to the current working directory
self.debug_dir = Path.cwd() / self.debug_dir
return self.debug_dir
@pydantic.field_validator("debug_dir", mode="before")
@classmethod
def _parse_debug_dir(cls, value, info: pydantic.ValidationInfo) -> Optional[Path]:
"""Parse debug directory if set."""
if value is None:
return None
base_path = Path(os.getcwd())
if info.context and "manifest_root" in info.context:
base_path = Path(info.context["manifest_root"])
return _parse_str_as_abs_path(value, base_path)

@pydantic.field_validator("extra_package_mappings", mode="before")
@classmethod
def _parse_package_mappings(cls, input_value, info: pydantic.ValidationInfo) -> Optional[List[Path]]:
"""Parse additional package mappings if set."""
if input_value is None:
return []
base_path = Path(os.getcwd())
if info.context and "manifest_root" in info.context:
base_path = Path(info.context["manifest_root"])

res = []
for path_value in input_value:
res.append(_parse_str_as_abs_path(path_value, base_path))
return res


class ROSGenerator(GenerateRecipeProtocol):
"""ROS recipe generator using Python bindings."""
@@ -65,14 +102,15 @@ def generate_recipe(
host_platform: Platform,
_python_params: Optional[PythonParams] = None,
) -> GeneratedRecipe:
"""Generate a recipe for a Python package."""
backend_config: ROSBackendConfig = ROSBackendConfig(**config)

"""Generate a recipe for a Python package."""
manifest_root = Path(manifest_path)
backend_config: ROSBackendConfig = ROSBackendConfig.model_validate(
config, context={"manifest_root": manifest_root}
)

# Setup ROS distro first
distro = Distro(backend_config.distro)

# Create metadata provider for package.xml
package_xml_path = manifest_root / "package.xml"
metadata_provider = ROSPackageXmlMetadataProvider(str(package_xml_path), distro)
@@ -84,11 +122,30 @@
package_xml_str = get_package_xml_content(manifest_root)
package_xml = convert_package_xml_to_catkin_package(package_xml_str)

# load package map

# TODO: Currently hardcoded and not able to override, this should be configurable
package_files = files("pixi_build_ros")
robostack_file = package_files / "robostack.yaml"
# workaround for from source install
if not robostack_file.is_file():
robostack_file = Path(__file__).parent.parent.parent / "robostack.yaml"

package_map_data = load_package_map_data([robostack_file] + backend_config.extra_package_mappings)

# Get requirements from package.xml
package_requirements = package_xml_to_conda_requirements(package_xml, distro, host_platform)
package_requirements = package_xml_to_conda_requirements(package_xml, distro, host_platform, package_map_data)

# Add standard dependencies
build_deps = ["ninja", "python", "setuptools", "git", "git-lfs", "cmake", "cpython"]
build_deps = [
"ninja",
"python",
"setuptools",
"git",
"git-lfs",
"cmake",
"cpython",
]
if host_platform.is_unix:
build_deps.extend(["patch", "make", "coreutils"])
if host_platform.is_windows:
@@ -112,26 +169,24 @@
requirements = merge_requirements(generated_recipe.recipe.requirements, package_requirements)
generated_recipe.recipe.requirements = requirements


# Determine build platform
build_platform = BuildPlatform.current()

# Generate build script
build_script_context = BuildScriptContext.load_from_template(package_xml, build_platform, manifest_root, distro)
build_script_lines = build_script_context.render()

generated_recipe.recipe.build.script = Script(
generated_recipe.recipe.build.script = Script(
content=build_script_lines,
env=backend_config.env,
)

debug_dir = backend_config.get_debug_dir()
if debug_dir:
if backend_config.debug_dir:
recipe = generated_recipe.recipe.to_yaml()
package = generated_recipe.recipe.package
debug_file_path = debug_dir / f"{package.name.get_concrete()}-{package.version}-recipe.yaml"
debug_file_path = backend_config.debug_dir / f"{package.name.get_concrete()}-{package.version}-recipe.yaml"
debug_file_path.parent.mkdir(parents=True, exist_ok=True)
with open(debug_file_path, 'w') as debug_file:
with open(debug_file_path, "w") as debug_file:
debug_file.write(recipe)

# Test the build script before running to early out.
@@ -144,22 +199,26 @@ def extract_input_globs_from_build(self, config: ROSBackendConfig, workdir: Path
"""Extract input globs for the build."""
return get_build_input_globs(config, editable)

def default_variants(self, host_platform: Platform ) -> Dict[str, Any]:
def default_variants(self, host_platform: Platform) -> Dict[str, Any]:
"""Get the default variants for the generator."""
variants = {}
if host_platform.is_windows:
variants["cxx_compiler"] = ["vs2019"]
return variants

def merge_requirements(model_requirements: ConditionalRequirements, package_requirements: ConditionalRequirements) -> ConditionalRequirements:

def merge_requirements(
model_requirements: ConditionalRequirements,
package_requirements: ConditionalRequirements,
) -> ConditionalRequirements:
"""Merge two sets of requirements."""
merged = ConditionalRequirements()

# The model requirements are the base, coming from the pixi manifest
# We need to only add the names for non-existing dependencies
def merge_unique_items(
model: List[ItemPackageDependency],
package: List[ItemPackageDependency],
model: List[ItemPackageDependency],
package: List[ItemPackageDependency],
) -> List[ItemPackageDependency]:
"""Merge unique items from source into target."""
result = model
Expand All @@ -179,5 +238,3 @@ def merge_unique_items(

# If the dependency is of type Source in one of the requirements, we need to set them to Source for all variants
return merged
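
Taken together, ROSBackendConfig now accepts the following keys (kebab-case aliases for the multi-word fields). A sketch of the config dict as generate_recipe would receive it; the values below are illustrative only, and how a consuming manifest passes this table to the backend is outside this diff:

# Illustrative backend configuration, as handed to generate_recipe(config=...).
# Every value here is an example, not a default from this change.
config = {
    "noarch": False,                          # build a platform-specific package instead of noarch
    "env": {"CMAKE_BUILD_TYPE": "Release"},   # environment variables set during the build
    "debug-dir": "build-debug",               # resolved relative to the manifest root
    "extra-input-globs": ["patches/**"],      # extra globs included in the build input hash
    "distro": "humble",                       # ROS distribution (see the TODO in the model)
    "extra-package-mappings": ["package-maps/internal.yaml"],  # new: loaded alongside the bundled robostack.yaml
}

Each extra mapping file presumably follows the same layout as the bundled robostack.yaml (a package name mapped to a list of conda package names), since both are passed to load_package_map_data in a single list.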

