diff --git a/ast_canopy/ast_canopy/api.py b/ast_canopy/ast_canopy/api.py
index f66a49d7..0af065a5 100644
--- a/ast_canopy/ast_canopy/api.py
+++ b/ast_canopy/ast_canopy/api.py
@@ -433,7 +433,7 @@ def parse_declarations_from_source(
     # 3. CUDA Toolkit include directories
     # 4. Additional include directories
     command_line_options = [
-        "clang++",
+        clang_binary if clang_binary is not None else "clang++",
         *clang_verbose_flag,
         "--cuda-device-only",
         "-xcuda",
diff --git a/numbast/src/numbast/static/struct.py b/numbast/src/numbast/static/struct.py
index 18223d5a..ac470014 100644
--- a/numbast/src/numbast/static/struct.py
+++ b/numbast/src/numbast/static/struct.py
@@ -526,13 +526,20 @@
     def _render(self):
         """Render all struct constructors."""
         for convop_decl in self._convop_decls:
-            renderer = StaticStructConversionOperatorRenderer(
-                struct_name=self._struct_name,
-                struct_type_class=self._struct_type_class,
-                struct_type_name=self._struct_type_name,
-                header_path=self._header_path,
-                convop_decl=convop_decl,
-            )
+            try:
+                renderer = StaticStructConversionOperatorRenderer(
+                    struct_name=self._struct_name,
+                    struct_type_class=self._struct_type_class,
+                    struct_type_name=self._struct_type_name,
+                    header_path=self._header_path,
+                    convop_decl=convop_decl,
+                )
+            except TypeNotFoundError as e:
+                warnings.warn(
+                    f"{e._type_name} is not known to Numbast. Skipping "
+                    f"binding for {str(convop_decl)}"
+                )
+                continue
             renderer._render()

         self._python_rendered += renderer._python_rendered
diff --git a/numbast/src/numbast/static/tests/data/bf16.cuh b/numbast/src/numbast/static/tests/data/bf16.cuh
index 3a9a0d71..042d3db1 100644
--- a/numbast/src/numbast/static/tests/data/bf16.cuh
+++ b/numbast/src/numbast/static/tests/data/bf16.cuh
@@ -10,6 +10,10 @@
 nv_bfloat16 inline __device__ add(nv_bfloat16 a, nv_bfloat16 b) {
   return a + b;
 }
+__nv_bfloat16 inline __device__ add2(__nv_bfloat16 a, __nv_bfloat16 b) {
+  return a + b;
+}
+
 __nv_bfloat16_raw inline __device__ bf16_to_raw(nv_bfloat16 a) {
   return __nv_bfloat16_raw(a);
 }
diff --git a/numbast/src/numbast/static/tests/test_bf16_support.py b/numbast/src/numbast/static/tests/test_bf16_support.py
index 03befc87..6dbc17f3 100644
--- a/numbast/src/numbast/static/tests/test_bf16_support.py
+++ b/numbast/src/numbast/static/tests/test_bf16_support.py
@@ -10,16 +10,19 @@
 def test_bindings_from_bf16(make_binding):
     binding1 = res1["bindings"]
     add = binding1["add"]
+    add2 = binding1["add2"]

     @cuda.jit
     def kernel(arr):
         x = add(bfloat16(3.14), bfloat16(3.14))
         arr[0] = float32(x)
+        arr[1] = float32(add2(bfloat16(3.14), bfloat16(3.14)))

-    arr = cuda.device_array((1,), dtype="float32")
+    arr = cuda.device_array((2,), dtype="float32")
     kernel[1, 1](arr)

     assert pytest.approx(arr[0], 1e-2) == 6.28
+    assert pytest.approx(arr[1], 1e-2) == 6.28

     # Check that bfloat16 is imported
     assert "from numba.cuda.types import bfloat16" in res1["src"]
diff --git a/numbast/src/numbast/static/types.py b/numbast/src/numbast/static/types.py
index 681d3bd1..533df4a5 100644
--- a/numbast/src/numbast/static/types.py
+++ b/numbast/src/numbast/static/types.py
@@ -30,6 +30,7 @@ def register_enum_type_str(
     Parameters:
         ctype_enum_name (str): The C++ enum type name to register (as it appears in C/C++ headers).
         enum_name (str): The enum identifier to use inside the generated Numba type string (becomes the first argument to `IntEnumMember`).
+        underlying_integer_type (str): The underlying integer type to use for the enum.
     """

     global CTYPE_TO_NBTYPE_STR
diff --git a/numbast/src/numbast/tools/tests/test_prefix_removal.py b/numbast/src/numbast/tools/tests/test_prefix_removal.py
index 33a52f10..054e5c82 100644
--- a/numbast/src/numbast/tools/tests/test_prefix_removal.py
+++ b/numbast/src/numbast/tools/tests/test_prefix_removal.py
@@ -18,6 +18,9 @@ def test_prefix_removal(run_in_isolated_folder, arch_str):
     symbols = res["symbols"]
     alls = symbols["__all__"]

+    binding_path = res["binding_path"]
+    print(f"{binding_path=}")
+
     assert run_result.exit_code == 0

     # Verify that the function is exposed as "foo" (without the "prefix_" prefix)
diff --git a/numbast/tests/data/sample_function.cuh b/numbast/tests/data/sample_function.cuh
new file mode 100644
index 00000000..cc0ba3fc
--- /dev/null
+++ b/numbast/tests/data/sample_function.cuh
@@ -0,0 +1,8 @@
+// clang-format off
+// SPDX-FileCopyrightText: Copyright (c) 2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+// SPDX-License-Identifier: Apache-2.0
+// clang-format on
+
+#pragma once
+
+__device__ void func_with_void_ptr_arg(void *ptr) { (void)ptr; }