
Commit e3ef048

aarch64 python build
1 parent 41e8ed0 commit e3ef048

3 files changed: +15 −19 lines changed


Diff for: .github/scripts/generate_binary_build_matrix.py (+1 −1)

@@ -50,7 +50,7 @@
     "release": "12.4",
 }
 
-CUDA_AARCH64_ARCHES = ["12.8-aarch64", "12.6-aarch64", "11.8-aarch64"]
+CUDA_AARCH64_ARCHES = ["12.8-aarch64", "12.6-aarch64"]
 
 PACKAGE_TYPES = ["wheel", "conda", "libtorch"]
 CXX11_ABI = "cxx11-abi"
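
Dropping "11.8-aarch64" removes CUDA 11.8 from the aarch64 wheel build matrix; 12.6 and 12.8 remain. As a purely hypothetical sketch (the generator's helper functions are not part of this diff), each remaining entry pairs a CUDA version with the aarch64 platform tag and can be split accordingly:

# Hypothetical illustration only; the helper below is not from the repo.
# Each arch string pairs a CUDA version with the aarch64 platform tag.
CUDA_AARCH64_ARCHES = ["12.8-aarch64", "12.6-aarch64"]


def arch_to_entry(arch: str) -> dict:
    cuda_version, platform = arch.split("-", 1)  # e.g. ("12.8", "aarch64")
    return {
        "desired_cuda": f"cu{cuda_version.replace('.', '')}",  # e.g. "cu128"
        "platform": platform,
    }


matrix = [arch_to_entry(a) for a in CUDA_AARCH64_ARCHES]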

Diff for: MODULE.bazel (+7 −7)

@@ -80,13 +80,13 @@ http_archive(
 # It is possible to specify a wheel file to use as the libtorch source by providing the URL below and
 # using the build flag `--//toolchains/dep_src:torch="whl"`
 
-# http_archive(
-#     name = "torch_whl",
-#     build_file = "@//third_party/libtorch:BUILD",
-#     strip_prefix = "torch",
-#     type = "zip",
-#     urls = ["https://pypi.jetson-ai-lab.dev/jp6/cu126/+f/52c/2cbdd62b78f32/torch-2.7.0-cp310-cp310-linux_aarch64.whl#sha256=52c2cbdd62b78f32c51fa178212e4721241a2ba9e0c4d7d690dd808bd890d51b"],
-# )
+http_archive(
+    name = "torch_whl",
+    build_file = "@//third_party/libtorch:BUILD",
+    strip_prefix = "torch",
+    type = "zip",
+    urls = ["https://download.pytorch.org/whl/nightly/cu128/torch-2.8.0.dev20250415%2Bcu128-cp310-cp310-manylinux_2_28_aarch64.whl"],
+)
 
 # Download these tarballs manually from the NVIDIA website
 # Either place them in the distdir directory in third_party and use the --distdir flag
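
With the torch_whl archive now active, a build can resolve libtorch from that wheel by passing the flag mentioned in the comment above. A minimal sketch of such an invocation, driven from Python the way setup.py drives bazel below; the bazel launcher and the //:libtorchtrt target are assumptions, not taken from this diff:

# Minimal sketch, assuming a bazel launcher on PATH and the //:libtorchtrt
# target; only the --//toolchains/dep_src:torch=whl flag comes from the
# MODULE.bazel comment above.
import subprocess

BAZEL_EXE = "bazel"  # assumption: whichever bazel/bazelisk binary is installed

cmd = [
    BAZEL_EXE,
    "build",
    "//:libtorchtrt",                    # assumed target name
    "--//toolchains/dep_src:torch=whl",  # select @torch_whl as the libtorch source
]
subprocess.run(cmd, check=True)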

Diff for: setup.py (+7 −11)

@@ -188,6 +188,10 @@ def build_libtorchtrt_cxx11_abi(
     if IS_JETPACK:
         cmd.append("--config=jetpack")
 
+    if IS_SBSA:
+        if CI_BUILD:
+            cmd.append("--//toolchains/dep_src:torch=whl")
+
     if CI_BUILD:
         cmd.append("--platforms=//toolchains:ci_rhel_x86_64_linux")
         print("CI based build")
@@ -456,14 +460,6 @@ def run(self):
 package_data = {}
 
 if not (PY_ONLY or NO_TS):
-    tensorrt_windows_external_dir = (
-        lambda: subprocess.check_output(
-            [BAZEL_EXE, "query", "@tensorrt_win//:nvinfer", "--output", "location"]
-        )
-        .decode("ascii")
-        .strip()
-        .split("/BUILD.bazel")[0]
-    )
 
     tensorrt_x86_64_external_dir = (
         lambda: subprocess.check_output(
@@ -493,11 +489,11 @@
     )
 
     if IS_SBSA:
-        tensorrt_linux_external_dir = tensorrt_sbsa_external_dir()
+        tensorrt_linux_external_dir = tensorrt_sbsa_external_dir
     elif IS_JETPACK:
-        tensorrt_linux_external_dir = tensorrt_jetpack_external_dir()
+        tensorrt_linux_external_dir = tensorrt_jetpack_external_dir
     else:
-        tensorrt_linux_external_dir = tensorrt_x86_64_external_dir()
+        tensorrt_linux_external_dir = tensorrt_x86_64_external_dir
 
     tensorrt_windows_external_dir = (
         lambda: subprocess.check_output(
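
The setup.py hunks do two things: the duplicate tensorrt_windows_external_dir definition inside the if not (PY_ONLY or NO_TS): block is dropped (it is still defined in the trailing context), and the platform selection now assigns the query lambda itself instead of calling it, so the bazel query presumably runs only where the path is actually consumed. A minimal sketch of that pattern, assuming the lambda shape shown in the diff and placeholder platform flags:

# Sketch of the deferred-query pattern after this change. The x86_64 lambda
# mirrors the diff; IS_SBSA/IS_JETPACK, the sbsa/jetpack lambdas, and the
# @tensorrt//:nvinfer label are stand-ins for what setup.py defines elsewhere.
import subprocess

BAZEL_EXE = "bazel"
IS_SBSA, IS_JETPACK = False, False  # assumption: detected elsewhere in setup.py

tensorrt_x86_64_external_dir = (
    lambda: subprocess.check_output(
        [BAZEL_EXE, "query", "@tensorrt//:nvinfer", "--output", "location"]
    )
    .decode("ascii")
    .strip()
    .split("/BUILD.bazel")[0]
)
tensorrt_sbsa_external_dir = tensorrt_x86_64_external_dir     # placeholder
tensorrt_jetpack_external_dir = tensorrt_x86_64_external_dir  # placeholder

if IS_SBSA:
    tensorrt_linux_external_dir = tensorrt_sbsa_external_dir  # lambda, not called
elif IS_JETPACK:
    tensorrt_linux_external_dir = tensorrt_jetpack_external_dir
else:
    tensorrt_linux_external_dir = tensorrt_x86_64_external_dir

if __name__ == "__main__":
    # The query runs only here, when the directory is actually needed
    # (requires a bazel workspace with a tensorrt repository).
    print(tensorrt_linux_external_dir())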
