tch_plus/wrappers/utils.rs

use crate::TchError;
use libc::c_char;
use std::io;

// Returns None if the pointer is null. Otherwise the C string is copied into
// an owned Rust String and the original pointer is freed.
pub(super) unsafe fn ptr_to_string(ptr: *mut c_char) -> Option<String> {
    if !ptr.is_null() {
        let str = std::ffi::CStr::from_ptr(ptr).to_string_lossy().into_owned();
        libc::free(ptr as *mut libc::c_void);
        Some(str)
    } else {
        None
    }
}

// Reads and clears torch's last-error slot, returning an error if a message
// was recorded on the C++ side.
pub(super) fn read_and_clean_error() -> Result<(), TchError> {
    unsafe {
        match ptr_to_string(torch_sys_plus::get_and_reset_last_err()) {
            None => Ok(()),
            Some(c_error) => Err(TchError::Torch(c_error)),
        }
    }
}

// Runs an unsafe torch_sys_plus call and panics if an error was recorded on
// the C++ side.
macro_rules! unsafe_torch {
    ($e:expr) => {{
        let v = unsafe { $e };
        crate::wrappers::utils::read_and_clean_error().unwrap();
        v
    }};
}

// Runs an unsafe torch_sys_plus call and propagates any recorded error via `?`.
macro_rules! unsafe_torch_err {
    ($e:expr) => {{
        let v = unsafe { $e };
        crate::wrappers::utils::read_and_clean_error()?;
        v
    }};
}
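
// Illustrative sketch (not part of the original source): how a fallible
// wrapper is expected to use unsafe_torch_err! so that C++-side errors become
// TchError values instead of panics. The function name is hypothetical.
//
//     pub fn try_get_num_threads() -> Result<i32, TchError> {
//         let n = unsafe_torch_err!(torch_sys_plus::at_get_num_threads());
//         Ok(n)
//     }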

// Be cautious when using this function: bind the returned CString to a
// variable before calling as_ptr on it, otherwise the temporary is dropped
// immediately and the pointer is left dangling.
pub(super) fn path_to_cstring<T: AsRef<std::path::Path>>(
    path: T,
) -> Result<std::ffi::CString, TchError> {
    let path = path.as_ref();
    match path.to_str() {
        Some(path) => Ok(std::ffi::CString::new(path)?),
        None => Err(TchError::Io(io::Error::new(
            io::ErrorKind::Other,
            format!("path {path:?} cannot be converted to UTF-8"),
        ))),
    }
}
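
// Illustrative sketch (not part of the original source): keep the CString
// returned by path_to_cstring alive in a binding for as long as the raw
// pointer is in use.
//
//     let path_c = path_to_cstring(path)?;           // owns the buffer
//     let raw: *const c_char = path_c.as_ptr();      // valid while `path_c` lives
//     // Anti-pattern: `path_to_cstring(path)?.as_ptr()` drops the temporary
//     // CString at the end of the statement, leaving the pointer dangling.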

/// Sets the random seed used by torch.
pub fn manual_seed(seed: i64) {
    unsafe_torch!(torch_sys_plus::at_manual_seed(seed))
}

/// Gets the number of threads used by torch for inter-op parallelism.
pub fn get_num_interop_threads() -> i32 {
    unsafe_torch!(torch_sys_plus::at_get_num_interop_threads())
}

/// Gets the number of threads used by torch in parallel regions.
pub fn get_num_threads() -> i32 {
    unsafe_torch!(torch_sys_plus::at_get_num_threads())
}

/// Sets the number of threads used by torch for inter-op parallelism.
pub fn set_num_interop_threads(n_threads: i32) {
    unsafe_torch!(torch_sys_plus::at_set_num_interop_threads(n_threads))
}

/// Sets the number of threads used by torch in parallel regions.
pub fn set_num_threads(n_threads: i32) {
    unsafe_torch!(torch_sys_plus::at_set_num_threads(n_threads))
}
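
// Illustrative sketch (assumes these functions are re-exported at the crate
// root, as in the upstream tch crate): configure thread pools early, before
// any parallel work has started.
//
//     tch_plus::set_num_threads(4);           // intra-op parallelism
//     tch_plus::set_num_interop_threads(2);   // inter-op parallelism
//     println!("intra-op threads: {}", tch_plus::get_num_threads());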

/// Returns true if OpenMP support is available.
pub fn has_openmp() -> bool {
    unsafe_torch!(torch_sys_plus::at_context_has_openmp())
}

/// Returns true if MKL support is available.
pub fn has_mkl() -> bool {
    unsafe_torch!(torch_sys_plus::at_context_has_mkl())
}

/// Returns true if LAPACK support is available.
pub fn has_lapack() -> bool {
    unsafe_torch!(torch_sys_plus::at_context_has_lapack())
}

/// Returns true if MKL-DNN (oneDNN) support is available.
pub fn has_mkldnn() -> bool {
    unsafe_torch!(torch_sys_plus::at_context_has_mkldnn())
}

/// Returns true if MAGMA support is available.
pub fn has_magma() -> bool {
    unsafe_torch!(torch_sys_plus::at_context_has_magma())
}

/// Returns true if CUDA support is available.
pub fn has_cuda() -> bool {
    unsafe_torch!(torch_sys_plus::at_context_has_cuda())
}

/// Returns true if the CUDA runtime is available.
pub fn has_cudart() -> bool {
    unsafe_torch!(torch_sys_plus::at_context_has_cudart())
}

/// Returns true if cuSOLVER support is available.
pub fn has_cusolver() -> bool {
    unsafe_torch!(torch_sys_plus::at_context_has_cusolver())
}

/// Returns true if HIP (ROCm) support is available.
pub fn has_hip() -> bool {
    unsafe_torch!(torch_sys_plus::at_context_has_hip())
}

/// Returns true if IPU support is available.
pub fn has_ipu() -> bool {
    unsafe_torch!(torch_sys_plus::at_context_has_ipu())
}

/// Returns true if XLA support is available.
pub fn has_xla() -> bool {
    unsafe_torch!(torch_sys_plus::at_context_has_xla())
}

/// Returns true if the lazy tensor backend is available.
pub fn has_lazy() -> bool {
    unsafe_torch!(torch_sys_plus::at_context_has_lazy())
}

/// Returns true if the MPS (Metal) backend is available.
pub fn has_mps() -> bool {
    unsafe_torch!(torch_sys_plus::at_context_has_mps())
}

/// Returns the cuDNN version.
pub fn version_cudnn() -> i64 {
    unsafe_torch!(torch_sys_plus::at_context_version_cudnn())
}

/// Returns the CUDA runtime version.
pub fn version_cudart() -> i64 {
    unsafe_torch!(torch_sys_plus::at_context_version_cudart())
}
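
// Illustrative sketch (not part of the original source): these capability
// checks can be used to log what the linked libtorch build supports.
//
//     println!("cuda: {}", has_cuda());
//     println!("mkl: {} mkldnn: {}", has_mkl(), has_mkldnn());
//     if has_cuda() {
//         println!("cudnn version: {}", version_cudnn());
//     }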

/// Check whether the vulkan backend is available. Note that this
/// backend is not included by default as of PyTorch 2.0.0.
/// https://pytorch.org/tutorials/prototype/vulkan_workflow.html#building-pytorch-with-vulkan-backend
pub fn has_vulkan() -> bool {
    crate::Tensor::is_vulkan_available()
}

/// Quantization engines.
#[allow(clippy::upper_case_acronyms)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum QEngine {
    NoQEngine,
    FBGEMM,
    QNNPACK,
}

impl QEngine {
    fn to_cint(self) -> i32 {
        match self {
            QEngine::NoQEngine => 0,
            QEngine::FBGEMM => 1,
            QEngine::QNNPACK => 2,
        }
    }

    /// Selects this engine as the active quantization engine.
    pub fn set(self) -> Result<(), TchError> {
        unsafe_torch_err!(torch_sys_plus::at_set_qengine(self.to_cint()));
        Ok(())
    }
}
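
// Illustrative sketch (not part of the original source): pick a quantization
// engine before running a quantized model; `set` surfaces a TchError if the
// linked libtorch build does not support the requested engine. This is meant
// to run inside a function returning Result<(), TchError>.
//
//     QEngine::FBGEMM.set()?;   // or QEngine::QNNPACK on mobile-oriented builds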