//! Provides the activation functionality from the CUDA cuDNN API.

use ::{API, Error};
use ffi::*;

impl API {
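    /// Creates a generic CUDA cuDNN Activation Descriptor.
    ///
    /// The returned descriptor must be initialized with `set_activation_descriptor`
    /// before it is used.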
    pub fn create_activation_descriptor() -> Result<cudnnActivationDescriptor_t, Error> {
        unsafe { API::ffi_create_activation_descriptor() }
    }

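    /// Destroys a CUDA cuDNN Activation Descriptor.
    ///
    /// Should be called when the descriptor is no longer needed.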
    pub fn destroy_activation_descriptor(desc: cudnnActivationDescriptor_t) -> Result<(), Error> {
        unsafe { API::ffi_destroy_activation_descriptor(desc) }
    }

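    /// Initializes a CUDA cuDNN Activation Descriptor with the activation `mode`,
    /// the NaN propagation policy and the ceiling used by the clipped ReLU mode.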
    pub fn set_activation_descriptor(
        desc: cudnnActivationDescriptor_t,
        mode: cudnnActivationMode_t,
        relu_nan_opt: cudnnNanPropagation_t,
        relu_ceiling: f64
    ) -> Result<(), Error> {
        unsafe { API::ffi_set_activation_descriptor(desc, mode, relu_nan_opt, relu_ceiling) }
    }

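    /// Computes an activation forward pass, writing the result to the tensor `y`.
    ///
    /// `alpha` and `beta` are scaling factors that blend the computed result with
    /// the prior content of the output tensor: `y = alpha * result + beta * y`.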
    pub fn activation_forward(
        handle: cudnnHandle_t,
        activation_desc: cudnnActivationDescriptor_t,
        alpha: *const ::libc::c_void,
        x_desc: cudnnTensorDescriptor_t,
        x: *const ::libc::c_void,
        beta: *const ::libc::c_void,
        y_desc: cudnnTensorDescriptor_t,
        y: *mut ::libc::c_void
    ) -> Result<(), Error> {
        unsafe { API::ffi_activation_forward(handle, activation_desc, alpha, x_desc, x, beta, y_desc, y) }
    }

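    /// Computes the gradient of an activation function, writing the result to `dx`.
    ///
    /// Takes the forward output `y`, its gradient `dy` and the original input `x`;
    /// `alpha` and `beta` blend the result with the prior content of `dx`.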
    pub fn activation_backward(
        handle: cudnnHandle_t,
        activation_desc: cudnnActivationDescriptor_t,
        alpha: *const ::libc::c_void,
        y_desc: cudnnTensorDescriptor_t,
        y: *const ::libc::c_void,
        dy_desc: cudnnTensorDescriptor_t,
        dy: *const ::libc::c_void,
        beta: *const ::libc::c_void,
        x_desc: cudnnTensorDescriptor_t,
        x: *const ::libc::c_void,
        dx_desc: cudnnTensorDescriptor_t,
        dx: *mut ::libc::c_void
    ) -> Result<(), Error> {
        unsafe { API::ffi_activation_backward(handle, activation_desc, alpha, y_desc, y, dy_desc, dy, beta, x_desc, x, dx_desc, dx) }
    }

    // Thin wrapper around `cudnnActivationForward` that converts the returned status into a `Result`.
    unsafe fn ffi_activation_forward(
        handle: cudnnHandle_t,
        activation_desc: cudnnActivationDescriptor_t,
        alpha: *const ::libc::c_void,
        src_desc: cudnnTensorDescriptor_t,
        src_data: *const ::libc::c_void,
        beta: *const ::libc::c_void,
        dest_desc: cudnnTensorDescriptor_t,
        dest_data: *mut ::libc::c_void
    ) -> Result<(), Error> {
        match cudnnActivationForward(handle, activation_desc, alpha, src_desc, src_data, beta, dest_desc, dest_data) {
            cudnnStatus_t::CUDNN_STATUS_SUCCESS => Ok(()),
            cudnnStatus_t::CUDNN_STATUS_BAD_PARAM => Err(Error::BadParam("`mode` has an invalid enumerant value, or the dimensions, data type or strides of the input and output tensors differ.")),
            cudnnStatus_t::CUDNN_STATUS_EXECUTION_FAILED => Err(Error::ExecutionFailed("Execution failed to launch on GPU.")),
            _ => Err(Error::Unknown("Unable to compute activation forward.")),
        }
    }

    // Thin wrapper around `cudnnActivationBackward` that converts the returned status into a `Result`.
    unsafe fn ffi_activation_backward(
        handle: cudnnHandle_t,
        activation_desc: cudnnActivationDescriptor_t,
        alpha: *const ::libc::c_void,
        src_desc: cudnnTensorDescriptor_t,
        src_data: *const ::libc::c_void,
        src_diff_desc: cudnnTensorDescriptor_t,
        src_diff_data: *const ::libc::c_void,
        beta: *const ::libc::c_void,
        dest_desc: cudnnTensorDescriptor_t,
        dest_data: *const ::libc::c_void,
        dest_diff_desc: cudnnTensorDescriptor_t,
        dest_diff_data: *mut ::libc::c_void
    ) -> Result<(), Error> {
        match cudnnActivationBackward(handle, activation_desc, alpha, src_desc, src_data, src_diff_desc, src_diff_data, dest_desc, dest_data, beta, dest_diff_desc, dest_diff_data) {
            cudnnStatus_t::CUDNN_STATUS_SUCCESS => Ok(()),
            cudnnStatus_t::CUDNN_STATUS_BAD_PARAM => Err(Error::BadParam("`mode` has an invalid enumerant value, or the dimensions, data type or strides of the input and output tensors differ.")),
            cudnnStatus_t::CUDNN_STATUS_NOT_SUPPORTED => Err(Error::NotSupported("The function does not support the provided configuration.")),
            cudnnStatus_t::CUDNN_STATUS_EXECUTION_FAILED => Err(Error::ExecutionFailed("Execution failed to launch on GPU.")),
            _ => Err(Error::Unknown("Unable to compute activation backward.")),
        }
    }

    unsafe fn ffi_create_activation_descriptor() -> Result<cudnnActivationDescriptor_t, Error> {
        let mut desc: cudnnActivationDescriptor_t = ::std::ptr::null_mut();
        match cudnnCreateActivationDescriptor(&mut desc) {
            cudnnStatus_t::CUDNN_STATUS_SUCCESS => Ok(desc),
            cudnnStatus_t::CUDNN_STATUS_ALLOC_FAILED => Err(Error::AllocFailed("The resources could not be allocated.")),
            _ => Err(Error::Unknown("Unable to create generic CUDA cuDNN Activation Descriptor.")),
        }
    }

    unsafe fn ffi_destroy_activation_descriptor(desc: cudnnActivationDescriptor_t) -> Result<(), Error> {
        match cudnnDestroyActivationDescriptor(desc) {
            cudnnStatus_t::CUDNN_STATUS_SUCCESS => Ok(()),
            _ => Err(Error::Unknown("Unable to destroy CUDA cuDNN Activation Descriptor.")),
        }
    }

    unsafe fn ffi_set_activation_descriptor(
        desc: cudnnActivationDescriptor_t,
        mode: cudnnActivationMode_t,
        relu_nan_opt: cudnnNanPropagation_t,
        relu_ceiling: f64
    ) -> Result<(), Error> {
        match cudnnSetActivationDescriptor(desc, mode, relu_nan_opt, relu_ceiling) {
            cudnnStatus_t::CUDNN_STATUS_SUCCESS => Ok(()),
            cudnnStatus_t::CUDNN_STATUS_BAD_PARAM => Err(Error::BadParam("`mode` or `relu_nan_opt` has an invalid enumerant value.")),
            _ => Err(Error::Unknown("Unable to set CUDA cuDNN Activation Descriptor.")),
        }
    }
}
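
// The test below is a minimal usage sketch, not part of the original API surface:
// it only illustrates the expected descriptor lifecycle (create -> set -> destroy).
// It assumes a CUDA-capable GPU with cuDNN is available at test time and that the
// generated `ffi` bindings expose the enum variants `CUDNN_ACTIVATION_RELU` and
// `CUDNN_NOT_PROPAGATE_NAN` under these names.
#[cfg(test)]
mod tests {
    use ::API;
    use ffi::*;

    #[test]
    #[ignore] // requires a CUDA device with cuDNN installed
    fn activation_descriptor_lifecycle() {
        // Create an uninitialized activation descriptor.
        let desc = match API::create_activation_descriptor() {
            Ok(desc) => desc,
            Err(_) => panic!("failed to create activation descriptor"),
        };
        // Configure it for ReLU without NaN propagation; the ceiling value is only
        // consulted by the clipped ReLU mode.
        let set = API::set_activation_descriptor(
            desc,
            cudnnActivationMode_t::CUDNN_ACTIVATION_RELU,
            cudnnNanPropagation_t::CUDNN_NOT_PROPAGATE_NAN,
            6.0
        );
        assert!(set.is_ok());
        // Release the descriptor once it is no longer needed.
        assert!(API::destroy_activation_descriptor(desc).is_ok());
    }
}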