/// An image to be analyzed, carrying its declared encoding and its payload.
#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Image {
/// Encoding of the image content; valid values are listed in
/// [`image::ImageType`]. Stored as a raw `i32` (prost convention, so
/// unknown wire values are preserved).
#[prost(enumeration = "image::ImageType", tag = "2")]
pub image_type: i32,
/// Source of the image data; the only variant currently defined is
/// inline bytes ([`image::ImageSource::Content`]).
#[prost(oneof = "image::ImageSource", tags = "1")]
pub image_source: ::core::option::Option<image::ImageSource>,
}
/// Nested types for the [`Image`](super::Image) message.
pub mod image {
    /// Encoding of the image content.
    #[derive(
        Clone,
        Copy,
        Debug,
        PartialEq,
        Eq,
        Hash,
        PartialOrd,
        Ord,
        ::prost::Enumeration
    )]
    #[repr(i32)]
    pub enum ImageType {
        /// Encoding was not specified.
        Unspecified = 0,
        /// JPEG-encoded image.
        Jpeg = 1,
        /// PNG-encoded image.
        Png = 2,
    }
    impl ImageType {
        /// Returns the name of this enum value as defined in the
        /// original proto file, suitable for string-based serialization.
        pub fn as_str_name(&self) -> &'static str {
            match self {
                Self::Unspecified => "IMAGE_TYPE_UNSPECIFIED",
                Self::Jpeg => "JPEG",
                Self::Png => "PNG",
            }
        }
        /// Parses a proto enum-value name back into a variant, or
        /// `None` if the string matches no known value.
        pub fn from_str_name(name: &str) -> ::core::option::Option<Self> {
            match name {
                "IMAGE_TYPE_UNSPECIFIED" => Some(Self::Unspecified),
                "JPEG" => Some(Self::Jpeg),
                "PNG" => Some(Self::Png),
                _ => None,
            }
        }
    }
    /// Where the image data comes from.
    #[allow(clippy::derive_partial_eq_without_eq)]
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum ImageSource {
        /// Raw image bytes embedded directly in the message.
        #[prost(bytes, tag = "1")]
        Content(::prost::alloc::vec::Vec<u8>),
    }
}
/// A classification label: a machine-readable name plus a free-form
/// human-readable description.
#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Label {
/// Identifier of the label.
#[prost(string, tag = "1")]
pub name: ::prost::alloc::string::String,
/// Human-readable description of the label.
#[prost(string, tag = "2")]
pub description: ::prost::alloc::string::String,
}
/// One classification result: a label together with the classifier's
/// confidence in it.
#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ClassAnnotation {
/// The label being asserted for the image.
#[prost(message, optional, tag = "1")]
pub label: ::core::option::Option<Label>,
/// Confidence score for this label.
/// NOTE(review): presumably in [0, 1] — not established by this file.
#[prost(double, tag = "2")]
pub confidence: f64,
}
/// Describes a classifier: the set of labels it can emit and the kind of
/// classification it performs.
#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ClassifierSpecification {
/// Labels this classifier can assign.
#[prost(message, repeated, tag = "1")]
pub labels: ::prost::alloc::vec::Vec<Label>,
/// Kind of classification; valid values are listed in
/// [`classifier_specification::ClassificationType`]. Raw `i32` per
/// prost convention so unknown wire values are preserved.
#[prost(enumeration = "classifier_specification::ClassificationType", tag = "2")]
pub classification_type: i32,
}
/// Nested types for the [`ClassifierSpecification`](super::ClassifierSpecification) message.
pub mod classifier_specification {
    /// Kind of classification the classifier performs.
    #[derive(
        Clone,
        Copy,
        Debug,
        PartialEq,
        Eq,
        Hash,
        PartialOrd,
        Ord,
        ::prost::Enumeration
    )]
    #[repr(i32)]
    pub enum ClassificationType {
        /// Classification type was not specified.
        Unspecified = 0,
        MultiLabel = 1,
        MultiClass = 2,
    }
    impl ClassificationType {
        /// Returns the name of this enum value as defined in the
        /// original proto file, suitable for string-based serialization.
        pub fn as_str_name(&self) -> &'static str {
            match self {
                Self::Unspecified => "CLASSIFICATION_TYPE_UNSPECIFIED",
                Self::MultiLabel => "MULTI_LABEL",
                Self::MultiClass => "MULTI_CLASS",
            }
        }
        /// Parses a proto enum-value name back into a variant, or
        /// `None` if the string matches no known value.
        pub fn from_str_name(name: &str) -> ::core::option::Option<Self> {
            match name {
                "CLASSIFICATION_TYPE_UNSPECIFIED" => Some(Self::Unspecified),
                "MULTI_LABEL" => Some(Self::MultiLabel),
                "MULTI_CLASS" => Some(Self::MultiClass),
                _ => None,
            }
        }
    }
}
/// Result of an annotation request: the classifier that was used and the
/// annotations it produced.
#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AnnotationResponse {
/// Identifier of the request this response answers.
#[prost(string, tag = "1")]
pub request_id: ::prost::alloc::string::String,
/// Specification of the classifier that produced the annotations.
#[prost(message, optional, tag = "2")]
pub classifier_specification: ::core::option::Option<ClassifierSpecification>,
/// Label annotations produced for the image.
#[prost(message, repeated, tag = "3")]
pub annotations: ::prost::alloc::vec::Vec<ClassAnnotation>,
}
/// Request to annotate a single image.
#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AnnotationRequest {
/// The image to classify.
#[prost(message, optional, tag = "1")]
pub image: ::core::option::Option<Image>,
}
/// Generated gRPC client for the
/// `yandex.cloud.ai.vision.v2.ImageClassifierService` service (tonic
/// codegen output). The client is generic over the underlying transport
/// so it works with channels, interceptors, and custom services alike.
pub mod image_classifier_service_client {
#![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
use tonic::codegen::*;
use tonic::codegen::http::Uri;
/// Thin wrapper over `tonic::client::Grpc` exposing the service's RPCs.
#[derive(Debug, Clone)]
pub struct ImageClassifierServiceClient<T> {
inner: tonic::client::Grpc<T>,
}
impl ImageClassifierServiceClient<tonic::transport::Channel> {
/// Attempts to create a new client by connecting to the given endpoint.
pub async fn connect<D>(dst: D) -> Result<Self, tonic::transport::Error>
where
D: TryInto<tonic::transport::Endpoint>,
D::Error: Into<StdError>,
{
let conn = tonic::transport::Endpoint::new(dst)?.connect().await?;
Ok(Self::new(conn))
}
}
impl<T> ImageClassifierServiceClient<T>
where
T: tonic::client::GrpcService<tonic::body::BoxBody>,
T::Error: Into<StdError>,
T::ResponseBody: Body<Data = Bytes> + Send + 'static,
<T::ResponseBody as Body>::Error: Into<StdError> + Send,
{
/// Wraps an already-constructed transport service in a client.
pub fn new(inner: T) -> Self {
let inner = tonic::client::Grpc::new(inner);
Self { inner }
}
/// Like [`new`](Self::new), but requests are addressed against `origin`.
pub fn with_origin(inner: T, origin: Uri) -> Self {
let inner = tonic::client::Grpc::with_origin(inner, origin);
Self { inner }
}
/// Wraps the transport with `interceptor`, which runs on every request.
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> ImageClassifierServiceClient<InterceptedService<T, F>>
where
F: tonic::service::Interceptor,
T::ResponseBody: Default,
T: tonic::codegen::Service<
http::Request<tonic::body::BoxBody>,
Response = http::Response<
<T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
>,
>,
<T as tonic::codegen::Service<
http::Request<tonic::body::BoxBody>,
>>::Error: Into<StdError> + Send + Sync,
{
ImageClassifierServiceClient::new(
InterceptedService::new(inner, interceptor),
)
}
/// Compress requests with the given encoding.
///
/// This requires the server to support it, otherwise it might respond with an
/// error.
#[must_use]
pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.inner = self.inner.send_compressed(encoding);
self
}
/// Enable decompressing responses that use the given encoding.
#[must_use]
pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
self.inner = self.inner.accept_compressed(encoding);
self
}
/// Limits the maximum size of a decoded message.
#[must_use]
pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
self.inner = self.inner.max_decoding_message_size(limit);
self
}
/// Limits the maximum size of an encoded message.
#[must_use]
pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
self.inner = self.inner.max_encoding_message_size(limit);
self
}
/// Calls the `Annotate` RPC: submits an image and returns its
/// classification annotations.
pub async fn annotate(
&mut self,
request: impl tonic::IntoRequest<super::AnnotationRequest>,
) -> std::result::Result<
tonic::Response<super::AnnotationResponse>,
tonic::Status,
> {
// Wait until the underlying service can accept a request; surface
// readiness failures as an `Unknown` gRPC status.
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/yandex.cloud.ai.vision.v2.ImageClassifierService/Annotate",
);
let mut req = request.into_request();
// Attach the fully-qualified method name so interceptors/middleware
// can identify which RPC is being invoked.
req.extensions_mut()
.insert(
GrpcMethod::new(
"yandex.cloud.ai.vision.v2.ImageClassifierService",
"Annotate",
),
);
self.inner.unary(req, path, codec).await
}
}
}