Struct aleph_alpha_client::Client

source ·
pub struct Client { /* private fields */ }
Expand description

Execute Jobs against the Aleph Alpha API

Implementations§

source§

impl Client

source

pub fn new(api_token: &str) -> Result<Self, Error>

A new instance of an Aleph Alpha client helping you interact with the Aleph Alpha API.

source

pub fn with_base_url(host: String, api_token: &str) -> Result<Self, Error>

In production you typically would want to set this to https://api.aleph-alpha.com. Yet you may want to use a different instance for testing.

source

pub async fn execute<T: Task>( &self, model: &str, task: &T, how: &How, ) -> Result<T::Output, Error>

👎Deprecated: Please use output_of instead.

Execute a task with the aleph alpha API and fetch its result.

use aleph_alpha_client::{Client, How, TaskCompletion, Error};

async fn print_completion() -> Result<(), Error> {
    // Authenticate against API. Fetches token.
    let client = Client::new("AA_API_TOKEN")?;

    // Name of the model we want to use. Large models usually give better answers, but are
    // also slower and more costly.
    let model = "luminous-base";

    // The task we want to perform. Here we want to continue the sentence: "An apple a day
    // ..."
    let task = TaskCompletion::from_text("An apple a day", 10);

    // Retrieve answer from API
    let response = client.execute(model, &task, &How::default()).await?;

    // Print entire sentence with completion
    println!("An apple a day{}", response.completion);
    Ok(())
}
source

pub async fn output_of<T: Job>( &self, task: &T, how: &How, ) -> Result<T::Output, Error>

Execute any task with the aleph alpha API and fetch its result. This is most useful in generic code when you want to execute arbitrary task types. Otherwise prefer methods taking concrete tasks like Self::completion for improved readability.

source

pub async fn semantic_embedding( &self, task: &TaskSemanticEmbedding<'_>, how: &How, ) -> Result<SemanticEmbeddingOutput, Error>

An embedding trying to capture the semantic meaning of a text. Cosine similarity can be used to find out how well two texts (or multimodal prompts) match. Useful for search use cases.

See the example for cosine_similarity.

source

pub async fn batch_semantic_embedding( &self, task: &TaskBatchSemanticEmbedding<'_>, how: &How, ) -> Result<BatchSemanticEmbeddingOutput, Error>

A batch of embeddings trying to capture the semantic meaning of texts.

source

pub async fn completion( &self, task: &TaskCompletion<'_>, model: &str, how: &How, ) -> Result<CompletionOutput, Error>

Instruct a model served by the aleph alpha API to continue writing a piece of text (or multimodal document).

use aleph_alpha_client::{Client, How, TaskCompletion, Task, Error};

async fn print_completion() -> Result<(), Error> {
    // Authenticate against API. Fetches token.
    let client = Client::new("AA_API_TOKEN")?;

    // Name of the model we want to use. Large models usually give better answers, but are
    // also slower and more costly.
    let model = "luminous-base";

    // The task we want to perform. Here we want to continue the sentence: "An apple a day
    // ..."
    let task = TaskCompletion::from_text("An apple a day", 10);

    // Retrieve answer from API
    let response = client.completion(&task, model, &How::default()).await?;

    // Print entire sentence with completion
    println!("An apple a day{}", response.completion);
    Ok(())
}
source

pub async fn explanation( &self, task: &TaskExplanation<'_>, model: &str, how: &How, ) -> Result<ExplanationOutput, Error>

Returns an explanation given a prompt and a target (typically generated by a previous completion request). The explanation describes how individual parts of the prompt influenced the target.

use aleph_alpha_client::{Client, How, TaskCompletion, Task, Error, Granularity, TaskExplanation, Stopping, Prompt, Sampling};

async fn print_explanation() -> Result<(), Error> {
    let client = Client::new("AA_API_TOKEN")?;

    // Name of the model we want to use. Large models usually give better answers, but are
    // also slower and more costly.
    let model = "luminous-base";

    // input for the completion
    let prompt = Prompt::from_text("An apple a day");

    let task = TaskCompletion {
        prompt: prompt.clone(),
        stopping: Stopping::from_maximum_tokens(10),
        sampling: Sampling::MOST_LIKELY,
    };
    let response = client.completion(&task, model, &How::default()).await?;

    let task = TaskExplanation {
        prompt: prompt,               // same input as for completion
        target: &response.completion,  // output of completion
        granularity: Granularity::default(),
    };
    let response = client.explanation(&task, model, &How::default()).await?;

    dbg!(&response);
    Ok(())
}
source

pub async fn tokenize( &self, task: &TaskTokenization<'_>, model: &str, how: &How, ) -> Result<TokenizationOutput, Error>

Tokenize a prompt for a specific model.

use aleph_alpha_client::{Client, Error, How, TaskTokenization};

async fn tokenize() -> Result<(), Error> {
    let client = Client::new("AA_API_TOKEN")?;

    // Name of the model for which we want to tokenize text.
    let model = "luminous-base";

    // Text prompt to be tokenized.
    let prompt = "An apple a day";

    let task = TaskTokenization {
        prompt,
        tokens: true,       // return text-tokens
        token_ids: true,    // return numeric token-ids
    };
    let response = client.tokenize(&task, model, &How::default()).await?;

    dbg!(&response);
    Ok(())
}
source

pub async fn detokenize( &self, task: &TaskDetokenization<'_>, model: &str, how: &How, ) -> Result<DetokenizationOutput, Error>

Detokenize a list of token ids into a string.

use aleph_alpha_client::{Client, Error, How, TaskDetokenization};

async fn detokenize() -> Result<(), Error> {
    let client = Client::new("AA_API_TOKEN")?;

    // Specify the name of the model whose tokenizer was used to generate the input token ids.
    let model = "luminous-base";

    // Token ids to convert into text.
    let token_ids: Vec<u32> = vec![556, 48741, 247, 2983];

    let task = TaskDetokenization {
        token_ids: &token_ids,
    };
    let response = client.detokenize(&task, model, &How::default()).await?;

    dbg!(&response);
    Ok(())
}

Auto Trait Implementations§

§

impl Freeze for Client

§

impl !RefUnwindSafe for Client

§

impl Send for Client

§

impl Sync for Client

§

impl Unpin for Client

§

impl !UnwindSafe for Client

Blanket Implementations§

source§

impl<T> Any for T
where T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

source§

impl<T> Instrument for T

source§

fn instrument(self, span: Span) -> Instrumented<Self>

Instruments this type with the provided Span, returning an Instrumented wrapper. Read more
source§

fn in_current_span(self) -> Instrumented<Self>

Instruments this type with the current Span, returning an Instrumented wrapper. Read more
source§

impl<T, U> Into<U> for T
where U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

source§

impl<T> IntoEither for T

source§

fn into_either(self, into_left: bool) -> Either<Self, Self>

Converts self into a Left variant of Either<Self, Self> if into_left is true. Converts self into a Right variant of Either<Self, Self> otherwise. Read more
source§

fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
where F: FnOnce(&Self) -> bool,

Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true. Converts self into a Right variant of Either<Self, Self> otherwise. Read more
source§

impl<T> Pointable for T

source§

const ALIGN: usize = _

The alignment of pointer.
§

type Init = T

The type for initializers.
source§

unsafe fn init(init: <T as Pointable>::Init) -> usize

Initializes a pointer with the given initializer. Read more
source§

unsafe fn deref<'a>(ptr: usize) -> &'a T

Dereferences the given pointer. Read more
source§

unsafe fn deref_mut<'a>(ptr: usize) -> &'a mut T

Mutably dereferences the given pointer. Read more
source§

unsafe fn drop(ptr: usize)

Drops the object pointed to by the given pointer. Read more
source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
source§

impl<T> WithSubscriber for T

source§

fn with_subscriber<S>(self, subscriber: S) -> WithDispatch<Self>
where S: Into<Dispatch>,

Attaches the provided Subscriber to this type, returning a WithDispatch wrapper. Read more
source§

fn with_current_subscriber(self) -> WithDispatch<Self>

Attaches the current default Subscriber to this type, returning a WithDispatch wrapper. Read more
source§

impl<T> ErasedDestructor for T
where T: 'static,

source§

impl<T> MaybeSendSync for T