use std::path::PathBuf;
use rust_bert::gpt_j::{GptJConfigResources, GptJMergesResources, GptJVocabResources};
use rust_bert::pipelines::common::{ModelResource, ModelType};
use rust_bert::pipelines::text_generation::{TextGenerationConfig, TextGenerationModel};
use rust_bert::resources::{LocalResource, RemoteResource};
use tch::Device;
/// GPT-J (6B, float16) text-generation example.
///
/// Tokenizer assets (model config, vocabulary, BPE merges) are pulled from
/// remote pretrained resources, while the converted weights are expected
/// locally at `resources/gpt-j-6B-float16/rust_model.ot`.
///
/// Generation is deterministic (greedy, no sampling, single beam), so the
/// outputs for the two prompts are pinned with exact-match assertions.
fn main() -> anyhow::Result<()> {
    // Remote resources for the float16 GPT-J 6B checkpoint.
    let config = Box::new(RemoteResource::from_pretrained(
        GptJConfigResources::GPT_J_6B_FLOAT16,
    ));
    let vocab = Box::new(RemoteResource::from_pretrained(
        GptJVocabResources::GPT_J_6B_FLOAT16,
    ));
    let merges = Box::new(RemoteResource::from_pretrained(
        GptJMergesResources::GPT_J_6B_FLOAT16,
    ));

    // Weights must have been converted to the .ot format on disk beforehand.
    let weights_path = PathBuf::from("resources/gpt-j-6B-float16/rust_model.ot");
    let weights = Box::new(LocalResource::from(weights_path));

    // Greedy decoding: do_sample is off and a single beam is used, so the
    // generated continuations are reproducible across runs.
    let generation_config = TextGenerationConfig {
        model_type: ModelType::GPTJ,
        model_resource: ModelResource::Torch(weights),
        config_resource: config,
        vocab_resource: vocab,
        merges_resource: Some(merges),
        min_length: 10,
        max_length: Some(32),
        do_sample: false,
        early_stopping: true,
        num_beams: 1,
        num_return_sequences: 1,
        device: Device::cuda_if_available(),
        ..Default::default()
    };

    let model = TextGenerationModel::new(generation_config)?;

    let prompts = [
        "It was a very nice and sunny",
        "It was a gloom winter night, and",
    ];
    // One continuation per prompt (num_return_sequences == 1).
    let output = model.generate(&prompts, None)?;

    assert_eq!(output.len(), 2);
    assert_eq!(output[0], "It was a very nice and sunny day, and I was sitting in the garden of my house, enjoying the sun and the fresh air. I was thinking");
    assert_eq!(output[1], "It was a gloom winter night, and the wind was howling. The snow was falling, and the temperature was dropping. The snow was coming down so hard");

    Ok(())
}