use crate::adapter::adapters::support::get_api_key;
use crate::adapter::gemini::GeminiStreamer;
use crate::adapter::{Adapter, AdapterKind, ServiceType, WebRequestData};
use crate::chat::{
Binary, BinarySource, ChatOptionsSet, ChatRequest, ChatResponse, ChatResponseFormat, ChatRole, ChatStream,
ChatStreamResponse, CompletionTokensDetails, ContentPart, MessageContent, PromptTokensDetails, ReasoningEffort,
Tool, ToolCall, ToolConfig, ToolName, Usage,
};
use crate::resolver::{AuthData, Endpoint};
use crate::webc::{WebResponse, WebStream};
use crate::{Error, Headers, ModelIden, Result, ServiceTarget};
use reqwest::RequestBuilder;
use serde_json::{Value, json};
use value_ext::JsonValueExt;
/// Adapter implementation for Google's Gemini (Generative Language) API.
pub struct GeminiAdapter;
// Thinking-token budgets that the named reasoning-effort levels map to.
// Used by `insert_gemini_thinking_budget_value` and by the `-zero` model-name
// suffix handling in `to_web_request_data`.
const REASONING_ZERO: u32 = 0;
const REASONING_LOW: u32 = 1000;
const REASONING_MEDIUM: u32 = 8000;
const REASONING_HIGH: u32 = 24000;
/// Resolves a [`ReasoningEffort`] to a Gemini thinking-token budget and, when one
/// applies, writes it at `/generationConfig/thinkingConfig/thinkingBudget`.
///
/// `ReasoningEffort::None` writes nothing and leaves `payload` untouched.
fn insert_gemini_thinking_budget_value(payload: &mut Value, effort: &ReasoningEffort) -> Result<()> {
    let tokens = match effort {
        // No effort requested: do not emit a budget at all.
        ReasoningEffort::None => return Ok(()),
        ReasoningEffort::Minimal | ReasoningEffort::Low => REASONING_LOW,
        ReasoningEffort::Medium => REASONING_MEDIUM,
        ReasoningEffort::High => REASONING_HIGH,
        ReasoningEffort::Budget(explicit) => *explicit,
    };
    payload.x_insert("/generationConfig/thinkingConfig/thinkingBudget", tokens)?;
    Ok(())
}
impl GeminiAdapter {
    /// Default environment variable consulted for the Gemini API key.
    pub const API_KEY_DEFAULT_ENV_NAME: &str = "GEMINI_API_KEY";
}
impl Adapter for GeminiAdapter {
    const DEFAULT_API_KEY_ENV_NAME: Option<&'static str> = Some(Self::API_KEY_DEFAULT_ENV_NAME);

    /// Default endpoint for the Gemini "generativelanguage" v1beta REST API.
    fn default_endpoint() -> Endpoint {
        const BASE_URL: &str = "https://generativelanguage.googleapis.com/v1beta/";
        Endpoint::from_static(BASE_URL)
    }

    /// Default auth: resolve the API key from the `GEMINI_API_KEY` env var.
    fn default_auth() -> AuthData {
        match Self::DEFAULT_API_KEY_ENV_NAME {
            Some(env_name) => AuthData::from_env(env_name),
            None => AuthData::None,
        }
    }

    /// Lists available model names via `GET {base_url}models`.
    ///
    /// Names come back as `models/<name>`; the `models/` prefix is stripped so
    /// callers see bare model ids (e.g. `gemini-2.0-flash`).
    async fn all_model_names(kind: AdapterKind) -> Result<Vec<String>> {
        let endpoint = Self::default_endpoint();
        let auth = Self::default_auth();
        let base_url = endpoint.base_url();
        let url = format!("{base_url}models");
        // Best-effort auth: if no key resolves, the request is sent without the header.
        let api_key = auth.single_key_value().ok();
        let headers = api_key
            .map(|api_key| Headers::from(("x-goog-api-key".to_string(), api_key)))
            .unwrap_or_default();
        let web_c = crate::webc::WebClient::default();
        let mut res = web_c.do_get(&url, &headers).await.map_err(|webc_error| Error::WebAdapterCall {
            adapter_kind: kind,
            webc_error,
        })?;
        let mut models: Vec<String> = Vec::new();
        if let Value::Array(models_value) = res.body.x_take("models")? {
            for mut model in models_value {
                let model_name: String = model.x_take("name")?;
                let model_name = model_name.strip_prefix("models/").unwrap_or(&model_name).to_string();
                models.push(model_name);
            }
        }
        Ok(models)
    }

    /// Builds the request URL for the given service type. The API key goes in a
    /// header (see `to_web_request_data`), not the URL.
    fn get_service_url(model: &ModelIden, service_type: ServiceType, endpoint: Endpoint) -> Result<String> {
        let base_url = endpoint.base_url();
        // Drop any namespace prefix; Gemini URLs take the bare model name.
        let (_, model_name) = model.model_name.namespace_and_name();
        let url = match service_type {
            ServiceType::Chat => format!("{base_url}models/{model_name}:generateContent"),
            ServiceType::ChatStream => format!("{base_url}models/{model_name}:streamGenerateContent"),
            ServiceType::Embed => format!("{base_url}models/{model_name}:embedContent"),
        };
        Ok(url)
    }

    /// Assembles the full web request (url, headers, JSON payload) for a chat call.
    ///
    /// Notable behaviors:
    /// - When the options carry no explicit reasoning effort, a trailing model-name
    ///   suffix (`-zero`, `-none`, `-low`/`-minimal`, `-medium`, `-high`) is parsed
    ///   as the effort and stripped from the provider model name.
    /// - For models whose name contains `gemini-3`, Low/High map to the discrete
    ///   `thinkingLevel` field; other efforts fall back to a numeric `thinkingBudget`.
    fn to_web_request_data(
        target: ServiceTarget,
        service_type: ServiceType,
        chat_req: ChatRequest,
        options_set: ChatOptionsSet<'_, '_>,
    ) -> Result<WebRequestData> {
        let ServiceTarget { endpoint, auth, model } = target;
        let (_, model_name) = model.model_name.namespace_and_name();
        let api_key = get_api_key(auth, &model)?;
        let headers = Headers::from(("x-goog-api-key".to_string(), api_key.to_string()));
        // Resolve (provider model name, reasoning effort): explicit option wins;
        // otherwise try to derive the effort from a trailing `-<level>` suffix.
        let (provider_model_name, computed_reasoning_effort) = match (model_name, options_set.reasoning_effort()) {
            // Note: in this arm `model` is bound to the same &str as `model_name`.
            (model, None) => {
                if let Some((prefix, last)) = model_name.rsplit_once('-') {
                    let reasoning = match last {
                        "zero" => Some(ReasoningEffort::Budget(REASONING_ZERO)),
                        "none" => Some(ReasoningEffort::None),
                        "low" | "minimal" => Some(ReasoningEffort::Low),
                        "medium" => Some(ReasoningEffort::Medium),
                        "high" => Some(ReasoningEffort::High),
                        _ => None,
                    };
                    // Only strip the suffix when it actually named a reasoning level.
                    let model = if reasoning.is_some() { prefix } else { model };
                    (model, reasoning)
                } else {
                    (model, None)
                }
            }
            (model, Some(effort)) => (model, Some(effort.clone())),
        };
        let GeminiChatRequestParts {
            system,
            contents,
            tools,
        } = Self::into_gemini_request_parts(&model, chat_req)?;
        let mut payload = json!({
            "contents": contents,
        });
        if let Some(computed_reasoning_effort) = computed_reasoning_effort {
            if provider_model_name.contains("gemini-3") {
                // gemini-3 models take a discrete `thinkingLevel` for Low/High;
                // everything else (None/Medium/Budget) still uses the budget path.
                match computed_reasoning_effort {
                    ReasoningEffort::Low | ReasoningEffort::Minimal => {
                        payload.x_insert("/generationConfig/thinkingConfig/thinkingLevel", "LOW")?;
                    }
                    ReasoningEffort::High => {
                        payload.x_insert("/generationConfig/thinkingConfig/thinkingLevel", "HIGH")?;
                    }
                    other => {
                        insert_gemini_thinking_budget_value(&mut payload, &other)?;
                    }
                }
            } else {
                insert_gemini_thinking_budget_value(&mut payload, &computed_reasoning_effort)?;
            }
            // Ask the API to include thought parts so reasoning can be surfaced.
            payload.x_insert("/generationConfig/thinkingConfig/includeThoughts", true)?;
        }
        if let Some(system) = system {
            payload.x_insert(
                "systemInstruction",
                json!({
                    "parts": [ { "text": system }]
                }),
            )?;
        }
        if let Some(tools) = tools {
            payload.x_insert("tools", tools)?;
        }
        if let Some(ChatResponseFormat::JsonSpec(st_json)) = options_set.response_format() {
            payload.x_insert("/generationConfig/responseMimeType", "application/json")?;
            let mut schema = st_json.schema.clone();
            // Strip `additionalProperties` everywhere in the (cloned) schema —
            // presumably because Gemini's schema dialect rejects it; TODO confirm
            // this is still required by the current API version.
            schema.x_walk(|parent_map, name| {
                if name == "additionalProperties" {
                    parent_map.remove("additionalProperties");
                }
                true
            });
            payload.x_insert("/generationConfig/responseJsonSchema", schema)?;
        }
        if let Some(temperature) = options_set.temperature() {
            payload.x_insert("/generationConfig/temperature", temperature)?;
        }
        if !options_set.stop_sequences().is_empty() {
            payload.x_insert("/generationConfig/stopSequences", options_set.stop_sequences())?;
        }
        if let Some(max_tokens) = options_set.max_tokens() {
            payload.x_insert("/generationConfig/maxOutputTokens", max_tokens)?;
        }
        if let Some(top_p) = options_set.top_p() {
            payload.x_insert("/generationConfig/topP", top_p)?;
        }
        // Build the URL from the (possibly suffix-stripped) provider model name.
        let provider_model = model.from_name(provider_model_name);
        let url = Self::get_service_url(&provider_model, service_type, endpoint)?;
        Ok(WebRequestData { url, headers, payload })
    }

    /// Converts a non-streaming web response into a [`ChatResponse`].
    ///
    /// Thought signatures are emitted as `ThoughtSignature` content parts and, when
    /// tool calls are present, also copied onto the first tool call.
    fn to_chat_response(
        model_iden: ModelIden,
        web_response: WebResponse,
        _options_set: ChatOptionsSet<'_, '_>,
    ) -> Result<ChatResponse> {
        let WebResponse { mut body, .. } = web_response;
        // `modelVersion` reports the concrete model that served the request.
        let provider_model_name: Option<String> = body.x_remove("modelVersion").ok();
        let provider_model_iden = model_iden.from_optional_name(provider_model_name);
        // NOTE(review): the `.clone()` here is redundant — only a borrow is needed.
        let gemini_response = Self::body_to_gemini_chat_response(&model_iden.clone(), body)?;
        let GeminiChatResponse {
            content: gemini_content,
            usage,
        } = gemini_response;
        // Bucket parts by kind so they can be re-assembled in a fixed order below:
        // thought signatures, then text, then binaries, then tool calls.
        let mut thoughts: Vec<String> = Vec::new();
        let mut reasonings: Vec<String> = Vec::new();
        let mut texts: Vec<String> = Vec::new();
        let mut tool_calls: Vec<ToolCall> = Vec::new();
        let mut binary_parts: Vec<Binary> = Vec::new();
        for g_item in gemini_content {
            match g_item {
                GeminiChatContent::Text(text) => texts.push(text),
                GeminiChatContent::Binary(binary) => binary_parts.push(binary),
                GeminiChatContent::ToolCall(tool_call) => tool_calls.push(tool_call),
                GeminiChatContent::ThoughtSignature(thought) => thoughts.push(thought),
                GeminiChatContent::Reasoning(reasoning_text) => reasonings.push(reasoning_text),
            }
        }
        // When both thoughts and tool calls exist, attach the signatures to the first call.
        let thought_signatures_for_call = (!thoughts.is_empty() && !tool_calls.is_empty()).then(|| thoughts.clone());
        let mut parts: Vec<ContentPart> = thoughts.into_iter().map(ContentPart::ThoughtSignature).collect();
        if let Some(signatures) = thought_signatures_for_call
            && let Some(first_call) = tool_calls.first_mut()
        {
            first_call.thought_signatures = Some(signatures);
        }
        // Concatenate all text parts into a single `Text` content part.
        if !texts.is_empty() {
            let total_len: usize = texts.iter().map(|t| t.len()).sum();
            let mut combined_text = String::with_capacity(total_len);
            for text in texts {
                combined_text.push_str(&text);
            }
            if !combined_text.is_empty() {
                parts.push(ContentPart::Text(combined_text));
            }
        }
        // Reasoning (thought) text is reported separately via `reasoning_content`.
        let mut reasoning_text = String::new();
        if !reasonings.is_empty() {
            for reasoning in &reasonings {
                reasoning_text.push_str(reasoning);
            }
        }
        if !binary_parts.is_empty() {
            for binary in binary_parts {
                parts.push(ContentPart::Binary(binary));
            }
        }
        parts.extend(tool_calls.into_iter().map(ContentPart::ToolCall));
        let content = MessageContent::from_parts(parts);
        Ok(ChatResponse {
            content,
            // NOTE(review): this yields `Some("")` when no reasoning came back —
            // verify callers expect that rather than `None`.
            reasoning_content: Some(reasoning_text),
            model_iden,
            provider_model_iden,
            usage,
            captured_raw_body: None,
        })
    }

    /// Builds the streaming chat response. The stream is consumed via
    /// `WebStream::new_with_pretty_json_array` (Gemini's stream framing) and
    /// adapted through [`GeminiStreamer`].
    fn to_chat_stream(
        model_iden: ModelIden,
        reqwest_builder: RequestBuilder,
        options_set: ChatOptionsSet<'_, '_>,
    ) -> Result<ChatStreamResponse> {
        let web_stream = WebStream::new_with_pretty_json_array(reqwest_builder);
        let gemini_stream = GeminiStreamer::new(web_stream, model_iden.clone(), options_set);
        let chat_stream = ChatStream::from_inter_stream(gemini_stream);
        Ok(ChatStreamResponse {
            model_iden,
            stream: chat_stream,
        })
    }

    /// Delegates embed request building to the sibling `embed` module.
    fn to_embed_request_data(
        service_target: crate::ServiceTarget,
        embed_req: crate::embed::EmbedRequest,
        options_set: crate::embed::EmbedOptionsSet<'_, '_>,
    ) -> Result<crate::adapter::WebRequestData> {
        super::embed::to_embed_request_data(service_target, embed_req, options_set)
    }

    /// Delegates embed response parsing to the sibling `embed` module.
    fn to_embed_response(
        model_iden: crate::ModelIden,
        web_response: crate::webc::WebResponse,
        options_set: crate::embed::EmbedOptionsSet<'_, '_>,
    ) -> Result<crate::embed::EmbedResponse> {
        super::embed::to_embed_response(model_iden, web_response, options_set)
    }
}
/// Support functions shared by the non-streaming path and the streamer.
impl GeminiAdapter {
    /// Parses a Gemini response body into content parts + usage.
    ///
    /// Returns `Error::ChatResponse` when the body carries an `error` object or has
    /// no `candidates[0].content.parts`.
    pub(super) fn body_to_gemini_chat_response(model_iden: &ModelIden, mut body: Value) -> Result<GeminiChatResponse> {
        if body.get("error").is_some() {
            return Err(Error::ChatResponse {
                model_iden: model_iden.clone(),
                body,
            });
        }
        let mut content: Vec<GeminiChatContent> = Vec::new();
        let parts = match body.x_take::<Vec<Value>>("/candidates/0/content/parts") {
            Ok(parts) => parts,
            Err(_) => {
                // No parts (e.g. empty/blocked candidate): surface finishReason + usage.
                // NOTE(review): `candidates` is an array — this path likely needs the
                // index (`/candidates/0/finishReason`); verify against the API shape.
                let finish_reason = body.x_remove::<String>("/candidates/finishReason").ok();
                let usage_metadata = body.x_remove::<Value>("/usageMetadata").ok();
                let body = json!({
                    "finishReason": finish_reason,
                    "usageMetadata": usage_metadata,
                });
                return Err(Error::ChatResponse {
                    model_iden: model_iden.clone(),
                    body,
                });
            }
        };
        for mut part in parts {
            {
                // Thought handling: either an opaque `thoughtSignature` string, or a
                // `thought: true` flag marking this part's `text` as reasoning text.
                if let Some(thought_signature) = part
                    .x_take::<Value>("thoughtSignature")
                    .ok()
                    .and_then(|v| if let Value::String(v) = v { Some(v) } else { None })
                {
                    content.push(GeminiChatContent::ThoughtSignature(thought_signature));
                } else if let Some(thought) = part
                    .x_take::<Value>("thought")
                    .ok()
                    .and_then(|v| if let Value::Bool(v) = v { Some(v) } else { None })
                    && thought
                    && let Some(val) = part
                        .x_take::<Value>("text")
                        .ok()
                        .and_then(|v| if let Value::String(v) = v { Some(v) } else { None })
                {
                    content.push(GeminiChatContent::Reasoning(val));
                }
            }
            // Function-call part. Gemini does not return a separate call id, so the
            // function name doubles as `call_id`.
            if let Ok(fn_call_value) = part.x_take::<Value>("functionCall") {
                let tool_call = ToolCall {
                    call_id: fn_call_value.x_get("name").unwrap_or("".to_string()),
                    fn_name: fn_call_value.x_get("name").unwrap_or("".to_string()),
                    fn_arguments: fn_call_value.x_get("args").unwrap_or(Value::Null),
                    thought_signatures: None,
                };
                content.push(GeminiChatContent::ToolCall(tool_call))
            }
            // Plain text part (`text` was already consumed above if it was reasoning).
            if let Some(txt_content) = part
                .x_take::<Value>("text")
                .ok()
                .and_then(|v| if let Value::String(v) = v { Some(v) } else { None })
                .map(GeminiChatContent::Text)
            {
                content.push(txt_content)
            }
            // Inline binary part (base64 `data` + `mimeType`).
            if let Ok(inline_data) = part.x_take::<Value>("inlineData") {
                if let Ok(mime_type) = inline_data.x_get::<String>("mimeType")
                    && let Ok(data) = inline_data.x_get::<String>("data")
                {
                    let binary = Binary::from_base64(mime_type, data, None);
                    content.push(GeminiChatContent::Binary(binary));
                }
            }
        }
        let usage = body.x_take::<Value>("usageMetadata").map(Self::into_usage).unwrap_or_default();
        Ok(GeminiChatResponse { content, usage })
    }

    /// Normalizes Gemini `usageMetadata` into the crate's [`Usage`].
    ///
    /// Gemini reports `candidatesTokenCount` (visible output) and `thoughtsTokenCount`
    /// (reasoning) separately; `completion_tokens` is their sum when both are present.
    pub(super) fn into_usage(mut usage_value: Value) -> Usage {
        let total_tokens: Option<i32> = usage_value.x_take("totalTokenCount").ok();
        let prompt_tokens: Option<i32> = usage_value.x_take("promptTokenCount").ok();
        let g_cached_tokens: Option<i32> = usage_value.x_take("cachedContentTokenCount").ok();
        let prompt_tokens_details = g_cached_tokens.map(|g_cached_tokens| PromptTokensDetails {
            cache_creation_tokens: None,
            cache_creation_details: None,
            cached_tokens: Some(g_cached_tokens),
            audio_tokens: None,
        });
        let g_candidate_tokens: Option<i32> = usage_value.x_take("candidatesTokenCount").ok();
        let g_thoughts_tokens: Option<i32> = usage_value.x_take("thoughtsTokenCount").ok();
        let (completion_tokens, completion_tokens_details) = match (g_candidate_tokens, g_thoughts_tokens) {
            (Some(c_tokens), Some(t_tokens)) => (
                Some(c_tokens + t_tokens),
                Some(CompletionTokensDetails {
                    accepted_prediction_tokens: None,
                    rejected_prediction_tokens: None,
                    reasoning_tokens: Some(t_tokens),
                    audio_tokens: None,
                }),
            ),
            // Thoughts without candidate tokens: keep the reasoning detail but leave
            // the completion total unset.
            (None, Some(t_tokens)) => {
                (
                    None,
                    Some(CompletionTokensDetails {
                        accepted_prediction_tokens: None,
                        rejected_prediction_tokens: None,
                        reasoning_tokens: Some(t_tokens),
                        audio_tokens: None,
                    }),
                )
            }
            (c_tokens, None) => (c_tokens, None),
        };
        Usage {
            prompt_tokens,
            prompt_tokens_details,
            completion_tokens,
            completion_tokens_details,
            total_tokens,
        }
    }

    /// Splits a [`ChatRequest`] into Gemini request parts (system, contents, tools).
    ///
    /// System messages (plus `chat_req.system`) are collected and joined with
    /// newlines. User/Assistant/Tool messages become `contents` entries; tool
    /// results are sent with role `user`, assistant messages with role `model`.
    fn into_gemini_request_parts(
        model_iden: &ModelIden, chat_req: ChatRequest,
    ) -> Result<GeminiChatRequestParts> {
        let mut contents: Vec<Value> = Vec::new();
        let mut systems: Vec<String> = Vec::new();
        if let Some(system) = chat_req.system {
            systems.push(system);
        }
        for msg in chat_req.messages {
            match msg.role {
                ChatRole::System => {
                    // System messages are folded into the single systemInstruction.
                    if let Some(content) = msg.content.into_joined_texts() {
                        systems.push(content);
                    }
                }
                ChatRole::User => {
                    let mut parts_values: Vec<Value> = Vec::new();
                    for part in msg.content {
                        match part {
                            ContentPart::Text(text) => parts_values.push(json!({"text": text})),
                            ContentPart::Binary(binary) => {
                                let Binary {
                                    content_type, source, ..
                                } = binary;
                                // URL sources map to `file_data`; base64 to `inline_data`.
                                match &source {
                                    BinarySource::Url(url) => parts_values.push(json!({
                                        "file_data": {
                                            "mime_type": content_type,
                                            "file_uri": url
                                        }
                                    })),
                                    BinarySource::Base64(content) => parts_values.push(json!({
                                        "inline_data": {
                                            "mime_type": content_type,
                                            "data": content
                                        }
                                    })),
                                }
                            }
                            ContentPart::ToolCall(tool_call) => {
                                parts_values.push(json!({
                                    "functionCall": {
                                        "name": tool_call.fn_name,
                                        "args": tool_call.fn_arguments,
                                    }
                                }));
                            }
                            ContentPart::ToolResponse(tool_response) => {
                                parts_values.push(json!({
                                    "functionResponse": {
                                        "name": tool_response.call_id,
                                        "response": {
                                            "name": tool_response.call_id,
                                            "content": tool_response.content,
                                        }
                                    }
                                }));
                            }
                            ContentPart::ThoughtSignature(thought) => {
                                parts_values.push(json!({
                                    "thoughtSignature": thought
                                }));
                            }
                            // Custom parts have no Gemini mapping; silently skipped.
                            ContentPart::Custom(_) => {}
                        }
                    }
                    contents.push(json!({"role": "user", "parts": parts_values}));
                }
                ChatRole::Assistant => {
                    let mut parts_values: Vec<Value> = Vec::new();
                    // A ThoughtSignature part is held back ("pending") so it can be
                    // merged INTO the next functionCall part; when any other part
                    // follows instead, it is flushed as a standalone part.
                    let mut pending_thought: Option<String> = None;
                    let mut is_first_tool_call = true;
                    for part in msg.content {
                        match part {
                            ContentPart::Text(text) => {
                                if let Some(thought) = pending_thought.take() {
                                    parts_values.push(json!({"thoughtSignature": thought}));
                                }
                                parts_values.push(json!({"text": text}));
                            }
                            ContentPart::ToolCall(tool_call) => {
                                let mut part_obj = serde_json::Map::new();
                                part_obj.insert(
                                    "functionCall".to_string(),
                                    json!({
                                        "name": tool_call.fn_name,
                                        "args": tool_call.fn_arguments,
                                    }),
                                );
                                match pending_thought.take() {
                                    Some(thought) => {
                                        part_obj.insert("thoughtSignature".to_string(), json!(thought));
                                    }
                                    None => {
                                        // For gemini-3 models, the first tool call gets a
                                        // placeholder signature when no real one exists —
                                        // presumably to bypass the API's thought-signature
                                        // validation; TODO confirm.
                                        let is_gemini_3 = model_iden.model_name.contains("gemini-3");
                                        if is_gemini_3 && is_first_tool_call {
                                            part_obj.insert(
                                                "thoughtSignature".to_string(),
                                                json!("skip_thought_signature_validator"),
                                            );
                                        }
                                    }
                                }
                                parts_values.push(Value::Object(part_obj));
                                is_first_tool_call = false;
                            }
                            ContentPart::ThoughtSignature(thought) => {
                                // Two signatures in a row: flush the previous one as-is.
                                if let Some(prev_thought) = pending_thought.take() {
                                    parts_values.push(json!({"thoughtSignature": prev_thought}));
                                }
                                pending_thought = Some(thought);
                            }
                            ContentPart::Binary(_) => {
                                // Assistant binary parts are not forwarded; only a
                                // pending thought gets flushed.
                                if let Some(thought) = pending_thought.take() {
                                    parts_values.push(json!({"thoughtSignature": thought}));
                                }
                            }
                            ContentPart::ToolResponse(_) => {
                                // Likewise: tool responses in assistant messages are
                                // not forwarded here.
                                if let Some(thought) = pending_thought.take() {
                                    parts_values.push(json!({"thoughtSignature": thought}));
                                }
                            }
                            ContentPart::Custom(_) => {}
                        }
                    }
                    // Trailing signature with no following part.
                    if let Some(thought) = pending_thought {
                        parts_values.push(json!({"thoughtSignature": thought}));
                    }
                    if !parts_values.is_empty() {
                        contents.push(json!({"role": "model", "parts": parts_values}));
                    }
                }
                ChatRole::Tool => {
                    let mut parts_values: Vec<Value> = Vec::new();
                    for part in msg.content {
                        match part {
                            ContentPart::ToolCall(tool_call) => {
                                parts_values.push(json!({
                                    "functionCall": {
                                        "name": tool_call.fn_name,
                                        "args": tool_call.fn_arguments,
                                    }
                                }));
                            }
                            ContentPart::ToolResponse(tool_response) => {
                                parts_values.push(json!({
                                    "functionResponse": {
                                        "name": tool_response.call_id,
                                        "response": {
                                            "name": tool_response.call_id,
                                            "content": tool_response.content,
                                        }
                                    }
                                }));
                            }
                            ContentPart::ThoughtSignature(thought) => {
                                parts_values.push(json!({
                                    "thoughtSignature": thought
                                }));
                            }
                            _ => {
                                return Err(Error::MessageContentTypeNotSupported {
                                    model_iden: model_iden.clone(),
                                    cause: "ChatRole::Tool can only contain ToolCall, ToolResponse, or Thought content parts",
                                });
                            }
                        }
                    }
                    // Tool results go back with role `user`.
                    contents.push(json!({"role": "user", "parts": parts_values}));
                }
            }
        }
        let system = if !systems.is_empty() {
            Some(systems.join("\n"))
        } else {
            None
        };
        // Built-in tools are standalone objects; user functions are grouped under a
        // single `functionDeclarations` entry.
        let tools = if let Some(req_tools) = chat_req.tools {
            let mut tools: Vec<Value> = Vec::new();
            let mut function_declarations: Vec<Value> = Vec::new();
            for req_tool in req_tools {
                match Self::tool_to_gemini_tool(req_tool)? {
                    GeminiTool::Builtin(value) => tools.push(value),
                    GeminiTool::User(value) => function_declarations.push(value),
                }
            }
            if !function_declarations.is_empty() {
                tools.push(json!({"functionDeclarations": function_declarations}));
            }
            Some(tools)
        } else {
            None
        };
        Ok(GeminiChatRequestParts {
            system,
            contents,
            tools,
        })
    }

    /// Converts a crate [`Tool`] into either a Gemini built-in tool or a user
    /// function declaration.
    fn tool_to_gemini_tool(tool: Tool) -> Result<GeminiTool> {
        let Tool {
            name,
            description,
            schema,
            config,
        } = tool;
        let name_str = match &name {
            // The generic WebSearch tool maps to Gemini's `googleSearch`.
            ToolName::WebSearch => "googleSearch",
            ToolName::Custom(name) => name.as_str(),
        };
        // Known built-ins are sent as `{ "<name>": <config|null> }` objects.
        if matches!(
            name_str,
            "googleSearch" | "googleSearchRetrieval" | "codeExecution" | "urlContext"
        ) {
            let config = match config {
                // WebSearch config is ignored here; an empty object is sent.
                Some(ToolConfig::WebSearch(_config)) => Some(json!({})),
                Some(ToolConfig::Custom(config)) => Some(config),
                None => None,
            };
            Ok(GeminiTool::Builtin(json!({ name_str: config })))
        } else {
            Ok(GeminiTool::User(json!({
                "name": name_str,
                "description": description,
                "parameters": schema,
            })))
        }
    }
}
/// A Gemini tool payload, split by how it is serialized into the request.
pub enum GeminiTool {
    /// Provider built-in tool (e.g. `googleSearch`, `codeExecution`), sent as its own object.
    Builtin(Value),
    /// User-defined function declaration, aggregated under `functionDeclarations`.
    User(Value),
}
/// Parsed (non-streaming) Gemini chat response: raw content parts plus usage.
pub(super) struct GeminiChatResponse {
    pub content: Vec<GeminiChatContent>,
    pub usage: Usage,
}
/// One content part extracted from a Gemini response candidate.
pub(super) enum GeminiChatContent {
    /// Plain text part.
    Text(String),
    /// Inline binary part (base64 data + mime type).
    Binary(Binary),
    /// A function call requested by the model.
    ToolCall(ToolCall),
    /// Reasoning ("thought") text.
    Reasoning(String),
    /// Opaque thought signature attached to a part.
    ThoughtSignature(String),
}
/// Request fragments produced from a [`ChatRequest`] before assembly into the payload.
struct GeminiChatRequestParts {
    // Combined system instructions (joined with newlines), if any.
    system: Option<String>,
    // The `contents` array of role/parts messages.
    contents: Vec<Value>,
    // The `tools` array, if the request declared tools.
    tools: Option<Vec<Value>>,
}