import os
from llmkit import LLMKitClient, Message, CompletionRequest
def main():
    """Demonstrate OpenRouter access via llmkit's OpenAI-compatible provider.

    Reads the API key from the OPENROUTER_API_KEY environment variable,
    configures an LLMKitClient against OpenRouter's OpenAI-compatible
    endpoint, and runs two short completion requests. Each prompt ends in
    a manual ``/no_think`` tag, which Qwen3 models need to skip their
    thinking phase (see the closing note printed by this script).

    Raises:
        ValueError: if OPENROUTER_API_KEY is not set.
    """
    api_key = os.environ.get("OPENROUTER_API_KEY")
    if not api_key:
        raise ValueError("OPENROUTER_API_KEY environment variable not set")

    client = LLMKitClient(
        providers={
            "openai_compatible": {
                "api_key": api_key,
                "base_url": "https://openrouter.ai/api/v1",
                "model_id": "openroutercompat",
            }
        }
    )
    # Model string is "<provider model_id>/<upstream model path>".
    model = "openroutercompat/qwen/qwen3-32b"

    print("OpenAI-Compatible Provider Example")
    print("=" * 50)
    print("Using OpenRouter via OpenAI-compatible endpoint\n")

    # The two demo requests differed only in their text; route both
    # through one helper instead of duplicating the request/print block.
    _run_request(
        client,
        model,
        label="Request with manual /no_think:",
        prompt="What is 2 + 2? Answer briefly. /no_think",
        system="You are a helpful assistant.",
    )
    _run_request(
        client,
        model,
        label="Another request:",
        prompt="Name 3 colors. /no_think",
        system="Be concise.",
    )

    print("=" * 50)
    print("OpenRouter works via OpenAI-compatible endpoint!")
    print("Note: Manual /no_think required for Qwen3 models.")


def _run_request(client, model, *, label, prompt, system, max_tokens=100):
    """Send one completion request and print the stripped text response.

    Args:
        client: configured LLMKitClient.
        model: model identifier string passed to CompletionRequest.
        label: heading printed above the request output.
        prompt: user message text.
        system: system prompt for the request.
        max_tokens: completion token cap (defaults to the demo's 100).
    """
    print(label)
    print("-" * 40)
    request = CompletionRequest(
        model=model,
        messages=[Message.user(prompt)],
        system=system,
        max_tokens=max_tokens,
    )
    response = client.complete(request)
    print(f"Response: {response.text_content().strip()}\n")
# Run the demo only when executed as a script, not when imported.
if __name__ == "__main__":
    main()