# influence 0.1.5
#
# A Rust CLI tool for downloading HuggingFace models and running local LLM inference
# Influence Configuration
# Copy this file to .env and customize as needed

# Model Configuration
INFLUENCE_MODEL_PATH=./models/TinyLlama_TinyLlama-1.1B-Chat-v1.0

# Generation Parameters
INFLUENCE_TEMPERATURE=0.7
INFLUENCE_TOP_P=0.9
# intentionally blank — presumably disables top-k sampling; confirm against the application's defaults
INFLUENCE_TOP_K=
INFLUENCE_REPEAT_PENALTY=1.1
INFLUENCE_MAX_TOKENS=512

# Device Configuration
INFLUENCE_DEVICE=auto
INFLUENCE_DEVICE_INDEX=0

# Server Configuration
INFLUENCE_PORT=8080

# Performance Tuning
INFLUENCE_WARMUP_TOKENS=6

# Download Configuration
INFLUENCE_MIRROR=https://hf-mirror.com
INFLUENCE_OUTPUT_DIR=./models