inference-lab 0.6.1

High-performance LLM inference simulator for analyzing serving systems
Documentation: https://github.com/doublewordai/inference-lab
{
  "name": "@doublewordai/inference-lab",
  "version": "0.6.1",
  "description": "High-performance LLM inference simulator for analyzing serving systems",
  "main": "pkg/inference_lab.js",
  "types": "pkg/inference_lab.d.ts",
  "files": [
    "pkg"
  ],
  "scripts": {
    "build": "wasm-pack build --target web --no-default-features --out-dir pkg",
    "build:cli": "cargo build --release",
    "publish:npm": "npm publish --access public"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/doublewordai/inference-lab.git"
  },
  "homepage": "https://github.com/doublewordai/inference-lab#readme",
  "bugs": {
    "url": "https://github.com/doublewordai/inference-lab/issues"
  },
  "keywords": [
    "llm",
    "inference",
    "simulator",
    "wasm",
    "performance"
  ],
  "author": "Doubleword",
  "license": "MIT",
  "publishConfig": {
    "access": "public"
  }
}