nighthawk 0.2.0

AI terminal autocomplete — zero config, zero login, zero telemetry
Documentation
{
  "name": "hyperfine",
  "description": "A command-line benchmarking tool",
  "options": [
    {
      "names": [
        "--warmup",
        "-w"
      ],
      "description": "Perform NUM warmup runs before the actual benchmarking starts",
      "takes_arg": true,
      "arg": {
        "name": "NUM"
      }
    },
    {
      "names": [
        "--min-runs",
        "-m"
      ],
      "description": "Perform at least NUM runs for each command",
      "takes_arg": true,
      "arg": {
        "name": "NUM"
      }
    },
    {
      "names": [
        "--max-runs",
        "-M"
      ],
      "description": "Perform at most NUM runs for each command. Default: no limit",
      "takes_arg": true,
      "arg": {
        "name": "NUM"
      }
    },
    {
      "names": [
        "--runs",
        "-r"
      ],
      "description": "Perform exactly NUM runs for each command",
      "takes_arg": true,
      "arg": {
        "name": "NUM"
      }
    },
    {
      "names": [
        "--setup",
        "-s"
      ],
      "description": "Execute cmd once before each set of timing runs",
      "takes_arg": true,
      "arg": {
        "name": "CMD"
      }
    },
    {
      "names": [
        "--prepare",
        "-p"
      ],
      "description": "Execute cmd before each timing run. This is useful for clearing disk caches, for example",
      "takes_arg": true,
      "arg": {
        "name": "CMD ..."
      }
    },
    {
      "names": [
        "--cleanup",
        "-c"
      ],
      "description": "Execute cmd after the completion of all benchmarking runs for each individual command to be benchmarked",
      "takes_arg": true,
      "arg": {
        "name": "CMD"
      }
    },
    {
      "names": [
        "--parameter-scan",
        "-P"
      ],
      "description": "Perform benchmark runs for each value in the range min..max",
      "takes_arg": true,
      "arg": {
        "name": "VAR"
      }
    },
    {
      "names": [
        "--parameter-step-size",
        "-D"
      ],
      "description": "This argument requires --parameter-scan to be specified as well. Traverse the range min..max in steps of delta",
      "takes_arg": true,
      "arg": {
        "name": "DELTA"
      }
    },
    {
      "names": [
        "--parameter-list",
        "-L"
      ],
      "description": "Perform benchmark runs for each value in the comma-separated list of values",
      "takes_arg": true,
      "arg": {
        "name": "VAR"
      }
    },
    {
      "names": [
        "--style"
      ],
      "description": "Set output style type",
      "takes_arg": true,
      "arg": {
        "name": "STYLE",
        "suggestions": [
          "basic",
          "full",
          "nocolor",
          "color",
          "none"
        ]
      }
    },
    {
      "names": [
        "--shell",
        "-S"
      ],
      "description": "Set the shell to use for executing benchmarked commands",
      "takes_arg": true,
      "arg": {
        "name": "SHELL",
        "suggestions": [
          "bash",
          "zsh",
          "sh",
          "fish",
          "pwsh",
          "powershell"
        ]
      }
    },
    {
      "names": [
        "--ignore-failure",
        "-i"
      ],
      "description": "Ignore non-zero exit codes of the benchmarked commands"
    },
    {
      "names": [
        "--time-unit",
        "-u"
      ],
      "description": "Set the time unit to use for the benchmark results",
      "takes_arg": true,
      "arg": {
        "name": "UNIT",
        "suggestions": [
          "millisecond",
          "second"
        ]
      }
    },
    {
      "names": [
        "--export-asciidoc"
      ],
      "description": "Export the timing summary statistics as an AsciiDoc table to the given file",
      "takes_arg": true,
      "arg": {
        "name": "FILE",
        "template": "filepaths"
      }
    },
    {
      "names": [
        "--export-csv"
      ],
      "description": "Export the timing summary statistics as CSV to the given file",
      "takes_arg": true,
      "arg": {
        "name": "FILE",
        "template": "filepaths"
      }
    },
    {
      "names": [
        "--export-json"
      ],
      "description": "Export the timing summary statistics and timings of individual runs as JSON to the given file",
      "takes_arg": true,
      "arg": {
        "name": "FILE",
        "template": "filepaths"
      }
    },
    {
      "names": [
        "--export-markdown"
      ],
      "description": "Export the timing summary statistics as a Markdown table to the given file",
      "takes_arg": true,
      "arg": {
        "name": "FILE",
        "template": "filepaths"
      }
    },
    {
      "names": [
        "--show-output"
      ],
      "description": "Print the stdout and stderr of the benchmark instead of suppressing it"
    },
    {
      "names": [
        "--command-name"
      ],
      "description": "Identify a command with the given name",
      "takes_arg": true,
      "arg": {
        "name": "NAME"
      }
    },
    {
      "names": [
        "--help"
      ],
      "description": "Print help message"
    },
    {
      "names": [
        "--version"
      ],
      "description": "Show version information"
    }
  ],
  "args": [
    {
      "name": "CMD",
      "description": "Command to benchmark"
    }
  ]
}