{
"openapi": "3.0.3",
"info": {
"title": "Feldera API",
"description": "\nWith Feldera, users create data pipelines out of SQL programs.\nA SQL program comprises tables and views, as well as the definition of\ninput and output connectors. A connector defines a data source that feeds\ninput data into tables, or a data sink that receives output data computed\nby the views.\n\n## Pipeline\n\nThe API is centered around the **pipeline**, which most importantly consists\nof the SQL program, but also has accompanying metadata and configuration parameters\n(e.g., compilation profile, number of workers, etc.).\n\n* A pipeline is identified and referred to by its user-provided unique name.\n* The pipeline program is asynchronously compiled when the pipeline is first created or\n when its program is subsequently updated.\n* Pipeline deployment is only possible once the program is successfully compiled.\n* A pipeline cannot be updated while it is deployed.\n\n## Concurrency\n\nEach pipeline has a version, which is incremented each time its core fields are updated.\nThe version is monotonically increasing. There is additionally a program version, which covers\nonly the program-related core fields and is used by the compiler to discern when to recompile.",
"license": {
"name": "MIT OR Apache-2.0"
},
"version": "0.44.0"
},
"paths": {
"/config/authentication": {
"get": {
"tags": [
"Configuration"
],
"summary": "Retrieve authentication provider configuration.",
"operationId": "get_config_authentication",
"responses": {
"200": {
"description": "The response body contains Authentication Provider configuration, or is empty if no auth is configured.",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/AuthProvider"
}
}
}
},
"500": {
"description": "Request failed.",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
}
}
}
},
"/v0/api_keys": {
"get": {
"tags": [
"API keys"
],
"summary": "Retrieve the list of API keys.",
"operationId": "list_api_keys",
"responses": {
"200": {
"description": "API keys retrieved successfully",
"content": {
"application/json": {
"schema": {
"type": "array",
"items": {
"$ref": "#/components/schemas/ApiKeyDescr"
}
}
}
}
},
"500": {
"description": "Request failed.",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
},
"post": {
"tags": [
"API keys"
],
"summary": "Create a new API key.",
"operationId": "post_api_key",
"requestBody": {
"description": "Request to create a new API key.",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/NewApiKeyRequest"
}
}
},
"required": true
},
"responses": {
"201": {
"description": "API key created successfully",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/NewApiKeyResponse"
}
}
}
},
"409": {
"description": "API key with that name already exists",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"example": {
"message": "An entity with this name already exists",
"error_code": "DuplicateName",
"details": null
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
}
},
"/v0/api_keys/{api_key_name}": {
"get": {
"tags": [
"API keys"
],
"summary": "Retrieve an API key.",
"operationId": "get_api_key",
"parameters": [
{
"name": "api_key_name",
"in": "path",
"description": "Unique API key name",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"description": "API key retrieved successfully",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ApiKeyDescr"
}
}
}
},
"404": {
"description": "API key with that name does not exist",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"example": {
"message": "Unknown API key 'non-existent-api-key'",
"error_code": "UnknownApiKey",
"details": {
"name": "non-existent-api-key"
}
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
},
"delete": {
"tags": [
"API keys"
],
"summary": "Delete an API key.",
"operationId": "delete_api_key",
"parameters": [
{
"name": "api_key_name",
"in": "path",
"description": "Unique API key name",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"description": "API key deleted successfully"
},
"404": {
"description": "API key with that name does not exist",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"example": {
"message": "Unknown API key 'non-existent-api-key'",
"error_code": "UnknownApiKey",
"details": {
"name": "non-existent-api-key"
}
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
}
},
"/v0/config": {
"get": {
"tags": [
"Configuration"
],
"summary": "Retrieve general configuration.",
"operationId": "get_config",
"responses": {
"200": {
"description": "The response body contains basic configuration information about this host.",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Configuration"
}
}
}
},
"500": {
"description": "Request failed.",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
}
},
"/v0/config/demos": {
"get": {
"tags": [
"Configuration"
],
"summary": "Retrieve the list of demos.",
"operationId": "get_config_demos",
"responses": {
"200": {
"description": "List of demos",
"content": {
"application/json": {
"schema": {
"type": "array",
"items": {
"$ref": "#/components/schemas/Demo"
}
}
}
}
},
"500": {
"description": "Failed to read demos from the demos directories",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
}
},
"/v0/metrics": {
"get": {
"tags": [
"Metrics"
],
"summary": "Retrieve the metrics of all running pipelines belonging to this tenant.",
"description": "The metrics are collected by making individual HTTP requests to `/metrics`\nendpoint of each pipeline, of which only successful responses are included\nin the returned list.",
"operationId": "get_metrics",
"responses": {
"200": {
"description": "Metrics of all running pipelines belonging to this tenant in Prometheus format",
"content": {
"text/plain": {
"schema": {
"type": "string",
"format": "binary"
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
}
},
"/v0/pipelines": {
"get": {
"tags": [
"Pipeline management"
],
"summary": "Retrieve the list of pipelines.",
"description": "Configure which fields are included using the `selector` query parameter.",
"operationId": "list_pipelines",
"parameters": [
{
"name": "selector",
"in": "query",
"description": "The `selector` parameter limits which fields are returned for a pipeline.\nLimiting which fields is particularly handy for instance when frequently\nmonitoring over low bandwidth connections while being only interested\nin pipeline status.",
"required": false,
"schema": {
"$ref": "#/components/schemas/PipelineFieldSelector"
}
}
],
"responses": {
"200": {
"description": "List of pipelines retrieved successfully",
"content": {
"application/json": {
"schema": {
"type": "array",
"items": {
"$ref": "#/components/schemas/PipelineSelectedInfo"
}
},
"example": [
{
"id": "67e55044-10b1-426f-9247-bb680e5fe0c8",
"name": "example1",
"description": "Description of the pipeline example1",
"created_at": "1970-01-01T00:00:00Z",
"version": 4,
"platform_version": "v0",
"runtime_config": {
"workers": 16,
"storage": {
"backend": {
"name": "default"
},
"min_storage_bytes": null,
"compression": "default",
"cache_mib": null
},
"fault_tolerance": null,
"cpu_profiler": true,
"tracing": false,
"tracing_endpoint_jaeger": "",
"min_batch_size_records": 0,
"max_buffering_delay_usecs": 0,
"resources": {
"cpu_cores_min": null,
"cpu_cores_max": null,
"memory_mb_min": null,
"memory_mb_max": null,
"storage_mb_max": null,
"storage_class": null
},
"clock_resolution_usecs": 100000,
"pin_cpus": [],
"provisioning_timeout_secs": null,
"max_parallel_connector_init": null
},
"program_code": "CREATE TABLE table1 ( col1 INT );",
"udf_rust": "",
"udf_toml": "",
"program_config": {
"profile": "optimized",
"cache": true
},
"program_version": 2,
"program_status": "Pending",
"program_status_since": "1970-01-01T00:00:00Z",
"program_error": {
"sql_compilation": null,
"rust_compilation": null,
"system_error": null
},
"program_info": null,
"deployment_status": "Shutdown",
"deployment_status_since": "1970-01-01T00:00:00Z",
"deployment_desired_status": "Shutdown",
"deployment_error": null,
"refresh_version": 4
},
{
"id": "67e55044-10b1-426f-9247-bb680e5fe0c9",
"name": "example2",
"description": "Description of the pipeline example2",
"created_at": "1970-01-01T00:00:00Z",
"version": 1,
"platform_version": "v0",
"runtime_config": {
"workers": 10,
"storage": {
"backend": {
"name": "default"
},
"min_storage_bytes": null,
"compression": "default",
"cache_mib": null
},
"fault_tolerance": null,
"cpu_profiler": false,
"tracing": false,
"tracing_endpoint_jaeger": "",
"min_batch_size_records": 100000,
"max_buffering_delay_usecs": 0,
"resources": {
"cpu_cores_min": null,
"cpu_cores_max": null,
"memory_mb_min": 1000,
"memory_mb_max": null,
"storage_mb_max": 10000,
"storage_class": null
},
"clock_resolution_usecs": 100000,
"pin_cpus": [],
"provisioning_timeout_secs": 1200,
"max_parallel_connector_init": 10
},
"program_code": "CREATE TABLE table2 ( col2 VARCHAR );",
"udf_rust": "",
"udf_toml": "",
"program_config": {
"profile": "unoptimized",
"cache": true
},
"program_version": 1,
"program_status": "Pending",
"program_status_since": "1970-01-01T00:00:00Z",
"program_error": {
"sql_compilation": null,
"rust_compilation": null,
"system_error": null
},
"program_info": null,
"deployment_status": "Shutdown",
"deployment_status_since": "1970-01-01T00:00:00Z",
"deployment_desired_status": "Shutdown",
"deployment_error": null,
"refresh_version": 1
}
]
}
}
},
"500": {
"description": "Request failed.",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
},
"post": {
"tags": [
"Pipeline management"
],
"summary": "Create a new pipeline.",
"operationId": "post_pipeline",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/PostPutPipeline"
},
"example": {
"name": "example1",
"description": "Description of the pipeline example1",
"runtime_config": {
"workers": 16,
"storage": {
"backend": {
"name": "default"
},
"min_storage_bytes": null,
"compression": "default",
"cache_mib": null
},
"fault_tolerance": null,
"cpu_profiler": true,
"tracing": false,
"tracing_endpoint_jaeger": "",
"min_batch_size_records": 0,
"max_buffering_delay_usecs": 0,
"resources": {
"cpu_cores_min": null,
"cpu_cores_max": null,
"memory_mb_min": null,
"memory_mb_max": null,
"storage_mb_max": null,
"storage_class": null
},
"clock_resolution_usecs": 100000,
"pin_cpus": [],
"provisioning_timeout_secs": null,
"max_parallel_connector_init": null
},
"program_code": "CREATE TABLE table1 ( col1 INT );",
"udf_rust": null,
"udf_toml": null,
"program_config": {
"profile": "optimized",
"cache": true
}
}
}
},
"required": true
},
"responses": {
"201": {
"description": "Pipeline successfully created",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/PipelineInfo"
},
"example": {
"id": "67e55044-10b1-426f-9247-bb680e5fe0c8",
"name": "example1",
"description": "Description of the pipeline example1",
"created_at": "1970-01-01T00:00:00Z",
"version": 4,
"platform_version": "v0",
"runtime_config": {
"workers": 16,
"storage": {
"backend": {
"name": "default"
},
"min_storage_bytes": null,
"compression": "default",
"cache_mib": null
},
"fault_tolerance": null,
"cpu_profiler": true,
"tracing": false,
"tracing_endpoint_jaeger": "",
"min_batch_size_records": 0,
"max_buffering_delay_usecs": 0,
"resources": {
"cpu_cores_min": null,
"cpu_cores_max": null,
"memory_mb_min": null,
"memory_mb_max": null,
"storage_mb_max": null,
"storage_class": null
},
"clock_resolution_usecs": 100000,
"pin_cpus": [],
"provisioning_timeout_secs": null,
"max_parallel_connector_init": null
},
"program_code": "CREATE TABLE table1 ( col1 INT );",
"udf_rust": "",
"udf_toml": "",
"program_config": {
"profile": "optimized",
"cache": true
},
"program_version": 2,
"program_status": "Pending",
"program_status_since": "1970-01-01T00:00:00Z",
"program_error": {
"sql_compilation": null,
"rust_compilation": null,
"system_error": null
},
"program_info": null,
"deployment_status": "Shutdown",
"deployment_status_since": "1970-01-01T00:00:00Z",
"deployment_desired_status": "Shutdown",
"deployment_error": null,
"refresh_version": 4
}
}
}
},
"400": {
"description": "Pipeline name does not match the required pattern",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"examples": {
"Name does not match pattern": {
"value": {
"message": "Name 'name-with-invalid-char-#' contains characters which are not lowercase (a-z), uppercase (A-Z), numbers (0-9), underscores (_) or hyphens (-)",
"error_code": "NameDoesNotMatchPattern",
"details": {
"name": "name-with-invalid-char-#"
}
}
}
}
}
}
},
"409": {
"description": "Cannot create pipeline as the name already exists",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"example": {
"message": "An entity with this name already exists",
"error_code": "DuplicateName",
"details": null
}
}
}
},
"500": {
"description": "Request failed.",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
}
},
"/v0/pipelines/{pipeline_name}": {
"get": {
"tags": [
"Pipeline management"
],
"summary": "Retrieve a pipeline.",
"description": "Configure which fields are included using the `selector` query parameter.",
"operationId": "get_pipeline",
"parameters": [
{
"name": "pipeline_name",
"in": "path",
"description": "Unique pipeline name",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "selector",
"in": "query",
"description": "The `selector` parameter limits which fields are returned for a pipeline.\nLimiting which fields is particularly handy for instance when frequently\nmonitoring over low bandwidth connections while being only interested\nin pipeline status.",
"required": false,
"schema": {
"$ref": "#/components/schemas/PipelineFieldSelector"
}
}
],
"responses": {
"200": {
"description": "Pipeline retrieved successfully",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/PipelineSelectedInfo"
},
"example": {
"id": "67e55044-10b1-426f-9247-bb680e5fe0c8",
"name": "example1",
"description": "Description of the pipeline example1",
"created_at": "1970-01-01T00:00:00Z",
"version": 4,
"platform_version": "v0",
"runtime_config": {
"workers": 16,
"storage": {
"backend": {
"name": "default"
},
"min_storage_bytes": null,
"compression": "default",
"cache_mib": null
},
"fault_tolerance": null,
"cpu_profiler": true,
"tracing": false,
"tracing_endpoint_jaeger": "",
"min_batch_size_records": 0,
"max_buffering_delay_usecs": 0,
"resources": {
"cpu_cores_min": null,
"cpu_cores_max": null,
"memory_mb_min": null,
"memory_mb_max": null,
"storage_mb_max": null,
"storage_class": null
},
"clock_resolution_usecs": 100000,
"pin_cpus": [],
"provisioning_timeout_secs": null,
"max_parallel_connector_init": null
},
"program_code": "CREATE TABLE table1 ( col1 INT );",
"udf_rust": "",
"udf_toml": "",
"program_config": {
"profile": "optimized",
"cache": true
},
"program_version": 2,
"program_status": "Pending",
"program_status_since": "1970-01-01T00:00:00Z",
"program_error": {
"sql_compilation": null,
"rust_compilation": null,
"system_error": null
},
"program_info": null,
"deployment_status": "Shutdown",
"deployment_status_since": "1970-01-01T00:00:00Z",
"deployment_desired_status": "Shutdown",
"deployment_error": null,
"refresh_version": 4
}
}
}
},
"404": {
"description": "Pipeline with that name does not exist",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"example": {
"message": "Unknown pipeline name 'non-existent-pipeline'",
"error_code": "UnknownPipelineName",
"details": {
"pipeline_name": "non-existent-pipeline"
}
}
}
}
},
"500": {
"description": "Request failed.",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
},
"put": {
"tags": [
"Pipeline management"
],
"summary": "Fully update a pipeline if it already exists, otherwise create a new pipeline.",
"operationId": "put_pipeline",
"parameters": [
{
"name": "pipeline_name",
"in": "path",
"description": "Unique pipeline name",
"required": true,
"schema": {
"type": "string"
}
}
],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/PostPutPipeline"
},
"example": {
"name": "example1",
"description": "Description of the pipeline example1",
"runtime_config": {
"workers": 16,
"storage": {
"backend": {
"name": "default"
},
"min_storage_bytes": null,
"compression": "default",
"cache_mib": null
},
"fault_tolerance": null,
"cpu_profiler": true,
"tracing": false,
"tracing_endpoint_jaeger": "",
"min_batch_size_records": 0,
"max_buffering_delay_usecs": 0,
"resources": {
"cpu_cores_min": null,
"cpu_cores_max": null,
"memory_mb_min": null,
"memory_mb_max": null,
"storage_mb_max": null,
"storage_class": null
},
"clock_resolution_usecs": 100000,
"pin_cpus": [],
"provisioning_timeout_secs": null,
"max_parallel_connector_init": null
},
"program_code": "CREATE TABLE table1 ( col1 INT );",
"udf_rust": null,
"udf_toml": null,
"program_config": {
"profile": "optimized",
"cache": true
}
}
}
},
"required": true
},
"responses": {
"200": {
"description": "Pipeline successfully updated",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/PipelineInfo"
},
"example": {
"id": "67e55044-10b1-426f-9247-bb680e5fe0c8",
"name": "example1",
"description": "Description of the pipeline example1",
"created_at": "1970-01-01T00:00:00Z",
"version": 4,
"platform_version": "v0",
"runtime_config": {
"workers": 16,
"storage": {
"backend": {
"name": "default"
},
"min_storage_bytes": null,
"compression": "default",
"cache_mib": null
},
"fault_tolerance": null,
"cpu_profiler": true,
"tracing": false,
"tracing_endpoint_jaeger": "",
"min_batch_size_records": 0,
"max_buffering_delay_usecs": 0,
"resources": {
"cpu_cores_min": null,
"cpu_cores_max": null,
"memory_mb_min": null,
"memory_mb_max": null,
"storage_mb_max": null,
"storage_class": null
},
"clock_resolution_usecs": 100000,
"pin_cpus": [],
"provisioning_timeout_secs": null,
"max_parallel_connector_init": null
},
"program_code": "CREATE TABLE table1 ( col1 INT );",
"udf_rust": "",
"udf_toml": "",
"program_config": {
"profile": "optimized",
"cache": true
},
"program_version": 2,
"program_status": "Pending",
"program_status_since": "1970-01-01T00:00:00Z",
"program_error": {
"sql_compilation": null,
"rust_compilation": null,
"system_error": null
},
"program_info": null,
"deployment_status": "Shutdown",
"deployment_status_since": "1970-01-01T00:00:00Z",
"deployment_desired_status": "Shutdown",
"deployment_error": null,
"refresh_version": 4
}
}
}
},
"201": {
"description": "Pipeline successfully created",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/PipelineInfo"
},
"example": {
"id": "67e55044-10b1-426f-9247-bb680e5fe0c8",
"name": "example1",
"description": "Description of the pipeline example1",
"created_at": "1970-01-01T00:00:00Z",
"version": 4,
"platform_version": "v0",
"runtime_config": {
"workers": 16,
"storage": {
"backend": {
"name": "default"
},
"min_storage_bytes": null,
"compression": "default",
"cache_mib": null
},
"fault_tolerance": null,
"cpu_profiler": true,
"tracing": false,
"tracing_endpoint_jaeger": "",
"min_batch_size_records": 0,
"max_buffering_delay_usecs": 0,
"resources": {
"cpu_cores_min": null,
"cpu_cores_max": null,
"memory_mb_min": null,
"memory_mb_max": null,
"storage_mb_max": null,
"storage_class": null
},
"clock_resolution_usecs": 100000,
"pin_cpus": [],
"provisioning_timeout_secs": null,
"max_parallel_connector_init": null
},
"program_code": "CREATE TABLE table1 ( col1 INT );",
"udf_rust": "",
"udf_toml": "",
"program_config": {
"profile": "optimized",
"cache": true
},
"program_version": 2,
"program_status": "Pending",
"program_status_since": "1970-01-01T00:00:00Z",
"program_error": {
"sql_compilation": null,
"rust_compilation": null,
"system_error": null
},
"program_info": null,
"deployment_status": "Shutdown",
"deployment_status_since": "1970-01-01T00:00:00Z",
"deployment_desired_status": "Shutdown",
"deployment_error": null,
"refresh_version": 4
}
}
}
},
"400": {
"description": "Pipeline is not fully shutdown or the name does not match the required pattern",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"examples": {
"Cannot update non-shutdown pipeline": {
"value": {
"message": "Cannot update a pipeline which is not fully shutdown. Shutdown the pipeline first by invoking the '/shutdown' endpoint.",
"error_code": "CannotUpdateNonShutdownPipeline",
"details": null
}
},
"Name does not match pattern": {
"value": {
"message": "Name 'name-with-invalid-char-#' contains characters which are not lowercase (a-z), uppercase (A-Z), numbers (0-9), underscores (_) or hyphens (-)",
"error_code": "NameDoesNotMatchPattern",
"details": {
"name": "name-with-invalid-char-#"
}
}
}
}
}
}
},
"409": {
"description": "Cannot rename pipeline as the new name already exists",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"example": {
"message": "An entity with this name already exists",
"error_code": "DuplicateName",
"details": null
}
}
}
},
"500": {
"description": "Request failed.",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
},
"delete": {
"tags": [
"Pipeline management"
],
"summary": "Delete a pipeline.",
"operationId": "delete_pipeline",
"parameters": [
{
"name": "pipeline_name",
"in": "path",
"description": "Unique pipeline name",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"description": "Pipeline successfully deleted"
},
"400": {
"description": "Pipeline needs to be shutdown to be deleted",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"example": {
"message": "Cannot delete a pipeline which is not fully shutdown. Shutdown the pipeline first by invoking the '/shutdown' endpoint.",
"error_code": "CannotDeleteNonShutdownPipeline",
"details": null
}
}
}
},
"404": {
"description": "Pipeline with that name does not exist",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"example": {
"message": "Unknown pipeline name 'non-existent-pipeline'",
"error_code": "UnknownPipelineName",
"details": {
"pipeline_name": "non-existent-pipeline"
}
}
}
}
},
"500": {
"description": "Request failed.",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
},
"patch": {
"tags": [
"Pipeline management"
],
"summary": "Partially update a pipeline.",
"operationId": "patch_pipeline",
"parameters": [
{
"name": "pipeline_name",
"in": "path",
"description": "Unique pipeline name",
"required": true,
"schema": {
"type": "string"
}
}
],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/PatchPipeline"
},
"example": {
"name": null,
"description": "This is a new description",
"runtime_config": null,
"program_code": "CREATE TABLE table3 ( col3 INT );",
"udf_rust": null,
"udf_toml": null,
"program_config": null
}
}
},
"required": true
},
"responses": {
"200": {
"description": "Pipeline successfully updated",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/PipelineInfo"
},
"example": {
"id": "67e55044-10b1-426f-9247-bb680e5fe0c8",
"name": "example1",
"description": "Description of the pipeline example1",
"created_at": "1970-01-01T00:00:00Z",
"version": 4,
"platform_version": "v0",
"runtime_config": {
"workers": 16,
"storage": {
"backend": {
"name": "default"
},
"min_storage_bytes": null,
"compression": "default",
"cache_mib": null
},
"fault_tolerance": null,
"cpu_profiler": true,
"tracing": false,
"tracing_endpoint_jaeger": "",
"min_batch_size_records": 0,
"max_buffering_delay_usecs": 0,
"resources": {
"cpu_cores_min": null,
"cpu_cores_max": null,
"memory_mb_min": null,
"memory_mb_max": null,
"storage_mb_max": null,
"storage_class": null
},
"clock_resolution_usecs": 100000,
"pin_cpus": [],
"provisioning_timeout_secs": null,
"max_parallel_connector_init": null
},
"program_code": "CREATE TABLE table1 ( col1 INT );",
"udf_rust": "",
"udf_toml": "",
"program_config": {
"profile": "optimized",
"cache": true
},
"program_version": 2,
"program_status": "Pending",
"program_status_since": "1970-01-01T00:00:00Z",
"program_error": {
"sql_compilation": null,
"rust_compilation": null,
"system_error": null
},
"program_info": null,
"deployment_status": "Shutdown",
"deployment_status_since": "1970-01-01T00:00:00Z",
"deployment_desired_status": "Shutdown",
"deployment_error": null,
"refresh_version": 4
}
}
}
},
"400": {
"description": "Pipeline is not fully shutdown or the name does not match the required pattern",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"examples": {
"Cannot update non-shutdown pipeline": {
"value": {
"message": "Cannot update a pipeline which is not fully shutdown. Shutdown the pipeline first by invoking the '/shutdown' endpoint.",
"error_code": "CannotUpdateNonShutdownPipeline",
"details": null
}
},
"Name does not match pattern": {
"value": {
"message": "Name 'name-with-invalid-char-#' contains characters which are not lowercase (a-z), uppercase (A-Z), numbers (0-9), underscores (_) or hyphens (-)",
"error_code": "NameDoesNotMatchPattern",
"details": {
"name": "name-with-invalid-char-#"
}
}
}
}
}
}
},
"404": {
"description": "Pipeline with that name does not exist",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"example": {
"message": "Unknown pipeline name 'non-existent-pipeline'",
"error_code": "UnknownPipelineName",
"details": {
"pipeline_name": "non-existent-pipeline"
}
}
}
}
},
"409": {
"description": "Cannot rename pipeline as the name already exists",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"example": {
"message": "An entity with this name already exists",
"error_code": "DuplicateName",
"details": null
}
}
}
},
"500": {
"description": "Request failed.",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
}
},
"/v0/pipelines/{pipeline_name}/checkpoint": {
"post": {
"tags": [
"Pipeline interaction"
],
"summary": "Checkpoint a running or paused pipeline.",
"operationId": "checkpoint_pipeline",
"parameters": [
{
"name": "pipeline_name",
"in": "path",
"description": "Unique pipeline name",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"description": "Checkpoint completed"
},
"404": {
"description": "Pipeline with that name does not exist",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"example": {
"message": "Unknown pipeline name 'non-existent-pipeline'",
"error_code": "UnknownPipelineName",
"details": {
"pipeline_name": "non-existent-pipeline"
}
}
}
}
},
"500": {
"description": "Request failed.",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
},
"503": {
"description": "Pipeline is unreachable, not deployed, or did not respond in time",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"examples": {
"Disconnected during response": {
"value": {
"message": "Unable to reach pipeline to interact due to: the pipeline disconnected while it was processing this HTTP request. This could be because the pipeline either (a) encountered a fatal error or panic, (b) was shutdown, or (c) experienced network issues -- retrying might help in the last case. Alternatively, check the pipeline logs.",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "the pipeline disconnected while it was processing this HTTP request. This could be because the pipeline either (a) encountered a fatal error or panic, (b) was shutdown, or (c) experienced network issues -- retrying might help in the last case. Alternatively, check the pipeline logs."
}
}
},
"Pipeline is currently unavailable": {
"value": {
"message": "Unable to reach pipeline to interact due to: deployment status is currently 'unavailable' -- wait for it to become 'running' or 'paused' again",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "deployment status is currently 'unavailable' -- wait for it to become 'running' or 'paused' again"
}
}
},
"Pipeline is not deployed": {
"value": {
"message": "Unable to interact with pipeline because the deployment status ('shutdown') is not one of the deployed statuses ('running', 'paused' or 'unavailable') -- to resolve this: wait for the pipeline to become running or paused",
"error_code": "PipelineInteractionNotDeployed",
"details": {
"status": "Shutdown",
"desired_status": "Running"
}
}
},
"Response timeout": {
"value": {
"message": "Unable to reach pipeline to interact due to: timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)"
}
}
}
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
}
},
"/v0/pipelines/{pipeline_name}/circuit_profile": {
"get": {
"tags": [
"Pipeline interaction"
],
"summary": "Retrieve the circuit performance profile of a running or paused pipeline.",
"operationId": "get_pipeline_circuit_profile",
"parameters": [
{
"name": "pipeline_name",
"in": "path",
"description": "Unique pipeline name",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"description": "Circuit performance profile",
"content": {
"application/zip": {
"schema": {
"type": "object"
}
}
}
},
"404": {
"description": "Pipeline with that name does not exist",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"example": {
"message": "Unknown pipeline name 'non-existent-pipeline'",
"error_code": "UnknownPipelineName",
"details": {
"pipeline_name": "non-existent-pipeline"
}
}
}
}
},
"500": {
"description": "Request failed.",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
},
"503": {
"description": "Pipeline is unreachable, not deployed, or did not respond in time",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"examples": {
"Disconnected during response": {
"value": {
"message": "Unable to reach pipeline to interact due to: the pipeline disconnected while it was processing this HTTP request. This could be because the pipeline either (a) encountered a fatal error or panic, (b) was shutdown, or (c) experienced network issues -- retrying might help in the last case. Alternatively, check the pipeline logs.",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "the pipeline disconnected while it was processing this HTTP request. This could be because the pipeline either (a) encountered a fatal error or panic, (b) was shutdown, or (c) experienced network issues -- retrying might help in the last case. Alternatively, check the pipeline logs."
}
}
},
"Pipeline is currently unavailable": {
"value": {
"message": "Unable to reach pipeline to interact due to: deployment status is currently 'unavailable' -- wait for it to become 'running' or 'paused' again",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "deployment status is currently 'unavailable' -- wait for it to become 'running' or 'paused' again"
}
}
},
"Pipeline is not deployed": {
"value": {
"message": "Unable to interact with pipeline because the deployment status ('shutdown') is not one of the deployed statuses ('running', 'paused' or 'unavailable') -- to resolve this: wait for the pipeline to become running or paused",
"error_code": "PipelineInteractionNotDeployed",
"details": {
"status": "Shutdown",
"desired_status": "Running"
}
}
},
"Response timeout": {
"value": {
"message": "Unable to reach pipeline to interact due to: timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)"
}
}
}
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
}
},
"/v0/pipelines/{pipeline_name}/egress/{table_name}": {
"post": {
"tags": [
"Pipeline interaction"
],
"summary": "Subscribe to a stream of updates from a SQL view or table.",
"description": "The pipeline responds with a continuous stream of changes to the specified\ntable or view, encoded using the format specified in the `?format=`\nparameter. Updates are split into `Chunk`s.\n\nThe pipeline continues sending updates until the client closes the\nconnection or the pipeline is shut down.",
"operationId": "http_output",
"parameters": [
{
"name": "pipeline_name",
"in": "path",
"description": "Unique pipeline name",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "table_name",
"in": "path",
"description": "SQL table name. Unquoted SQL names have to be capitalized. Quoted SQL names have to exactly match the case from the SQL program.",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "format",
"in": "query",
"description": "Output data format, e.g., 'csv' or 'json'.",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "array",
"in": "query",
"description": "Set to `true` to group updates in this stream into JSON arrays (used in conjunction with `format=json`). The default value is `false`",
"required": false,
"schema": {
"type": "boolean",
"nullable": true
}
},
{
"name": "backpressure",
"in": "query",
"description": "Apply backpressure on the pipeline when the HTTP client cannot receive data fast enough.\n When this flag is set to false (the default), the HTTP connector drops data chunks if the client is not keeping up with its output. This prevents a slow HTTP client from slowing down the entire pipeline.\n When the flag is set to true, the connector waits for the client to receive each chunk and blocks the pipeline if the client cannot keep up.",
"required": false,
"schema": {
"type": "boolean",
"nullable": true
}
}
],
"responses": {
"200": {
"description": "Connection to the endpoint successfully established. The body of the response contains a stream of data chunks.",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Chunk"
}
}
}
},
"400": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
},
"404": {
"description": "Pipeline and/or table/view with that name does not exist",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"examples": {
"Pipeline with that name does not exist": {
"value": {
"message": "Unknown pipeline name 'non-existent-pipeline'",
"error_code": "UnknownPipelineName",
"details": {
"pipeline_name": "non-existent-pipeline"
}
}
}
}
}
}
},
"500": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
},
"503": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"examples": {
"Disconnected during response": {
"value": {
"message": "Unable to reach pipeline to interact due to: the pipeline disconnected while it was processing this HTTP request. This could be because the pipeline either (a) encountered a fatal error or panic, (b) was shutdown, or (c) experienced network issues -- retrying might help in the last case. Alternatively, check the pipeline logs.",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "the pipeline disconnected while it was processing this HTTP request. This could be because the pipeline either (a) encountered a fatal error or panic, (b) was shutdown, or (c) experienced network issues -- retrying might help in the last case. Alternatively, check the pipeline logs."
}
}
},
"Pipeline is currently unavailable": {
"value": {
"message": "Unable to reach pipeline to interact due to: deployment status is currently 'unavailable' -- wait for it to become 'running' or 'paused' again",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "deployment status is currently 'unavailable' -- wait for it to become 'running' or 'paused' again"
}
}
},
"Pipeline is not deployed": {
"value": {
"message": "Unable to interact with pipeline because the deployment status ('shutdown') is not one of the deployed statuses ('running', 'paused' or 'unavailable') -- to resolve this: wait for the pipeline to become running or paused",
"error_code": "PipelineInteractionNotDeployed",
"details": {
"status": "Shutdown",
"desired_status": "Running"
}
}
},
"Response timeout": {
"value": {
"message": "Unable to reach pipeline to interact due to: timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)"
}
}
}
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
}
},
"/v0/pipelines/{pipeline_name}/heap_profile": {
"get": {
"tags": [
"Pipeline interaction"
],
"summary": "Retrieve the heap profile of a running or paused pipeline.",
"operationId": "get_pipeline_heap_profile",
"parameters": [
{
"name": "pipeline_name",
"in": "path",
"description": "Unique pipeline name",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"description": "Heap usage profile as a gzipped protobuf that can be inspected by the pprof tool",
"content": {
"application/protobuf": {
"schema": {
"type": "string",
"format": "binary"
}
}
}
},
"400": {
"description": "Getting a heap profile is not supported on this platform",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
},
"404": {
"description": "Pipeline with that name does not exist",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"example": {
"message": "Unknown pipeline name 'non-existent-pipeline'",
"error_code": "UnknownPipelineName",
"details": {
"pipeline_name": "non-existent-pipeline"
}
}
}
}
},
"500": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
},
"503": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"examples": {
"Disconnected during response": {
"value": {
"message": "Unable to reach pipeline to interact due to: the pipeline disconnected while it was processing this HTTP request. This could be because the pipeline either (a) encountered a fatal error or panic, (b) was shutdown, or (c) experienced network issues -- retrying might help in the last case. Alternatively, check the pipeline logs.",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "the pipeline disconnected while it was processing this HTTP request. This could be because the pipeline either (a) encountered a fatal error or panic, (b) was shutdown, or (c) experienced network issues -- retrying might help in the last case. Alternatively, check the pipeline logs."
}
}
},
"Pipeline is currently unavailable": {
"value": {
"message": "Unable to reach pipeline to interact due to: deployment status is currently 'unavailable' -- wait for it to become 'running' or 'paused' again",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "deployment status is currently 'unavailable' -- wait for it to become 'running' or 'paused' again"
}
}
},
"Pipeline is not deployed": {
"value": {
"message": "Unable to interact with pipeline because the deployment status ('shutdown') is not one of the deployed statuses ('running', 'paused' or 'unavailable') -- to resolve this: wait for the pipeline to become running or paused",
"error_code": "PipelineInteractionNotDeployed",
"details": {
"status": "Shutdown",
"desired_status": "Running"
}
}
},
"Response timeout": {
"value": {
"message": "Unable to reach pipeline to interact due to: timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)"
}
}
}
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
}
},
"/v0/pipelines/{pipeline_name}/ingress/{table_name}": {
"post": {
"tags": [
"Pipeline interaction"
],
"summary": "Push data to a SQL table.",
"description": "The client sends data encoded using the format specified in the `?format=`\nparameter as a body of the request. The contents of the data must match\nthe SQL table schema specified in `table_name`\n\nThe pipeline ingests data as it arrives without waiting for the end of\nthe request. Successful HTTP response indicates that all data has been\ningested successfully.",
"operationId": "http_input",
"parameters": [
{
"name": "pipeline_name",
"in": "path",
"description": "Unique pipeline name",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "table_name",
"in": "path",
"description": "SQL table name. Unquoted SQL names have to be capitalized. Quoted SQL names have to exactly match the case from the SQL program.",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "force",
"in": "query",
"description": "When `true`, push data to the pipeline even if the pipeline is paused. The default value is `false`",
"required": true,
"schema": {
"type": "boolean"
}
},
{
"name": "format",
"in": "query",
"description": "Input data format, e.g., 'csv' or 'json'.",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "array",
"in": "query",
"description": "Set to `true` if updates in this stream are packaged into JSON arrays (used in conjunction with `format=json`). The default values is `false`.",
"required": false,
"schema": {
"type": "boolean",
"nullable": true
}
},
{
"name": "update_format",
"in": "query",
"description": "JSON data change event format (used in conjunction with `format=json`). The default value is 'insert_delete'.",
"required": false,
"schema": {
"allOf": [
{
"$ref": "#/components/schemas/JsonUpdateFormat"
}
],
"nullable": true
}
}
],
"requestBody": {
"description": "Input data in the specified format",
"content": {
"text/plain": {
"schema": {
"type": "string"
}
}
},
"required": true
},
"responses": {
"200": {
"description": "Data successfully delivered to the pipeline"
},
"400": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
},
"404": {
"description": "Pipeline and/or table with that name does not exist",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"examples": {
"Pipeline with that name does not exist": {
"value": {
"message": "Unknown pipeline name 'non-existent-pipeline'",
"error_code": "UnknownPipelineName",
"details": {
"pipeline_name": "non-existent-pipeline"
}
}
}
}
}
}
},
"500": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
},
"503": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"examples": {
"Disconnected during response": {
"value": {
"message": "Unable to reach pipeline to interact due to: the pipeline disconnected while it was processing this HTTP request. This could be because the pipeline either (a) encountered a fatal error or panic, (b) was shutdown, or (c) experienced network issues -- retrying might help in the last case. Alternatively, check the pipeline logs.",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "the pipeline disconnected while it was processing this HTTP request. This could be because the pipeline either (a) encountered a fatal error or panic, (b) was shutdown, or (c) experienced network issues -- retrying might help in the last case. Alternatively, check the pipeline logs."
}
}
},
"Pipeline is currently unavailable": {
"value": {
"message": "Unable to reach pipeline to interact due to: deployment status is currently 'unavailable' -- wait for it to become 'running' or 'paused' again",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "deployment status is currently 'unavailable' -- wait for it to become 'running' or 'paused' again"
}
}
},
"Pipeline is not deployed": {
"value": {
"message": "Unable to interact with pipeline because the deployment status ('shutdown') is not one of the deployed statuses ('running', 'paused' or 'unavailable') -- to resolve this: wait for the pipeline to become running or paused",
"error_code": "PipelineInteractionNotDeployed",
"details": {
"status": "Shutdown",
"desired_status": "Running"
}
}
},
"Response timeout": {
"value": {
"message": "Unable to reach pipeline to interact due to: timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)"
}
}
}
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
}
},
"/v0/pipelines/{pipeline_name}/logs": {
"get": {
"tags": [
"Pipeline interaction"
],
"summary": "Retrieve logs of a (non-shutdown) pipeline as a stream.",
"description": "The logs stream catches up to the extent of the internally configured per-pipeline\ncircular logs buffer (limited to a certain byte size and number of lines, whichever\nis reached first). After the catch-up, new lines are pushed whenever they become\navailable.\n\nThe logs stream will end when the pipeline is shut down. It is also possible for the\nlogs stream to end prematurely due to the runner back-end (temporarily) losing\nconnectivity to the pipeline instance (e.g., process). In this case, it is needed\nto issue again a new request to this endpoint.",
"operationId": "get_pipeline_logs",
"parameters": [
{
"name": "pipeline_name",
"in": "path",
"description": "Unique pipeline name",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"description": "Pipeline logs retrieved successfully",
"content": {
"text/plain": {
"schema": {
"type": "string",
"format": "binary"
}
}
}
},
"404": {
"description": "Pipeline with that name does not exist",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"example": {
"message": "Unknown pipeline name 'non-existent-pipeline'",
"error_code": "UnknownPipelineName",
"details": {
"pipeline_name": "non-existent-pipeline"
}
}
}
}
},
"500": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
},
"503": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"examples": {
"Pipeline is shutdown": {
"value": {
"message": "Unable to interact with pipeline runner because the deployment status is 'shutdown' -- start the pipeline or wait if it has already been started",
"error_code": "RunnerInteractionShutdown",
"details": null
}
},
"Runner response timeout": {
"value": {
"message": "Unable to reach pipeline runner to interact due to: timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)",
"error_code": "RunnerInteractionUnreachable",
"details": {
"error": "timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)"
}
}
}
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
}
},
"/v0/pipelines/{pipeline_name}/metrics": {
"get": {
"tags": [
"Pipeline interaction"
],
"summary": "Retrieve circuit metrics of a running or paused pipeline.",
"operationId": "get_pipeline_metrics",
"parameters": [
{
"name": "pipeline_name",
"in": "path",
"description": "Unique pipeline name",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "format",
"in": "query",
"required": false,
"schema": {
"$ref": "#/components/schemas/MetricsFormat"
}
}
],
"responses": {
"200": {
"description": "Pipeline circuit metrics retrieved successfully",
"content": {
"application/json": {
"schema": {
"type": "object"
}
}
}
},
"404": {
"description": "Pipeline with that name does not exist",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"example": {
"message": "Unknown pipeline name 'non-existent-pipeline'",
"error_code": "UnknownPipelineName",
"details": {
"pipeline_name": "non-existent-pipeline"
}
}
}
}
},
"500": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
},
"503": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"examples": {
"Disconnected during response": {
"value": {
"message": "Unable to reach pipeline to interact due to: the pipeline disconnected while it was processing this HTTP request. This could be because the pipeline either (a) encountered a fatal error or panic, (b) was shutdown, or (c) experienced network issues -- retrying might help in the last case. Alternatively, check the pipeline logs.",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "the pipeline disconnected while it was processing this HTTP request. This could be because the pipeline either (a) encountered a fatal error or panic, (b) was shutdown, or (c) experienced network issues -- retrying might help in the last case. Alternatively, check the pipeline logs."
}
}
},
"Pipeline is currently unavailable": {
"value": {
"message": "Unable to reach pipeline to interact due to: deployment status is currently 'unavailable' -- wait for it to become 'running' or 'paused' again",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "deployment status is currently 'unavailable' -- wait for it to become 'running' or 'paused' again"
}
}
},
"Pipeline is not deployed": {
"value": {
"message": "Unable to interact with pipeline because the deployment status ('shutdown') is not one of the deployed statuses ('running', 'paused' or 'unavailable') -- to resolve this: wait for the pipeline to become running or paused",
"error_code": "PipelineInteractionNotDeployed",
"details": {
"status": "Shutdown",
"desired_status": "Running"
}
}
},
"Response timeout": {
"value": {
"message": "Unable to reach pipeline to interact due to: timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)"
}
}
}
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
}
},
"/v0/pipelines/{pipeline_name}/query": {
"get": {
"tags": [
"Pipeline interaction"
],
"summary": "Execute an ad-hoc SQL query in a running or paused pipeline.",
"description": "The evaluation is not incremental.",
"operationId": "pipeline_adhoc_sql",
"parameters": [
{
"name": "pipeline_name",
"in": "path",
"description": "Unique pipeline name",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "sql",
"in": "query",
"description": "SQL query to execute",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "format",
"in": "query",
"description": "Input data format, e.g., 'text', 'json' or 'parquet'",
"required": true,
"schema": {
"$ref": "#/components/schemas/AdHocResultFormat"
}
}
],
"responses": {
"200": {
"description": "Ad-hoc SQL query result",
"content": {
"text/plain": {
"schema": {
"type": "string",
"format": "binary"
}
}
}
},
"400": {
"description": "Invalid SQL query",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
},
"404": {
"description": "Pipeline with that name does not exist",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"example": {
"message": "Unknown pipeline name 'non-existent-pipeline'",
"error_code": "UnknownPipelineName",
"details": {
"pipeline_name": "non-existent-pipeline"
}
}
}
}
},
"500": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
},
"503": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"examples": {
"Disconnected during response": {
"value": {
"message": "Unable to reach pipeline to interact due to: the pipeline disconnected while it was processing this HTTP request. This could be because the pipeline either (a) encountered a fatal error or panic, (b) was shutdown, or (c) experienced network issues -- retrying might help in the last case. Alternatively, check the pipeline logs.",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "the pipeline disconnected while it was processing this HTTP request. This could be because the pipeline either (a) encountered a fatal error or panic, (b) was shutdown, or (c) experienced network issues -- retrying might help in the last case. Alternatively, check the pipeline logs."
}
}
},
"Pipeline is currently unavailable": {
"value": {
"message": "Unable to reach pipeline to interact due to: deployment status is currently 'unavailable' -- wait for it to become 'running' or 'paused' again",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "deployment status is currently 'unavailable' -- wait for it to become 'running' or 'paused' again"
}
}
},
"Pipeline is not deployed": {
"value": {
"message": "Unable to interact with pipeline because the deployment status ('shutdown') is not one of the deployed statuses ('running', 'paused' or 'unavailable') -- to resolve this: wait for the pipeline to become running or paused",
"error_code": "PipelineInteractionNotDeployed",
"details": {
"status": "Shutdown",
"desired_status": "Running"
}
}
},
"Response timeout": {
"value": {
"message": "Unable to reach pipeline to interact due to: timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)"
}
}
}
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
}
},
"/v0/pipelines/{pipeline_name}/stats": {
"get": {
"tags": [
"Pipeline interaction"
],
"summary": "Retrieve statistics (e.g., performance counters) of a running or paused pipeline.",
"operationId": "get_pipeline_stats",
"parameters": [
{
"name": "pipeline_name",
"in": "path",
"description": "Unique pipeline name",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"description": "Pipeline statistics retrieved successfully",
"content": {
"application/json": {
"schema": {
"type": "object"
}
}
}
},
"404": {
"description": "Pipeline with that name does not exist",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"example": {
"message": "Unknown pipeline name 'non-existent-pipeline'",
"error_code": "UnknownPipelineName",
"details": {
"pipeline_name": "non-existent-pipeline"
}
}
}
}
},
"500": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
},
"503": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"examples": {
"Disconnected during response": {
"value": {
"message": "Unable to reach pipeline to interact due to: the pipeline disconnected while it was processing this HTTP request. This could be because the pipeline either (a) encountered a fatal error or panic, (b) was shutdown, or (c) experienced network issues -- retrying might help in the last case. Alternatively, check the pipeline logs.",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "the pipeline disconnected while it was processing this HTTP request. This could be because the pipeline either (a) encountered a fatal error or panic, (b) was shutdown, or (c) experienced network issues -- retrying might help in the last case. Alternatively, check the pipeline logs."
}
}
},
"Pipeline is currently unavailable": {
"value": {
"message": "Unable to reach pipeline to interact due to: deployment status is currently 'unavailable' -- wait for it to become 'running' or 'paused' again",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "deployment status is currently 'unavailable' -- wait for it to become 'running' or 'paused' again"
}
}
},
"Pipeline is not deployed": {
"value": {
"message": "Unable to interact with pipeline because the deployment status ('shutdown') is not one of the deployed statuses ('running', 'paused' or 'unavailable') -- to resolve this: wait for the pipeline to become running or paused",
"error_code": "PipelineInteractionNotDeployed",
"details": {
"status": "Shutdown",
"desired_status": "Running"
}
}
},
"Response timeout": {
"value": {
"message": "Unable to reach pipeline to interact due to: timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)"
}
}
}
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
}
},
"/v0/pipelines/{pipeline_name}/tables/{table_name}/connectors/{connector_name}/stats": {
"get": {
"tags": [
"Pipeline interaction"
],
"summary": "Retrieve the status of an input connector.",
"operationId": "get_pipeline_input_connector_status",
"parameters": [
{
"name": "pipeline_name",
"in": "path",
"description": "Unique pipeline name",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "table_name",
"in": "path",
"description": "Unique table name",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "connector_name",
"in": "path",
"description": "Unique input connector name",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"description": "Input connector status retrieved successfully",
"content": {
"application/json": {
"schema": {
"type": "object"
}
}
}
},
"404": {
"description": "Pipeline, table and/or input connector with that name does not exist",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"examples": {
"Pipeline with that name does not exist": {
"value": {
"message": "Unknown pipeline name 'non-existent-pipeline'",
"error_code": "UnknownPipelineName",
"details": {
"pipeline_name": "non-existent-pipeline"
}
}
}
}
}
}
},
"500": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
},
"503": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"examples": {
"Disconnected during response": {
"value": {
"message": "Unable to reach pipeline to interact due to: the pipeline disconnected while it was processing this HTTP request. This could be because the pipeline either (a) encountered a fatal error or panic, (b) was shutdown, or (c) experienced network issues -- retrying might help in the last case. Alternatively, check the pipeline logs.",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "the pipeline disconnected while it was processing this HTTP request. This could be because the pipeline either (a) encountered a fatal error or panic, (b) was shutdown, or (c) experienced network issues -- retrying might help in the last case. Alternatively, check the pipeline logs."
}
}
},
"Pipeline is currently unavailable": {
"value": {
"message": "Unable to reach pipeline to interact due to: deployment status is currently 'unavailable' -- wait for it to become 'running' or 'paused' again",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "deployment status is currently 'unavailable' -- wait for it to become 'running' or 'paused' again"
}
}
},
"Pipeline is not deployed": {
"value": {
"message": "Unable to interact with pipeline because the deployment status ('shutdown') is not one of the deployed statuses ('running', 'paused' or 'unavailable') -- to resolve this: wait for the pipeline to become running or paused",
"error_code": "PipelineInteractionNotDeployed",
"details": {
"status": "Shutdown",
"desired_status": "Running"
}
}
},
"Response timeout": {
"value": {
"message": "Unable to reach pipeline to interact due to: timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)"
}
}
}
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
}
},
"/v0/pipelines/{pipeline_name}/tables/{table_name}/connectors/{connector_name}/{action}": {
"post": {
"tags": [
"Pipeline interaction"
],
"summary": "Start (resume) or pause the input connector.",
"description": "The following values of the `action` argument are accepted: `start` and `pause`.\n\nInput connectors can be in either the `Running` or `Paused` state. By default,\nconnectors are initialized in the `Running` state when a pipeline is deployed.\nIn this state, the connector actively fetches data from its configured data\nsource and forwards it to the pipeline. If needed, a connector can be created\nin the `Paused` state by setting its\n[`paused`](https://docs.feldera.com/connectors/#generic-attributes) property\nto `true`. When paused, the connector remains idle until reactivated using the\n`start` command. Conversely, a connector in the `Running` state can be paused\nat any time by issuing the `pause` command.\n\nThe current connector state can be retrieved via the\n`GET /v0/pipelines/{pipeline_name}/stats` endpoint.\n\nNote that only if both the pipeline *and* the connector state is `Running`,\nis the input connector active.\n```text\nPipeline state Connector state Connector is active?\n-------------- --------------- --------------------\nPaused Paused No\nPaused Running No\nRunning Paused No\nRunning Running Yes\n```",
"operationId": "post_pipeline_input_connector_action",
"parameters": [
{
"name": "pipeline_name",
"in": "path",
"description": "Unique pipeline name",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "table_name",
"in": "path",
"description": "Unique table name",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "connector_name",
"in": "path",
"description": "Unique input connector name",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "action",
"in": "path",
"description": "Input connector action (one of: start, pause)",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"description": "Action has been processed"
},
"404": {
"description": "Pipeline, table and/or input connector with that name does not exist",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"examples": {
"Pipeline with that name does not exist": {
"value": {
"message": "Unknown pipeline name 'non-existent-pipeline'",
"error_code": "UnknownPipelineName",
"details": {
"pipeline_name": "non-existent-pipeline"
}
}
}
}
}
}
},
"500": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
},
"503": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"examples": {
"Disconnected during response": {
"value": {
"message": "Unable to reach pipeline to interact due to: the pipeline disconnected while it was processing this HTTP request. This could be because the pipeline either (a) encountered a fatal error or panic, (b) was shutdown, or (c) experienced network issues -- retrying might help in the last case. Alternatively, check the pipeline logs.",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "the pipeline disconnected while it was processing this HTTP request. This could be because the pipeline either (a) encountered a fatal error or panic, (b) was shutdown, or (c) experienced network issues -- retrying might help in the last case. Alternatively, check the pipeline logs."
}
}
},
"Pipeline is currently unavailable": {
"value": {
"message": "Unable to reach pipeline to interact due to: deployment status is currently 'unavailable' -- wait for it to become 'running' or 'paused' again",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "deployment status is currently 'unavailable' -- wait for it to become 'running' or 'paused' again"
}
}
},
"Pipeline is not deployed": {
"value": {
"message": "Unable to interact with pipeline because the deployment status ('shutdown') is not one of the deployed statuses ('running', 'paused' or 'unavailable') -- to resolve this: wait for the pipeline to become running or paused",
"error_code": "PipelineInteractionNotDeployed",
"details": {
"status": "Shutdown",
"desired_status": "Running"
}
}
},
"Response timeout": {
"value": {
"message": "Unable to reach pipeline to interact due to: timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)"
}
}
}
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
}
},
"/v0/pipelines/{pipeline_name}/views/{view_name}/connectors/{connector_name}/stats": {
"get": {
"tags": [
"Pipeline interaction"
],
"summary": "Retrieve the status of an output connector.",
"operationId": "get_pipeline_output_connector_status",
"parameters": [
{
"name": "pipeline_name",
"in": "path",
"description": "Unique pipeline name",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "view_name",
"in": "path",
"description": "Unique SQL view name",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "connector_name",
"in": "path",
"description": "Unique output connector name",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"description": "Output connector status retrieved successfully",
"content": {
"application/json": {
"schema": {
"type": "object"
}
}
}
},
"404": {
"description": "Pipeline, view and/or output connector with that name does not exist",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"examples": {
"Pipeline with that name does not exist": {
"value": {
"message": "Unknown pipeline name 'non-existent-pipeline'",
"error_code": "UnknownPipelineName",
"details": {
"pipeline_name": "non-existent-pipeline"
}
}
}
}
}
}
},
"500": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
},
"503": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"examples": {
"Disconnected during response": {
"value": {
"message": "Unable to reach pipeline to interact due to: the pipeline disconnected while it was processing this HTTP request. This could be because the pipeline either (a) encountered a fatal error or panic, (b) was shutdown, or (c) experienced network issues -- retrying might help in the last case. Alternatively, check the pipeline logs.",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "the pipeline disconnected while it was processing this HTTP request. This could be because the pipeline either (a) encountered a fatal error or panic, (b) was shutdown, or (c) experienced network issues -- retrying might help in the last case. Alternatively, check the pipeline logs."
}
}
},
"Pipeline is currently unavailable": {
"value": {
"message": "Unable to reach pipeline to interact due to: deployment status is currently 'unavailable' -- wait for it to become 'running' or 'paused' again",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "deployment status is currently 'unavailable' -- wait for it to become 'running' or 'paused' again"
}
}
},
"Pipeline is not deployed": {
"value": {
"message": "Unable to interact with pipeline because the deployment status ('shutdown') is not one of the deployed statuses ('running', 'paused' or 'unavailable') -- to resolve this: wait for the pipeline to become running or paused",
"error_code": "PipelineInteractionNotDeployed",
"details": {
"status": "Shutdown",
"desired_status": "Running"
}
}
},
"Response timeout": {
"value": {
"message": "Unable to reach pipeline to interact due to: timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)",
"error_code": "PipelineInteractionUnreachable",
"details": {
"error": "timeout (10s) was reached: this means the pipeline took too long to respond -- this can simply be because the request was too difficult to process in time, or other reasons (e.g., deadlock): the pipeline logs might contain additional information (original send request error: Timeout while waiting for response)"
}
}
}
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
}
},
"/v0/pipelines/{pipeline_name}/{action}": {
"post": {
"tags": [
"Pipeline management"
],
"summary": "Sets the desired deployment state of a pipeline.",
"description": "The desired state is set based on the `action` path parameter:\n- `/start` sets desired state to `Running`\n- `/pause` sets desired state to `Paused`\n- `/shutdown` sets desired state to `Shutdown`\n\nThe endpoint returns immediately after setting the desired state.\nThe relevant procedure to get to the desired state is performed asynchronously,\nand, as such, progress should be monitored by polling the pipeline using the\n`GET` endpoints.\n\nNote the following:\n- A shutdown pipeline can be started through calling either `/start` or `/pause`\n- Both starting as running and resuming a pipeline is done by calling `/start`\n- Both starting as paused and pausing a pipeline is done by calling `/pause`",
"operationId": "post_pipeline_action",
"parameters": [
{
"name": "pipeline_name",
"in": "path",
"description": "Unique pipeline name",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "action",
"in": "path",
"description": "Pipeline action (one of: start, pause, shutdown)",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"202": {
"description": "Action is accepted and is being performed"
},
"400": {
"description": "Action could not be performed",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"examples": {
"Illegal action": {
"value": {
"message": "Deployment status (current: 'ShuttingDown', desired: 'Shutdown') cannot have desired changed to 'Running'. Cannot restart the pipeline while it is shutting down. Wait for the shutdown to complete before starting a new instance of the pipeline.",
"error_code": "IllegalPipelineAction",
"details": {
"hint": "Cannot restart the pipeline while it is shutting down. Wait for the shutdown to complete before starting a new instance of the pipeline.",
"status": "ShuttingDown",
"desired_status": "Shutdown",
"requested_desired_status": "Running"
}
}
},
"Invalid action": {
"value": {
"message": "Invalid pipeline action 'dance'; valid actions are: 'start', 'pause', or 'shutdown'",
"error_code": "InvalidPipelineAction",
"details": {
"action": "dance"
}
}
}
}
}
}
},
"404": {
"description": "Pipeline with that name does not exist",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
},
"example": {
"message": "Unknown pipeline name 'non-existent-pipeline'",
"error_code": "UnknownPipelineName",
"details": {
"pipeline_name": "non-existent-pipeline"
}
}
}
}
},
"500": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
}
},
"security": [
{
"JSON web token (JWT) or API key": []
}
]
}
}
},
"components": {
"schemas": {
"AdHocInputConfig": {
"type": "object",
"description": "Configuration for inserting data with ad-hoc queries\n\nAn ad-hoc input adapters cannot be usefully configured as part of pipeline\nconfiguration. Instead, use ad-hoc queries through the UI, the REST API, or\nthe `fda` command-line tool.",
"required": [
"name"
],
"properties": {
"name": {
"type": "string",
"description": "Autogenerated name."
}
}
},
"AdHocResultFormat": {
"type": "string",
"description": "URL-encoded `format` argument to the `/query` endpoint.",
"enum": [
"text",
"json",
"parquet",
"arrow_ipc"
]
},
"AdhocQueryArgs": {
"type": "object",
"description": "URL-encoded arguments to the `/query` endpoint.",
"required": [
"sql"
],
"properties": {
"format": {
"$ref": "#/components/schemas/AdHocResultFormat"
},
"sql": {
"type": "string",
"description": "The SQL query to run."
}
}
},
"ApiKeyDescr": {
"type": "object",
"description": "API key descriptor.",
"required": [
"id",
"name",
"scopes"
],
"properties": {
"id": {
"$ref": "#/components/schemas/ApiKeyId"
},
"name": {
"type": "string"
},
"scopes": {
"type": "array",
"items": {
"$ref": "#/components/schemas/ApiPermission"
}
}
}
},
"ApiKeyId": {
"type": "string",
"format": "uuid",
"description": "API key identifier."
},
"ApiPermission": {
"type": "string",
"description": "Permission types for invoking API endpoints.",
"enum": [
"Read",
"Write"
]
},
"AuthProvider": {
"oneOf": [
{
"type": "object",
"required": [
"AwsCognito"
],
"properties": {
"AwsCognito": {
"$ref": "#/components/schemas/ProviderAwsCognito"
}
}
},
{
"type": "object",
"required": [
"GoogleIdentity"
],
"properties": {
"GoogleIdentity": {
"$ref": "#/components/schemas/ProviderGoogleIdentity"
}
}
}
]
},
"Chunk": {
"type": "object",
"description": "A set of updates to a SQL table or view.\n\nThe `sequence_number` field stores the offset of the chunk relative to the\nstart of the stream and can be used to implement reliable delivery.\nThe payload is stored in the `bin_data`, `text_data`, or `json_data` field\ndepending on the data format used.",
"required": [
"sequence_number"
],
"properties": {
"bin_data": {
"type": "string",
"format": "binary",
"description": "Base64 encoded binary payload, e.g., bincode.",
"nullable": true
},
"json_data": {
"type": "object",
"description": "JSON payload.",
"nullable": true
},
"sequence_number": {
"type": "integer",
"format": "int64",
"minimum": 0
},
"text_data": {
"type": "string",
"description": "Text payload, e.g., CSV.",
"nullable": true
}
}
},
"ColumnType": {
"type": "object",
"description": "A SQL column type description.\n\nMatches the Calcite JSON format.",
"required": [
"nullable"
],
"properties": {
"component": {
"allOf": [
{
"$ref": "#/components/schemas/ColumnType"
}
],
"nullable": true
},
"fields": {
"type": "array",
"items": {
"$ref": "#/components/schemas/Field"
},
"description": "The fields of the type (if available).\n\nFor example this would specify the fields of a `CREATE TYPE` construct.\n\n```sql\nCREATE TYPE person_typ AS (\nfirstname VARCHAR(30),\nlastname VARCHAR(30),\naddress ADDRESS_TYP\n);\n```\n\nWould lead to the following `fields` value:\n\n```sql\n[\nColumnType { name: \"firstname, ... },\nColumnType { name: \"lastname\", ... },\nColumnType { name: \"address\", fields: [ ... ] }\n]\n```",
"nullable": true
},
"key": {
"allOf": [
{
"$ref": "#/components/schemas/ColumnType"
}
],
"nullable": true
},
"nullable": {
"type": "boolean",
"description": "Does the type accept NULL values?"
},
"precision": {
"type": "integer",
"format": "int64",
"description": "Precision of the type.\n\n# Examples\n- `VARCHAR` sets precision to `-1`.\n- `VARCHAR(255)` sets precision to `255`.\n- `BIGINT`, `DATE`, `FLOAT`, `DOUBLE`, `GEOMETRY`, etc. sets precision\nto None\n- `TIME`, `TIMESTAMP` set precision to `0`.",
"nullable": true
},
"scale": {
"type": "integer",
"format": "int64",
"description": "The scale of the type.\n\n# Example\n- `DECIMAL(1,2)` sets scale to `2`.",
"nullable": true
},
"type": {
"$ref": "#/components/schemas/SqlType"
},
"value": {
"allOf": [
{
"$ref": "#/components/schemas/ColumnType"
}
],
"nullable": true
}
}
},
"CompilationProfile": {
"type": "string",
"description": "Enumeration of possible compilation profiles that can be passed to the Rust compiler\nas an argument via `cargo build --profile <>`. A compilation profile affects among\nother things the compilation speed (how long till the program is ready to be run)\nand runtime speed (the performance while running).",
"enum": [
"dev",
"unoptimized",
"optimized"
]
},
"Configuration": {
"type": "object",
"required": [
"telemetry",
"edition",
"version",
"revision",
"changelog_url"
],
"properties": {
"changelog_url": {
"type": "string",
"description": "URL that navigates to the changelog of the current version"
},
"edition": {
"type": "string",
"description": "Feldera edition: \"Open source\" or \"Enterprise\""
},
"license_info": {
"allOf": [
{
"$ref": "#/components/schemas/LicenseInformation"
}
],
"nullable": true
},
"revision": {
"type": "string",
"description": "Specific revision corresponding to the edition `version` (e.g., git commit hash).\nThis is an empty string if it is unspecified."
},
"telemetry": {
"type": "string",
"description": "Telemetry key."
},
"update_info": {
"allOf": [
{
"$ref": "#/components/schemas/UpdateInformation"
}
],
"nullable": true
},
"version": {
"type": "string",
"description": "The version corresponding to the type of `edition`.\nFormat is `x.y.z`."
}
}
},
"ConnectorConfig": {
"allOf": [
{
"$ref": "#/components/schemas/OutputBufferConfig"
},
{
"type": "object",
"required": [
"transport"
],
"properties": {
"format": {
"allOf": [
{
"$ref": "#/components/schemas/FormatConfig"
}
],
"nullable": true
},
"index": {
"type": "string",
"description": "Name of the index that the connector is attached to.\n\nThis property is valid for output connectors only. It is used with data\ntransports and formats that expect output updates in the form of key/value\npairs, where the key typically represents a unique id associated with the\ntable or view.\n\nTo support such output formats, an output connector can be attached to an\nindex created using the SQL CREATE INDEX statement. An index of a table\nor view contains the same updates as the table or view itself, indexed by\none or more key columns.\n\nSee individual connector documentation for details on how they work\nwith indexes.",
"nullable": true
},
"labels": {
"type": "array",
"items": {
"type": "string"
},
"description": "Arbitrary user-defined text labels associated with the connector.\n\nThese labels can be used in conjunction with the `start_after` property\nto control the start order of connectors."
},
"max_batch_size": {
"type": "integer",
"format": "int64",
"description": "Maximum batch size, in records.\n\nThis is the maximum number of records to process in one batch through\nthe circuit. The time and space cost of processing a batch is\nasymptotically superlinear in the size of the batch, but very small\nbatches are less efficient due to constant factors.\n\nThis should usually be less than `max_queued_records`, to give the\nconnector a round-trip time to restart and refill the buffer while\nbatches are being processed.\n\nSome input adapters might not honor this setting.\n\nThe default is 10,000.",
"minimum": 0
},
"max_queued_records": {
"type": "integer",
"format": "int64",
"description": "Backpressure threshold.\n\nMaximal number of records queued by the endpoint before the endpoint\nis paused by the backpressure mechanism.\n\nFor input endpoints, this setting bounds the number of records that have\nbeen received from the input transport but haven't yet been consumed by\nthe circuit since the circuit, since the circuit is still busy processing\nprevious inputs.\n\nFor output endpoints, this setting bounds the number of records that have\nbeen produced by the circuit but not yet sent via the output transport endpoint\nnor stored in the output buffer (see `enable_output_buffer`).\n\nNote that this is not a hard bound: there can be a small delay between\nthe backpressure mechanism is triggered and the endpoint is paused, during\nwhich more data may be queued.\n\nThe default is 1 million.",
"minimum": 0
},
"paused": {
"type": "boolean",
"description": "Create connector in paused state.\n\nThe default is `false`."
},
"start_after": {
"type": "array",
"items": {
"type": "string"
},
"description": "Start the connector after all connectors with specified labels.\n\nThis property is used to control the start order of connectors.\nThe connector will not start until all connectors with the specified\nlabels have finished processing all inputs.",
"nullable": true
},
"transport": {
"$ref": "#/components/schemas/TransportConfig"
}
}
}
],
"description": "A data connector's configuration"
},
"DatagenInputConfig": {
"type": "object",
"description": "Configuration for generating random data for a table.",
"properties": {
"plan": {
"type": "array",
"items": {
"$ref": "#/components/schemas/GenerationPlan"
},
"description": "The sequence of generations to perform.\n\nIf not set, the generator will produce a single sequence with default settings.\nIf set, the generator will produce the specified sequences in sequential order.\n\nNote that if one of the sequences before the last one generates an unlimited number of rows\nthe following sequences will not be executed.",
"default": [
{
"rate": null,
"limit": null,
"worker_chunk_size": null,
"fields": {}
}
]
},
"seed": {
"type": "integer",
"format": "int64",
"description": "Optional seed for the random generator.\n\nSetting this to a fixed value will make the generator produce the same sequence of records\nevery time the pipeline is run.\n\n# Notes\n- To ensure the set of generated input records is deterministic across multiple runs,\napart from setting a seed, `workers` also needs to remain unchanged.\n- The input will arrive in non-deterministic order if `workers > 1`.",
"default": null,
"nullable": true,
"minimum": 0
},
"workers": {
"type": "integer",
"description": "Number of workers to use for generating data.",
"default": 1,
"minimum": 0
}
},
"additionalProperties": false
},
"DatagenStrategy": {
"type": "string",
"description": "Strategy used to generate values.",
"enum": [
"increment",
"uniform",
"zipf",
"word",
"words",
"sentence",
"sentences",
"paragraph",
"paragraphs",
"first_name",
"last_name",
"title",
"suffix",
"name",
"name_with_title",
"domain_suffix",
"email",
"username",
"password",
"field",
"position",
"seniority",
"job_title",
"ipv4",
"ipv6",
"ip",
"mac_address",
"user_agent",
"rfc_status_code",
"valid_status_code",
"company_suffix",
"company_name",
"buzzword",
"buzzword_middle",
"buzzword_tail",
"catch_phrase",
"bs_verb",
"bs_adj",
"bs_noun",
"bs",
"profession",
"industry",
"currency_code",
"currency_name",
"currency_symbol",
"credit_card_number",
"city_prefix",
"city_suffix",
"city_name",
"country_name",
"country_code",
"street_suffix",
"street_name",
"time_zone",
"state_name",
"state_abbr",
"secondary_address_type",
"secondary_address",
"zip_code",
"post_code",
"building_number",
"latitude",
"longitude",
"isbn",
"isbn13",
"isbn10",
"phone_number",
"cell_number",
"file_path",
"file_name",
"file_extension",
"dir_path"
]
},
"DeltaTableIngestMode": {
"type": "string",
"description": "Delta table read mode.\n\nThree options are available:\n\n* `snapshot` - read a snapshot of the table and stop.\n\n* `follow` - continuously ingest changes to the table, starting from a specified version\nor timestamp.\n\n* `snapshot_and_follow` - read a snapshot of the table before switching to continuous ingestion\nmode.",
"enum": [
"snapshot",
"follow",
"snapshot_and_follow",
"cdc"
]
},
"DeltaTableReaderConfig": {
"type": "object",
"description": "Delta table input connector configuration.",
"required": [
"uri",
"mode"
],
"properties": {
"cdc_delete_filter": {
"type": "string",
"description": "A predicate that determines whether the record represents a deletion.\n\nThis setting is only valid in the 'cdc' mode. It specifies a predicate applied to\neach row in the Delta table to determine whether the row represents a deletion event.\nIts value must be a valid Boolean SQL expression that can be used in a query of the\nform `SELECT * from <table> WHERE <cdc_delete_filter>`.",
"nullable": true
},
"cdc_order_by": {
"type": "string",
"description": "An expression that determines the ordering of updates in the Delta table.\n\nThis setting is only valid in the 'cdc' mode. It specifies a predicate applied to\neach row in the Delta table to determine the order in which updates in the table should\nbe applied. Its value must be a valid SQL expression that can be used in a query of the\nform `SELECT * from <table> ORDER BY <cdc_order_by>`.",
"nullable": true
},
"datetime": {
"type": "string",
"description": "Optional timestamp for the snapshot in the ISO-8601/RFC-3339 format, e.g.,\n\"2024-12-09T16:09:53+00:00\".\n\nWhen this option is set, the connector finds and opens the version of the table as of the\nspecified point in time (based on the server time recorded in the transaction log, not the\nevent time encoded in the data). In `snapshot` and `snapshot_and_follow` modes, it\nretrieves the snapshot of this version of the table. In `follow` and `snapshot_and_follow`\nmodes, it follows transaction log records **after** this version.\n\nNote: at most one of `version` and `datetime` options can be specified.\nWhen neither of the two options is specified, the latest committed version of the table\nis used.",
"nullable": true
},
"filter": {
"type": "string",
"description": "Optional row filter.\n\nWhen specified, only rows that satisfy the filter condition are read from the delta table.\nThe condition must be a valid SQL Boolean expression that can be used in\nthe `where` clause of the `select * from my_table where ...` query.",
"nullable": true
},
"mode": {
"$ref": "#/components/schemas/DeltaTableIngestMode"
},
"num_parsers": {
"type": "integer",
"format": "int32",
"description": "The number of parallel parsing tasks the connector uses to process data read from the\ntable. Increasing this value can enhance performance by allowing more concurrent processing.\nRecommended range: 1–10. The default is 4.",
"minimum": 0
},
"snapshot_filter": {
"type": "string",
"description": "Optional snapshot filter.\n\nThis option is only valid when `mode` is set to `snapshot` or `snapshot_and_follow`.\n\nWhen specified, only rows that satisfy the filter condition are included in the\nsnapshot. The condition must be a valid SQL Boolean expression that can be used in\nthe `where` clause of the `select * from snapshot where ...` query.\n\nUnlike the `filter` option, which applies to all records retrieved from the table, this\nfilter only applies to rows in the initial snapshot of the table.\nFor instance, it can be used to specify the range of event times to include in the snapshot,\ne.g.: `ts BETWEEN TIMESTAMP '2005-01-01 00:00:00' AND TIMESTAMP '2010-12-31 23:59:59'`.\n\nThis option can be used together with the `filter` option. During the initial snapshot,\nonly rows that satisfy both `filter` and `snapshot_filter` are retrieved from the Delta table.\nWhen subsequently following changes in the the transaction log (`mode = snapshot_and_follow`),\nall rows that meet the `filter` condition are ingested, regardless of `snapshot_filter`.",
"nullable": true
},
"timestamp_column": {
"type": "string",
"description": "Table column that serves as an event timestamp.\n\nWhen this option is specified, and `mode` is one of `snapshot` or `snapshot_and_follow`,\ntable rows are ingested in the timestamp order, respecting the\n[`LATENESS`](https://docs.feldera.com/sql/streaming#lateness-expressions)\nproperty of the column: each ingested row has a timestamp no more than `LATENESS`\ntime units earlier than the most recent timestamp of any previously ingested row.\nThe ingestion is performed by partitioning the table into timestamp ranges of width\n`LATENESS`. Each range is processed sequentially, in increasing timestamp order.\n\n# Example\n\nConsider a table with timestamp column of type `TIMESTAMP` and lateness attribute\n`INTERVAL 1 DAY`. Assuming that the oldest timestamp in the table is\n`2024-01-01T00:00:00``, the connector will fetch all records with timestamps\nfrom `2024-01-01`, then all records for `2024-01-02`, `2024-01-03`, etc., until all records\nin the table have been ingested.\n\n# Requirements\n\n* The timestamp column must be of a supported type: integer, `DATE`, or `TIMESTAMP`.\n* The timestamp column must be declared with non-zero `LATENESS`.\n* For efficient ingest, the table must be optimized for timestamp-based\nqueries using partitioning, Z-ordering, or liquid clustering.",
"nullable": true
},
"uri": {
"type": "string",
"description": "Table URI.\n\nExample: \"s3://feldera-fraud-detection-data/demographics_train\""
},
"version": {
"type": "integer",
"format": "int64",
"description": "Optional table version.\n\nWhen this option is set, the connector finds and opens the specified version of the table.\nIn `snapshot` and `snapshot_and_follow` modes, it retrieves the snapshot of this version of\nthe table. In `follow` and `snapshot_and_follow` modes, it follows transaction log records\n**after** this version.\n\nNote: at most one of `version` and `datetime` options can be specified.\nWhen neither of the two options is specified, the latest committed version of the table\nis used.",
"nullable": true
}
},
"additionalProperties": {
"type": "string",
"description": "Storage options for configuring backend object store.\n\nFor specific options available for different storage backends, see:\n* [Azure options](https://docs.rs/object_store/latest/object_store/azure/enum.AzureConfigKey.html)\n* [Amazon S3 options](https://docs.rs/object_store/latest/object_store/aws/enum.AmazonS3ConfigKey.html)\n* [Google Cloud Storage options](https://docs.rs/object_store/latest/object_store/gcp/enum.GoogleConfigKey.html)"
}
},
"DeltaTableWriteMode": {
"type": "string",
"description": "Delta table write mode.\n\nDetermines how the Delta table connector handles an existing table at the target location.",
"enum": [
"append",
"truncate",
"error_if_exists"
]
},
"DeltaTableWriterConfig": {
"type": "object",
"description": "Delta table output connector configuration.",
"required": [
"uri"
],
"properties": {
"mode": {
"$ref": "#/components/schemas/DeltaTableWriteMode"
},
"uri": {
"type": "string",
"description": "Table URI."
}
},
"additionalProperties": {
"type": "string",
"description": "Storage options for configuring backend object store.\n\nFor specific options available for different storage backends, see:\n* [Azure options](https://docs.rs/object_store/latest/object_store/azure/enum.AzureConfigKey.html)\n* [Amazon S3 options](https://docs.rs/object_store/latest/object_store/aws/enum.AmazonS3ConfigKey.html)\n* [Google Cloud Storage options](https://docs.rs/object_store/latest/object_store/gcp/enum.GoogleConfigKey.html)"
}
},
"Demo": {
"type": "object",
"required": [
"name",
"title",
"description",
"program_code",
"udf_rust",
"udf_toml"
],
"properties": {
"description": {
"type": "string",
"description": "Description of the demo (parsed from SQL preamble)."
},
"name": {
"type": "string",
"description": "Name of the demo (parsed from SQL preamble)."
},
"program_code": {
"type": "string",
"description": "Program SQL code."
},
"title": {
"type": "string",
"description": "Title of the demo (parsed from SQL preamble)."
},
"udf_rust": {
"type": "string",
"description": "User defined function (UDF) Rust code."
},
"udf_toml": {
"type": "string",
"description": "User defined function (UDF) TOML dependencies."
}
}
},
"DisplaySchedule": {
"oneOf": [
{
"type": "string",
"description": "Display it only once: after dismissal do not show it again",
"enum": [
"Once"
]
},
{
"type": "string",
"description": "Display it again the next session if it is dismissed",
"enum": [
"Session"
]
},
{
"type": "object",
"required": [
"Every"
],
"properties": {
"Every": {
"type": "object",
"description": "Display it again after a certain period of time after it is dismissed",
"required": [
"seconds"
],
"properties": {
"seconds": {
"type": "integer",
"format": "int64",
"minimum": 0
}
}
}
}
},
{
"type": "string",
"description": "Always display it, do not allow it to be dismissed",
"enum": [
"Always"
]
}
]
},
"ErrorResponse": {
"type": "object",
"description": "Information returned by REST API endpoints on error.",
"required": [
"message",
"error_code",
"details"
],
"properties": {
"details": {
"type": "object",
"description": "Detailed error metadata.\nThe contents of this field are determined by `error_code`."
},
"error_code": {
"type": "string",
"description": "Error code is a string that specifies this error type.",
"example": "CodeSpecifyingErrorType"
},
"message": {
"type": "string",
"description": "Human-readable error message.",
"example": "Explanation of the error that occurred."
}
}
},
"Field": {
"allOf": [
{
"$ref": "#/components/schemas/SqlIdentifier"
},
{
"type": "object",
"required": [
"columntype"
],
"properties": {
"columntype": {
"$ref": "#/components/schemas/ColumnType"
},
"default": {
"type": "string",
"nullable": true
},
"lateness": {
"type": "string",
"nullable": true
},
"watermark": {
"type": "string",
"nullable": true
}
}
}
],
"description": "A SQL field.\n\nMatches the SQL compiler JSON format."
},
"FileInputConfig": {
"type": "object",
"description": "Configuration for reading data from a file with `FileInputTransport`",
"required": [
"path"
],
"properties": {
"buffer_size_bytes": {
"type": "integer",
"description": "Read buffer size.\n\nDefault: when this parameter is not specified, a platform-specific\ndefault is used.",
"nullable": true,
"minimum": 0
},
"follow": {
"type": "boolean",
"description": "Enable file following.\n\nWhen `false`, the endpoint outputs an `InputConsumer::eoi`\nmessage and stops upon reaching the end of file. When `true`, the\nendpoint will keep watching the file and outputting any new content\nappended to it."
},
"path": {
"type": "string",
"description": "File path."
}
}
},
"FileOutputConfig": {
"type": "object",
"description": "Configuration for writing data to a file with `FileOutputTransport`.",
"required": [
"path"
],
"properties": {
"path": {
"type": "string",
"description": "File path."
}
}
},
"FormatConfig": {
"type": "object",
"description": "Data format specification used to parse raw data received from the\nendpoint or to encode data sent to the endpoint.",
"required": [
"name"
],
"properties": {
"config": {
"type": "object",
"description": "Format-specific parser or encoder configuration."
},
"name": {
"type": "string",
"description": "Format name, e.g., \"csv\", \"json\", \"bincode\", etc."
}
}
},
"FtConfig": {
"type": "object",
"description": "Fault-tolerance configuration for runtime startup.",
"properties": {
"checkpoint_interval_secs": {
"type": "integer",
"format": "int64",
"description": "Interval between automatic checkpoints, in seconds.\n\nThe default is 60 seconds. A value of 0 disables automatic\ncheckpointing.",
"default": 60,
"minimum": 0
}
}
},
"GenerationPlan": {
"type": "object",
"description": "A random generation plan for a table that generates either a limited amount of rows or runs continuously.",
"properties": {
"fields": {
"type": "object",
"description": "Specifies the values that the generator should produce.",
"default": {},
"additionalProperties": {
"$ref": "#/components/schemas/RngFieldSettings"
}
},
"limit": {
"type": "integer",
"description": "Total number of new rows to generate.\n\nIf not set, the generator will produce new/unique records as long as the pipeline is running.\nIf set to 0, the table will always remain empty.\nIf set, the generator will produce new records until the specified limit is reached.\n\nNote that if the table has one or more primary keys that don't use the `increment` strategy to\ngenerate the key there is a potential that an update is generated instead of an insert. In\nthis case it's possible the total number of records is less than the specified limit.",
"default": null,
"nullable": true,
"minimum": 0
},
"rate": {
"type": "integer",
"format": "int32",
"description": "Non-zero number of rows to generate per second.\n\nIf not set, the generator will produce rows as fast as possible.",
"default": null,
"nullable": true,
"minimum": 0
},
"worker_chunk_size": {
"type": "integer",
"description": "When multiple workers are used, each worker will pick a consecutive \"chunk\" of\nrecords to generate.\n\nBy default, if not specified, the generator will use the formula `min(rate, 10_000)`\nto determine it. This works well in most situations. However, if you're\nrunning tests with lateness and many workers you can e.g., reduce the\nchunk size to make sure a smaller range of records is being ingested in parallel.\n\n# Example\nAssume you generate a total of 125 records with 4 workers and a chunk size of 25.\nIn this case, worker A will generate records 0..25, worker B will generate records 25..50,\netc. A, B, C, and D will generate records in parallel. The first worker to finish its chunk\nwill pick up the last chunk of records (100..125) to generate.",
"default": null,
"nullable": true,
"minimum": 0
}
},
"additionalProperties": false
},
"GetPipelineParameters": {
"type": "object",
"description": "Query parameters to GET a pipeline or a list of pipelines.",
"properties": {
"selector": {
"$ref": "#/components/schemas/PipelineFieldSelector"
}
}
},
"GlueCatalogConfig": {
"type": "object",
"description": "AWS Glue catalog config.",
"properties": {
"glue.access-key-id": {
"type": "string",
"description": "Access key id used to access the Glue catalog.",
"nullable": true
},
"glue.endpoint": {
"type": "string",
"description": "Configure an alternative endpoint of the Glue service for Glue catalog to access.\n\nExample: `\"https://glue.us-east-1.amazonaws.com\"`",
"nullable": true
},
"glue.id": {
"type": "string",
"description": "The 12-digit ID of the Glue catalog.",
"nullable": true
},
"glue.profile-name": {
"type": "string",
"description": "Profile used to access the Glue catalog.",
"nullable": true
},
"glue.region": {
"type": "string",
"description": "Region of the Glue catalog.",
"nullable": true
},
"glue.secret-access-key": {
"type": "string",
"description": "Secret access key used to access the Glue catalog.",
"nullable": true
},
"glue.session-token": {
"type": "string",
"nullable": true
},
"glue.warehouse": {
"type": "string",
"description": "Location for table metadata.\n\nExample: `\"s3://my-data-warehouse/tables/\"`",
"nullable": true
}
}
},
"HttpInputConfig": {
"type": "object",
"description": "Configuration for reading data via HTTP.\n\nHTTP input adapters cannot be usefully configured as part of pipeline\nconfiguration. Instead, instantiate them through the REST API as\n`/pipelines/{pipeline_name}/ingress/{table_name}`.",
"required": [
"name"
],
"properties": {
"name": {
"type": "string",
"description": "Autogenerated name."
}
}
},
"IcebergCatalogType": {
"type": "string",
"enum": [
"rest",
"glue"
]
},
"IcebergIngestMode": {
"type": "string",
"description": "Iceberg table read mode.\n\nThree options are available:\n\n* `snapshot` - read a snapshot of the table and stop.\n\n* `follow` - continuously ingest changes to the table, starting from a specified snapshot\nor timestamp.\n\n* `snapshot_and_follow` - read a snapshot of the table before switching to continuous ingestion\nmode.",
"enum": [
"snapshot",
"follow",
"snapshot_and_follow"
]
},
"IcebergReaderConfig": {
"allOf": [
{
"$ref": "#/components/schemas/GlueCatalogConfig"
},
{
"$ref": "#/components/schemas/RestCatalogConfig"
},
{
"type": "object",
"required": [
"mode"
],
"properties": {
"catalog_type": {
"allOf": [
{
"$ref": "#/components/schemas/IcebergCatalogType"
}
],
"nullable": true
},
"datetime": {
"type": "string",
"description": "Optional timestamp for the snapshot in the ISO-8601/RFC-3339 format, e.g.,\n\"2024-12-09T16:09:53+00:00\".\n\nWhen this option is set, the connector finds and opens the snapshot of the table as of the\nspecified point in time (based on the server time recorded in the transaction\nlog, not the event time encoded in the data). In `snapshot` and `snapshot_and_follow`\nmodes, it retrieves this snapshot. In `follow` and `snapshot_and_follow` modes, it\nfollows transaction log records **after** this snapshot.\n\nNote: at most one of `snapshot_id` and `datetime` options can be specified.\nWhen neither of the two options is specified, the latest committed version of the table\nis used.",
"nullable": true
},
"metadata_location": {
"type": "string",
"description": "Location of the table metadata JSON file.\n\nThis property is used to access an Iceberg table without a catalog. It is mutually\nexclusive with the `catalog_type` property.",
"nullable": true
},
"mode": {
"$ref": "#/components/schemas/IcebergIngestMode"
},
"snapshot_filter": {
"type": "string",
"description": "Optional row filter.\n\nThis option is only valid when `mode` is set to `snapshot` or `snapshot_and_follow`.\n\nWhen specified, only rows that satisfy the filter condition are included in the\nsnapshot. The condition must be a valid SQL Boolean expression that can be used in\nthe `where` clause of the `select * from snapshot where ...` query.\n\nThis option can be used to specify the range of event times to include in the snapshot,\ne.g.: `ts BETWEEN '2005-01-01 00:00:00' AND '2010-12-31 23:59:59'`.",
"nullable": true
},
"snapshot_id": {
"type": "integer",
"format": "int64",
"description": "Optional snapshot id.\n\nWhen this option is set, the connector finds the specified snapshot of the table.\nIn `snapshot` and `snapshot_and_follow` modes, it loads this snapshot.\nIn `follow` and `snapshot_and_follow` modes, it follows table updates\n**after** this snapshot.\n\nNote: at most one of `snapshot_id` and `datetime` options can be specified.\nWhen neither of the two options is specified, the latest committed version of the table\nis used.",
"nullable": true
},
"table_name": {
"type": "string",
"description": "Specifies the Iceberg table name in the \"namespace.table\" format.\n\nThis option is applicable when an Iceberg catalog is configured using the `catalog_type` property.",
"nullable": true
},
"timestamp_column": {
"type": "string",
"description": "Table column that serves as an event timestamp.\n\nWhen this option is specified, and `mode` is one of `snapshot` or `snapshot_and_follow`,\ntable rows are ingested in the timestamp order, respecting the\n[`LATENESS`](https://docs.feldera.com/sql/streaming#lateness-expressions)\nproperty of the column: each ingested row has a timestamp no more than `LATENESS`\ntime units earlier than the most recent timestamp of any previously ingested row.\nThe ingestion is performed by partitioning the table into timestamp ranges of width\n`LATENESS`. Each range is processed sequentially, in increasing timestamp order.\n\n# Example\n\nConsider a table with timestamp column of type `TIMESTAMP` and lateness attribute\n`INTERVAL 1 DAY`. Assuming that the oldest timestamp in the table is\n`2024-01-01T00:00:00`, the connector will fetch all records with timestamps\nfrom `2024-01-01`, then all records for `2024-01-02`, `2024-01-03`, etc., until all records\nin the table have been ingested.\n\n# Requirements\n\n* The timestamp column must be of a supported type: integer, `DATE`, or `TIMESTAMP`.\n* The timestamp column must be declared with non-zero `LATENESS`.\n* For efficient ingest, the table must be optimized for timestamp-based\nqueries using partitioning, Z-ordering, or liquid clustering.",
"nullable": true
}
},
"additionalProperties": {
"type": "string",
"description": "Storage options for configuring backend object store.\n\nSee the [list of available options in PyIceberg documentation](https://py.iceberg.apache.org/configuration/#fileio)."
}
}
],
"description": "Iceberg input connector configuration."
},
"InputEndpointConfig": {
"allOf": [
{
"$ref": "#/components/schemas/ConnectorConfig"
},
{
"type": "object",
"required": [
"stream"
],
"properties": {
"stream": {
"type": "string",
"description": "The name of the input stream of the circuit that this endpoint is\nconnected to."
}
}
}
],
"description": "Describes an input connector configuration"
},
"IntervalUnit": {
"type": "string",
"description": "The specified units for SQL Interval types.\n\n`INTERVAL 1 DAY`, `INTERVAL 1 DAY TO HOUR`, `INTERVAL 1 DAY TO MINUTE`,\nwould yield `Day`, `DayToHour`, `DayToMinute`, as the `IntervalUnit` respectively.",
"enum": [
"Day",
"DayToHour",
"DayToMinute",
"DayToSecond",
"Hour",
"HourToMinute",
"HourToSecond",
"Minute",
"MinuteToSecond",
"Month",
"Second",
"Year",
"YearToMonth"
]
},
"JsonLines": {
"type": "string",
"description": "Whether JSON values can span multiple lines.",
"enum": [
"multiple",
"single"
]
},
"JsonUpdateFormat": {
"type": "string",
"description": "Supported JSON data change event formats.\n\nEach element in a JSON-formatted input stream specifies\nan update to one or more records in an input table. We support\nseveral different ways to represent such updates.\n\n### `InsertDelete`\n\nEach element in the input stream consists of an \"insert\" or \"delete\"\ncommand and a record to be inserted to or deleted from the input table.\n\n```json\n{\"insert\": {\"column1\": \"hello, world!\", \"column2\": 100}}\n```\n\n### `Weighted`\n\nEach element in the input stream consists of a record and a weight\nwhich indicates how many times the row appears.\n\n```json\n{\"weight\": 2, \"data\": {\"column1\": \"hello, world!\", \"column2\": 100}}\n```\n\nNote that the line above would be equivalent to the following input in the `InsertDelete` format:\n\n```json\n{\"insert\": {\"column1\": \"hello, world!\", \"column2\": 100}}\n{\"insert\": {\"column1\": \"hello, world!\", \"column2\": 100}}\n```\n\nSimilarly, negative weights are equivalent to deletions:\n\n```json\n{\"weight\": -1, \"data\": {\"column1\": \"hello, world!\", \"column2\": 100}}\n```\n\nis equivalent to in the `InsertDelete` format:\n\n```json\n{\"delete\": {\"column1\": \"hello, world!\", \"column2\": 100}}\n```\n\n### `Debezium`\n\nDebezium CDC format. Refer to [Debezium input connector documentation](https://docs.feldera.com/connectors/sources/debezium) for details.\n\n### `Snowflake`\n\nUses flat structure so that fields can get parsed directly into SQL\ncolumns. Defines three metadata fields:\n\n* `__action` - \"insert\" or \"delete\"\n* `__stream_id` - unique 64-bit ID of the output stream (records within\na stream are totally ordered)\n* `__seq_number` - monotonically increasing sequence number relative to\nthe start of the stream.\n\n```json\n{\"PART\":1,\"VENDOR\":2,\"EFFECTIVE_SINCE\":\"2019-05-21\",\"PRICE\":\"10000\",\"__action\":\"insert\",\"__stream_id\":4523666124030717756,\"__seq_number\":1}\n```\n\n### `Raw`\n\nThis format is suitable for insert-only streams (no deletions).\nEach element in the input stream contains a record without any\nadditional envelope that gets inserted in the input table.",
"enum": [
"insert_delete",
"weighted",
"debezium",
"snowflake",
"raw",
"redis"
]
},
"KafkaHeader": {
"type": "object",
"description": "Kafka message header.",
"required": [
"key"
],
"properties": {
"key": {
"type": "string"
},
"value": {
"allOf": [
{
"$ref": "#/components/schemas/KafkaHeaderValue"
}
],
"nullable": true
}
}
},
"KafkaHeaderValue": {
"type": "string",
"format": "binary",
"description": "Kafka header value encoded as a UTF-8 string or a byte array."
},
"KafkaInputConfig": {
"type": "object",
"description": "Configuration for reading data from Kafka topics with `InputTransport`.",
"required": [
"topic"
],
"properties": {
"group_join_timeout_secs": {
"type": "integer",
"format": "int32",
"description": "Maximum timeout in seconds to wait for the endpoint to join the Kafka\nconsumer group during initialization.",
"minimum": 0
},
"log_level": {
"allOf": [
{
"$ref": "#/components/schemas/KafkaLogLevel"
}
],
"nullable": true
},
"poller_threads": {
"type": "integer",
"description": "Set to 1 or more to fix the number of threads used to poll\n`rdkafka`. Multiple threads can increase performance with small Kafka\nmessages; for large messages, one thread is enough. In either case, too\nmany threads can harm performance. If unset, the default is 3, which\nhelps with small messages but will not harm performance with large\nmessages.",
"nullable": true,
"minimum": 0
},
"start_from": {
"$ref": "#/components/schemas/KafkaStartFromConfig"
},
"topic": {
"type": "string",
"description": "Topic to subscribe to."
}
},
"additionalProperties": {
"type": "string",
"description": "Options passed directly to `rdkafka`.\n\n[`librdkafka` options](https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md)\nused to configure the Kafka consumer.\n\nThis input connector does not use consumer groups, so options related to\nconsumer groups are rejected, including:\n\n* `group.id`, if present, is ignored.\n* `auto.offset.reset` (use `start_from` instead).\n* \"enable.auto.commit\", if present, must be set to \"false\".\n* \"enable.auto.offset.store\", if present, must be set to \"false\"."
}
},
"KafkaLogLevel": {
"type": "string",
"description": "Kafka logging levels.",
"enum": [
"emerg",
"alert",
"critical",
"error",
"warning",
"notice",
"info",
"debug"
]
},
"KafkaOutputConfig": {
"type": "object",
"description": "Configuration for writing data to a Kafka topic with `OutputTransport`.",
"required": [
"topic"
],
"properties": {
"fault_tolerance": {
"allOf": [
{
"$ref": "#/components/schemas/KafkaOutputFtConfig"
}
],
"nullable": true
},
"headers": {
"type": "array",
"items": {
"$ref": "#/components/schemas/KafkaHeader"
},
"description": "Kafka headers to be added to each message produced by this connector."
},
"initialization_timeout_secs": {
"type": "integer",
"format": "int32",
"description": "Maximum timeout in seconds to wait for the endpoint to connect to\na Kafka broker.\n\nDefaults to 60.",
"minimum": 0
},
"kafka_service": {
"type": "string",
"description": "If specified, this service is used to provide defaults for the Kafka options.",
"nullable": true
},
"log_level": {
"allOf": [
{
"$ref": "#/components/schemas/KafkaLogLevel"
}
],
"nullable": true
},
"topic": {
"type": "string",
"description": "Topic to write to."
}
},
"additionalProperties": {
"type": "string",
"description": "Options passed directly to `rdkafka`.\n\nSee [`librdkafka` options](https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md)\nused to configure the Kafka producer."
}
},
"KafkaOutputFtConfig": {
"type": "object",
"description": "Fault tolerance configuration for Kafka output connector.",
"properties": {
"consumer_options": {
"type": "object",
"description": "Options passed to `rdkafka` for consumers only, as documented at\n[`librdkafka`\noptions](https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md).\n\nThese options override `kafka_options` for consumers, and may be empty.",
"default": {},
"additionalProperties": {
"type": "string"
}
},
"producer_options": {
"type": "object",
"description": "Options passed to `rdkafka` for producers only, as documented at\n[`librdkafka`\noptions](https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md).\n\nThese options override `kafka_options` for producers, and may be empty.",
"default": {},
"additionalProperties": {
"type": "string"
}
}
}
},
"KafkaStartFromConfig": {
"oneOf": [
{
"type": "string",
"description": "Start from the beginning of the topic.",
"enum": [
"earliest"
]
},
{
"type": "string",
"description": "Start from the current end of the topic.\n\nThis will only read any data that is added to the topic after the\nconnector initializes.",
"enum": [
"latest"
]
},
{
"type": "object",
"required": [
"offsets"
],
"properties": {
"offsets": {
"type": "array",
"items": {
"type": "integer",
"format": "int64"
},
"description": "Start from particular offsets in the topic.\n\nThe number of offsets must match the number of partitions in the topic."
}
}
}
],
"description": "Where to begin reading a Kafka topic."
},
"LicenseInformation": {
"type": "object",
"required": [
"expires_in_seconds",
"expires_at",
"is_expired",
"is_trial",
"description_html",
"extension_url",
"remind_starting_at",
"remind_schedule"
],
"properties": {
"description_html": {
"type": "string",
"description": "Optional description of the advantages of extending the license / upgrading from a trial"
},
"expires_at": {
"type": "string",
"format": "date-time",
"description": "Timestamp at which the license expires"
},
"expires_in_seconds": {
"type": "integer",
"format": "int64",
"description": "Duration until the license expires",
"minimum": 0
},
"extension_url": {
"type": "string",
"description": "URL that navigates the user to extend / upgrade their license"
},
"is_expired": {
"type": "boolean",
"description": "Whether the license is expired"
},
"is_trial": {
"type": "boolean",
"description": "Whether the license is a trial"
},
"remind_schedule": {
"$ref": "#/components/schemas/DisplaySchedule"
},
"remind_starting_at": {
"type": "string",
"format": "date-time",
"description": "Timestamp from which the user should be reminded of the license expiring soon"
}
}
},
"MetricsFormat": {
"type": "string",
"description": "Circuit metrics output format.\n- `prometheus`: [format](https://github.com/prometheus/docs/blob/main/content/docs/instrumenting/exposition_formats.md) expected by Prometheus\n- `json`: JSON format",
"enum": [
"prometheus",
"json"
]
},
"MetricsParameters": {
"type": "object",
"description": "Query parameters to retrieve pipeline circuit metrics.",
"properties": {
"format": {
"$ref": "#/components/schemas/MetricsFormat"
}
}
},
"NewApiKeyRequest": {
"type": "object",
"description": "Request to create a new API key.",
"required": [
"name"
],
"properties": {
"name": {
"type": "string",
"description": "Key name.",
"example": "my-api-key"
}
}
},
"NewApiKeyResponse": {
"type": "object",
"description": "Response to a successful API key creation.",
"required": [
"id",
"name",
"api_key"
],
"properties": {
"api_key": {
"type": "string",
"description": "Generated secret API key. There is no way to retrieve this\nkey again through the API, so store it securely.",
"example": "apikey:v5y5QNtlPNVMwkmNjKwFU8bbIu5lMge3yHbyddxAOdXlEo84SEoNn32DUhQaf1KLeI9aOOfnJjhQ1pYzMrU4wQXON6pm6BS7Zgzj46U2b8pwz1280vYBEtx41hiDBRP"
},
"id": {
"$ref": "#/components/schemas/ApiKeyId"
},
"name": {
"type": "string",
"description": "API key name provided by the user.",
"example": "my-api-key"
}
}
},
"NexmarkInputConfig": {
"type": "object",
"description": "Configuration for generating Nexmark input data.\n\nThis connector must be used exactly three times in a pipeline if it is used\nat all, once for each [`NexmarkTable`].",
"required": [
"table"
],
"properties": {
"options": {
"allOf": [
{
"$ref": "#/components/schemas/NexmarkInputOptions"
}
],
"nullable": true
},
"table": {
"$ref": "#/components/schemas/NexmarkTable"
}
}
},
"NexmarkInputOptions": {
"type": "object",
"description": "Configuration for generating Nexmark input data.",
"properties": {
"batch_size_per_thread": {
"type": "integer",
"format": "int64",
"description": "Number of events to generate and submit together, per thread.\n\nEach thread generates this many records, which are then combined with\nthe records generated by the other threads, to form combined input\nbatches of size `threads × batch_size_per_thread`.",
"default": 1000,
"minimum": 0
},
"events": {
"type": "integer",
"format": "int64",
"description": "Number of events to generate.",
"default": 100000000,
"minimum": 0
},
"max_step_size_per_thread": {
"type": "integer",
"format": "int64",
"description": "Maximum number of events to submit in a single step, per thread.\n\nThis should really be per worker thread, not per generator thread, but\nthe connector does not know how many worker threads there are.\n\nThis stands in for `max_batch_size` from the connector configuration\nbecause it must be a constant across all three of the nexmark tables.",
"default": 10000,
"minimum": 0
},
"threads": {
"type": "integer",
"description": "Number of event generator threads.\n\nIt's reasonable to choose the same number of generator threads as worker\nthreads.",
"default": 4,
"minimum": 0
}
}
},
"NexmarkTable": {
"type": "string",
"description": "Table in Nexmark.",
"enum": [
"bid",
"auction",
"person"
]
},
"ObjectStorageConfig": {
"type": "object",
"required": [
"url"
],
"properties": {
"url": {
"type": "string",
"description": "URL.\n\nThe following URL schemes are supported:\n\n* S3:\n- `s3://<bucket>/<path>`\n- `s3a://<bucket>/<path>`\n- `https://s3.<region>.amazonaws.com/<bucket>`\n- `https://<bucket>.s3.<region>.amazonaws.com`\n- `https://ACCOUNT_ID.r2.cloudflarestorage.com/bucket`\n* Google Cloud Storage:\n- `gs://<bucket>/<path>`\n* Microsoft Azure Blob Storage:\n- `abfs[s]://<container>/<path>` (according to [fsspec](https://github.com/fsspec/adlfs))\n- `abfs[s]://<file_system>@<account_name>.dfs.core.windows.net/<path>`\n- `abfs[s]://<file_system>@<account_name>.dfs.fabric.microsoft.com/<path>`\n- `az://<container>/<path>` (according to [fsspec](https://github.com/fsspec/adlfs))\n- `adl://<container>/<path>` (according to [fsspec](https://github.com/fsspec/adlfs))\n- `azure://<container>/<path>` (custom)\n- `https://<account>.dfs.core.windows.net`\n- `https://<account>.blob.core.windows.net`\n- `https://<account>.blob.core.windows.net/<container>`\n- `https://<account>.dfs.fabric.microsoft.com`\n- `https://<account>.dfs.fabric.microsoft.com/<container>`\n- `https://<account>.blob.fabric.microsoft.com`\n- `https://<account>.blob.fabric.microsoft.com/<container>`\n\nSettings derived from the URL will override other settings."
}
},
"additionalProperties": {
"type": "string",
"description": "Additional options as key-value pairs.\n\nThe following keys are supported:\n\n* S3:\n- `access_key_id`: AWS Access Key.\n- `secret_access_key`: AWS Secret Access Key.\n- `region`: Region.\n- `default_region`: Default region.\n- `endpoint`: Custom endpoint for communicating with S3,\ne.g. `https://localhost:4566` for testing against a localstack\ninstance.\n- `token`: Token to use for requests (passed to underlying provider).\n- [Other keys](https://docs.rs/object_store/latest/object_store/aws/enum.AmazonS3ConfigKey.html#variants).\n* Google Cloud Storage:\n- `service_account`: Path to the service account file.\n- `service_account_key`: The serialized service account key.\n- `google_application_credentials`: Application credentials path.\n- [Other keys](https://docs.rs/object_store/latest/object_store/gcp/enum.GoogleConfigKey.html).\n* Microsoft Azure Blob Storage:\n- `access_key`: Azure Access Key.\n- `container_name`: Azure Container Name.\n- `account`: Azure Account.\n- `bearer_token_authorization`: Static bearer token for authorizing requests.\n- `client_id`: Client ID for use in client secret or Kubernetes federated credential flow.\n- `client_secret`: Client secret for use in client secret flow.\n- `tenant_id`: Tenant ID for use in client secret or Kubernetes federated credential flow.\n- `endpoint`: Override the endpoint for communicating with blob storage.\n- [Other keys](https://docs.rs/object_store/latest/object_store/azure/enum.AzureConfigKey.html#variants).\n\nOptions set through the URL take precedence over those set with these\noptions."
}
},
"OutputBufferConfig": {
"type": "object",
"properties": {
"enable_output_buffer": {
"type": "boolean",
"description": "Enable output buffering.\n\nThe output buffering mechanism allows decoupling the rate at which the pipeline\npushes changes to the output transport from the rate of input changes.\n\nBy default, output updates produced by the pipeline are pushed directly to\nthe output transport. Some destinations may prefer to receive updates in fewer\nbigger batches. For instance, when writing Parquet files, producing\none bigger file every few minutes is usually better than creating\nsmall files every few milliseconds.\n\nTo achieve such input/output decoupling, users can enable output buffering by\nsetting the `enable_output_buffer` flag to `true`. When buffering is enabled, output\nupdates produced by the pipeline are consolidated in an internal buffer and are\npushed to the output transport when one of several conditions is satisfied:\n\n* data has been accumulated in the buffer for more than `max_output_buffer_time_millis`\nmilliseconds.\n* buffer size exceeds `max_output_buffer_size_records` records.\n\nThis flag is `false` by default.",
"default": false
},
"max_output_buffer_size_records": {
"type": "integer",
"description": "Maximum number of updates to be kept in the output buffer.\n\nThis parameter bounds the maximal size of the buffer.\nNote that the size of the buffer is not always equal to the\ntotal number of updates output by the pipeline. Updates to the\nsame record can overwrite or cancel previous updates.\n\nBy default, the buffer can grow indefinitely until one of\nthe other output conditions is satisfied.\n\nNOTE: this configuration option requires the `enable_output_buffer` flag\nto be set.",
"default": 18446744073709551615,
"minimum": 0
},
"max_output_buffer_time_millis": {
"type": "integer",
"description": "Maximum time in milliseconds data is kept in the output buffer.\n\nBy default, data is kept in the buffer indefinitely until one of\nthe other output conditions is satisfied. When this option is\nset the buffer will be flushed at most every\n`max_output_buffer_time_millis` milliseconds.\n\nNOTE: this configuration option requires the `enable_output_buffer` flag\nto be set.",
"default": 18446744073709551615,
"minimum": 0
}
}
},
"OutputEndpointConfig": {
"allOf": [
{
"$ref": "#/components/schemas/ConnectorConfig"
},
{
"type": "object",
"required": [
"stream"
],
"properties": {
"stream": {
"type": "string",
"description": "The name of the output stream of the circuit that this endpoint is\nconnected to."
}
}
}
],
"description": "Describes an output connector configuration"
},
"PartialProgramInfo": {
"type": "object",
"description": "Program information is the result of the SQL compilation.",
"required": [
"schema",
"udf_stubs",
"input_connectors",
"output_connectors"
],
"properties": {
"input_connectors": {
"type": "object",
"description": "Input connectors derived from the schema.",
"additionalProperties": {
"$ref": "#/components/schemas/InputEndpointConfig"
}
},
"output_connectors": {
"type": "object",
"description": "Output connectors derived from the schema.",
"additionalProperties": {
"$ref": "#/components/schemas/OutputEndpointConfig"
}
},
"schema": {
"$ref": "#/components/schemas/ProgramSchema"
},
"udf_stubs": {
"type": "string",
"description": "Generated user defined function (UDF) stubs Rust code: stubs.rs"
}
}
},
"PatchPipeline": {
"type": "object",
"description": "Partially update the pipeline (PATCH).\n\nNote that the patching only applies to the main fields, not subfields.\nFor instance, it is not possible to update only the number of workers;\nit is required to again pass the whole runtime configuration with the\nchange.",
"properties": {
"description": {
"type": "string",
"nullable": true
},
"name": {
"type": "string",
"nullable": true
},
"program_code": {
"type": "string",
"nullable": true
},
"program_config": {
"allOf": [
{
"$ref": "#/components/schemas/ProgramConfig"
}
],
"nullable": true
},
"runtime_config": {
"allOf": [
{
"$ref": "#/components/schemas/RuntimeConfig"
}
],
"nullable": true
},
"udf_rust": {
"type": "string",
"nullable": true
},
"udf_toml": {
"type": "string",
"nullable": true
}
}
},
"PipelineConfig": {
"allOf": [
{
"type": "object",
"description": "Global pipeline configuration settings. This is the publicly\nexposed type for users to configure pipelines.",
"properties": {
"clock_resolution_usecs": {
"type": "integer",
"format": "int64",
"description": "Real-time clock resolution in microseconds.\n\nThis parameter controls the execution of queries that use the `NOW()` function. The output of such\nqueries depends on the real-time clock and can change over time without any external\ninputs. The pipeline will update the clock value and trigger incremental recomputation\nat most each `clock_resolution_usecs` microseconds.\n\nIt is set to 100 milliseconds (100,000 microseconds) by default.\n\nSet to `null` to disable periodic clock updates.",
"default": 100000,
"nullable": true,
"minimum": 0
},
"cpu_profiler": {
"type": "boolean",
"description": "Enable CPU profiler.\n\nThe default value is `true`.",
"default": true
},
"fault_tolerance": {
"allOf": [
{
"$ref": "#/components/schemas/FtConfig"
}
],
"default": null,
"nullable": true
},
"max_buffering_delay_usecs": {
"type": "integer",
"format": "int64",
"description": "Maximal delay in microseconds to wait for `min_batch_size_records` to\nget buffered by the controller, defaults to 0.",
"default": 0,
"minimum": 0
},
"max_parallel_connector_init": {
"type": "integer",
"format": "int64",
"description": "The maximum number of connectors initialized in parallel during pipeline\nstartup.\n\nAt startup, the pipeline must initialize all of its input and output connectors.\nDepending on the number and types of connectors, this can take a long time.\nTo accelerate the process, multiple connectors are initialized concurrently.\nThis option controls the maximum number of connectors that can be intitialized\nin parallel.\n\nThe default is 10.",
"default": null,
"nullable": true,
"minimum": 0
},
"min_batch_size_records": {
"type": "integer",
"format": "int64",
"description": "Minimal input batch size.\n\nThe controller delays pushing input records to the circuit until at\nleast `min_batch_size_records` records have been received (total\nacross all endpoints) or `max_buffering_delay_usecs` microseconds\nhave passed since at least one input records has been buffered.\nDefaults to 0.",
"default": 0,
"minimum": 0
},
"pin_cpus": {
"type": "array",
"items": {
"type": "integer",
"minimum": 0
},
"description": "Optionally, a list of CPU numbers for CPUs to which the pipeline may pin\nits worker threads. Specify at least twice as many CPU numbers as\nworkers. CPUs are generally numbered starting from 0. The pipeline\nmight not be able to honor CPU pinning requests.\n\nCPU pinning can make pipelines run faster and perform more consistently,\nas long as different pipelines running on the same machine are pinned to\ndifferent CPUs.",
"default": []
},
"provisioning_timeout_secs": {
"type": "integer",
"format": "int64",
"description": "Timeout in seconds for the `Provisioning` phase of the pipeline.\nSetting this value will override the default of the runner.",
"default": null,
"nullable": true,
"minimum": 0
},
"resources": {
"allOf": [
{
"$ref": "#/components/schemas/ResourceConfig"
}
],
"default": {
"cpu_cores_min": null,
"cpu_cores_max": null,
"memory_mb_min": null,
"memory_mb_max": null,
"storage_mb_max": null,
"storage_class": null
}
},
"storage": {
"allOf": [
{
"$ref": "#/components/schemas/StorageOptions"
}
],
"default": {
"backend": {
"name": "default"
},
"min_storage_bytes": null,
"compression": "default",
"cache_mib": null
},
"nullable": true
},
"tracing": {
"type": "boolean",
"description": "Enable pipeline tracing.",
"default": false
},
"tracing_endpoint_jaeger": {
"type": "string",
"description": "Jaeger tracing endpoint to send tracing information to.",
"default": "127.0.0.1:6831"
},
"workers": {
"type": "integer",
"format": "int32",
"description": "Number of DBSP worker threads.\n\nEach DBSP \"foreground\" worker thread is paired with a \"background\"\nthread for LSM merging, making the total number of threads twice the\nspecified number.",
"default": 8,
"minimum": 0
}
}
},
{
"type": "object",
"required": [
"inputs"
],
"properties": {
"inputs": {
"type": "object",
"description": "Input endpoint configuration.",
"additionalProperties": {
"$ref": "#/components/schemas/InputEndpointConfig"
}
},
"name": {
"type": "string",
"description": "Pipeline name.",
"nullable": true
},
"outputs": {
"type": "object",
"description": "Output endpoint configuration.",
"additionalProperties": {
"$ref": "#/components/schemas/OutputEndpointConfig"
}
},
"storage_config": {
"allOf": [
{
"$ref": "#/components/schemas/StorageConfig"
}
],
"nullable": true
}
}
}
],
"description": "Pipeline deployment configuration.\nIt represents configuration entries directly provided by the user\n(e.g., runtime configuration) and entries derived from the schema\nof the compiled program (e.g., connectors). Storage configuration,\nif applicable, is set by the runner."
},
"PipelineDesiredStatus": {
"type": "string",
"enum": [
"Shutdown",
"Paused",
"Running"
]
},
"PipelineFieldSelector": {
"type": "string",
"enum": [
"all",
"status"
]
},
"PipelineId": {
"type": "string",
"format": "uuid",
"description": "Pipeline identifier."
},
"PipelineInfo": {
"type": "object",
"description": "Pipeline information.\nIt both includes fields which are user-provided and system-generated.",
"required": [
"id",
"name",
"description",
"created_at",
"version",
"platform_version",
"runtime_config",
"program_code",
"udf_rust",
"udf_toml",
"program_config",
"program_version",
"program_status",
"program_status_since",
"program_error",
"deployment_status",
"deployment_status_since",
"deployment_desired_status",
"refresh_version"
],
"properties": {
"created_at": {
"type": "string",
"format": "date-time"
},
"deployment_desired_status": {
"$ref": "#/components/schemas/PipelineDesiredStatus"
},
"deployment_error": {
"allOf": [
{
"$ref": "#/components/schemas/ErrorResponse"
}
],
"nullable": true
},
"deployment_status": {
"$ref": "#/components/schemas/PipelineStatus"
},
"deployment_status_since": {
"type": "string",
"format": "date-time"
},
"description": {
"type": "string"
},
"id": {
"$ref": "#/components/schemas/PipelineId"
},
"name": {
"type": "string"
},
"platform_version": {
"type": "string"
},
"program_code": {
"type": "string"
},
"program_config": {
"$ref": "#/components/schemas/ProgramConfig"
},
"program_error": {
"$ref": "#/components/schemas/ProgramError"
},
"program_info": {
"allOf": [
{
"$ref": "#/components/schemas/PartialProgramInfo"
}
],
"nullable": true
},
"program_status": {
"$ref": "#/components/schemas/ProgramStatus"
},
"program_status_since": {
"type": "string",
"format": "date-time"
},
"program_version": {
"$ref": "#/components/schemas/Version"
},
"refresh_version": {
"$ref": "#/components/schemas/Version"
},
"runtime_config": {
"$ref": "#/components/schemas/RuntimeConfig"
},
"udf_rust": {
"type": "string"
},
"udf_toml": {
"type": "string"
},
"version": {
"$ref": "#/components/schemas/Version"
}
}
},
"PipelineSelectedInfo": {
"type": "object",
"description": "Pipeline information which has a selected subset of optional fields.\nIt both includes fields which are user-provided and system-generated.\nIf an optional field is not selected (i.e., is `None`), it will not be serialized.",
"required": [
"id",
"name",
"description",
"created_at",
"version",
"platform_version",
"program_version",
"program_status",
"program_status_since",
"program_error",
"deployment_status",
"deployment_status_since",
"deployment_desired_status",
"refresh_version"
],
"properties": {
"created_at": {
"type": "string",
"format": "date-time"
},
"deployment_desired_status": {
"$ref": "#/components/schemas/PipelineDesiredStatus"
},
"deployment_error": {
"allOf": [
{
"$ref": "#/components/schemas/ErrorResponse"
}
],
"nullable": true
},
"deployment_status": {
"$ref": "#/components/schemas/PipelineStatus"
},
"deployment_status_since": {
"type": "string",
"format": "date-time"
},
"description": {
"type": "string"
},
"id": {
"$ref": "#/components/schemas/PipelineId"
},
"name": {
"type": "string"
},
"platform_version": {
"type": "string"
},
"program_code": {
"type": "string",
"nullable": true
},
"program_config": {
"allOf": [
{
"$ref": "#/components/schemas/ProgramConfig"
}
],
"nullable": true
},
"program_error": {
"$ref": "#/components/schemas/ProgramError"
},
"program_info": {
"allOf": [
{
"$ref": "#/components/schemas/PartialProgramInfo"
}
],
"nullable": true
},
"program_status": {
"$ref": "#/components/schemas/ProgramStatus"
},
"program_status_since": {
"type": "string",
"format": "date-time"
},
"program_version": {
"$ref": "#/components/schemas/Version"
},
"refresh_version": {
"$ref": "#/components/schemas/Version"
},
"runtime_config": {
"allOf": [
{
"$ref": "#/components/schemas/RuntimeConfig"
}
],
"nullable": true
},
"udf_rust": {
"type": "string",
"nullable": true
},
"udf_toml": {
"type": "string",
"nullable": true
},
"version": {
"$ref": "#/components/schemas/Version"
}
}
},
"PipelineStatus": {
"type": "string",
"description": "Pipeline status.\n\nThis type represents the state of the pipeline tracked by the pipeline\nrunner and observed by the API client via the `GET /v0/pipelines/{name}` endpoint.\n\n### The lifecycle of a pipeline\n\nThe following automaton captures the lifecycle of the pipeline.\nIndividual states and transitions of the automaton are described below.\n\n* States labeled with the hourglass symbol (⌛) are **timed** states. The\nautomaton stays in timed state until the corresponding operation completes\nor until it transitions to become failed after the pre-defined timeout\nperiod expires.\n\n* State transitions labeled with API endpoint names (`/start`, `/pause`,\n`/shutdown`) are triggered by invoking corresponding endpoint,\ne.g., `POST /v0/pipelines/{name}/start`. Note that these only express\ndesired state, and are applied asynchronously by the automata.\n\n```text\nShutdown◄────────────────────┐\n│ │\n/start or /pause│ ShuttingDown ◄────── Failed\n│ ▲ ▲\n▼ /shutdown │ │\n⌛Provisioning ──────────────────┤ Shutdown, Provisioning,\n│ │ Initializing, Paused,\n│ │ Running, Unavailable\n▼ │ (all states except ShuttingDown\n⌛Initializing ──────────────────┤ can transition to Failed)\n│ │\n┌─────────┼────────────────────────┴─┐\n│ ▼ │\n│ Paused ◄──────► Unavailable │\n│ │ ▲ ▲ │\n│ /start│ │/pause │ │\n│ ▼ │ │ │\n│ Running ◄──────────────┘ │\n└────────────────────────────────────┘\n```\n\n### Desired and actual status\n\nWe use the desired state model to manage the lifecycle of a pipeline.\nIn this model, the pipeline has two status attributes associated with\nit at runtime: the **desired** status, which represents what the user\nwould like the pipeline to do, and the **current** status, which\nrepresents the actual state of the pipeline. 
The pipeline runner\nservice continuously monitors both fields and steers the pipeline\ntowards the desired state specified by the user.\nOnly three of the states in the pipeline automaton above can be\nused as desired statuses: `Paused`, `Running`, and `Shutdown`.\nThese statuses are selected by invoking REST endpoints shown\nin the diagram.\n\nThe user can monitor the current state of the pipeline via the\n`GET /v0/pipelines/{name}` endpoint. In a typical scenario,\nthe user first sets the desired state, e.g., by invoking the\n`/start` endpoint, and then polls the `GET /v0/pipelines/{name}`\nendpoint to monitor the actual status of the pipeline until its\n`deployment_status` attribute changes to `Running` indicating\nthat the pipeline has been successfully initialized and is\nprocessing data, or `Failed`, indicating an error.",
"enum": [
"Shutdown",
"Provisioning",
"Initializing",
"Paused",
"Running",
"Unavailable",
"Failed",
"ShuttingDown"
]
},
"PostPutPipeline": {
"type": "object",
"description": "Create a new pipeline (POST), or fully update an existing pipeline (PUT).\nFields which are optional and not provided will be set to their empty type value\n(for strings: an empty string `\"\"`, for objects: an empty dictionary `{}`).",
"required": [
"name",
"program_code"
],
"properties": {
"description": {
"type": "string",
"nullable": true
},
"name": {
"type": "string"
},
"program_code": {
"type": "string"
},
"program_config": {
"allOf": [
{
"$ref": "#/components/schemas/ProgramConfig"
}
],
"nullable": true
},
"runtime_config": {
"allOf": [
{
"$ref": "#/components/schemas/RuntimeConfig"
}
],
"nullable": true
},
"udf_rust": {
"type": "string",
"nullable": true
},
"udf_toml": {
"type": "string",
"nullable": true
}
}
},
"PostgresReaderConfig": {
"type": "object",
"description": "Postgres input connector configuration.",
"required": [
"uri",
"query"
],
"properties": {
"query": {
"type": "string",
"description": "Query that specifies what data to fetch from postgres."
},
"uri": {
"type": "string",
"description": "Postgres URI."
}
}
},
"ProgramConfig": {
"type": "object",
"description": "Program configuration.",
"properties": {
"cache": {
"type": "boolean",
"description": "If `true` (default), when a prior compilation with the same checksum\nalready exists, the output of that (i.e., binary) is used.\nSet `false` to always trigger a new compilation, which might take longer\nand as well can result in overriding an existing binary.",
"default": true
},
"profile": {
"allOf": [
{
"$ref": "#/components/schemas/CompilationProfile"
}
],
"default": null,
"nullable": true
}
}
},
"ProgramError": {
"type": "object",
"description": "Log, warning and error information about the program compilation.",
"properties": {
"rust_compilation": {
"allOf": [
{
"$ref": "#/components/schemas/RustCompilationInfo"
}
],
"nullable": true
},
"sql_compilation": {
"allOf": [
{
"$ref": "#/components/schemas/SqlCompilationInfo"
}
],
"nullable": true
},
"system_error": {
"type": "string",
"description": "System error that occurred.\n- Set `Some(...)` upon transition to `SystemError`\n- Set `None` upon transition to `Pending`",
"nullable": true
}
}
},
"ProgramSchema": {
"type": "object",
"description": "A struct containing the tables (inputs) and views for a program.\n\nParse from the JSON data-type of the DDL generated by the SQL compiler.",
"required": [
"inputs",
"outputs"
],
"properties": {
"inputs": {
"type": "array",
"items": {
"$ref": "#/components/schemas/Relation"
}
},
"outputs": {
"type": "array",
"items": {
"$ref": "#/components/schemas/Relation"
}
}
}
},
"ProgramStatus": {
"type": "string",
"description": "Program compilation status.",
"enum": [
"Pending",
"CompilingSql",
"SqlCompiled",
"CompilingRust",
"Success",
"SqlError",
"RustError",
"SystemError"
]
},
"PropertyValue": {
"type": "object",
"required": [
"value",
"key_position",
"value_position"
],
"properties": {
"key_position": {
"$ref": "#/components/schemas/SourcePosition"
},
"value": {
"type": "string"
},
"value_position": {
"$ref": "#/components/schemas/SourcePosition"
}
}
},
"ProviderAwsCognito": {
"type": "object",
"required": [
"jwk_uri",
"login_url",
"logout_url"
],
"properties": {
"jwk_uri": {
"type": "string"
},
"login_url": {
"type": "string"
},
"logout_url": {
"type": "string"
}
}
},
"ProviderGoogleIdentity": {
"type": "object",
"required": [
"jwk_uri",
"client_id"
],
"properties": {
"client_id": {
"type": "string"
},
"jwk_uri": {
"type": "string"
}
}
},
"PubSubInputConfig": {
"type": "object",
"description": "Google Pub/Sub input connector configuration.",
"required": [
"subscription"
],
"properties": {
"connect_timeout_seconds": {
"type": "integer",
"format": "int32",
"description": "gRPC connection timeout.",
"nullable": true,
"minimum": 0
},
"credentials": {
"type": "string",
"description": "The content of a Google Cloud credentials JSON file.\n\nWhen this option is specified, the connector will use the provided credentials for\nauthentication. Otherwise, it will use Application Default Credentials (ADC) configured\nin the environment where the Feldera service is running. See\n[Google Cloud documentation](https://cloud.google.com/docs/authentication/provide-credentials-adc)\nfor information on configuring application default credentials.\n\nWhen running Feldera in an environment where ADC are not configured,\ne.g., a Docker container, use this option to ship Google Cloud credentials from another environment.\nFor example, if you use the\n[`gcloud auth application-default login`](https://cloud.google.com/pubsub/docs/authentication#client-libs)\ncommand for authentication in your local development environment, ADC are stored in the\n`.config/gcloud/application_default_credentials.json` file in your home directory.",
"nullable": true
},
"emulator": {
"type": "string",
"description": "Set in order to use a Pub/Sub [emulator](https://cloud.google.com/pubsub/docs/emulator)\ninstead of the production service, e.g., 'localhost:8681'.",
"nullable": true
},
"endpoint": {
"type": "string",
"description": "Override the default service endpoint 'pubsub.googleapis.com'",
"nullable": true
},
"pool_size": {
"type": "integer",
"format": "int32",
"description": "gRPC channel pool size.",
"nullable": true,
"minimum": 0
},
"project_id": {
"type": "string",
"description": "Google Cloud project_id.\n\nWhen not specified, the connector will use the project id associated\nwith the authenticated account.",
"nullable": true
},
"snapshot": {
"type": "string",
"description": "Reset subscription's backlog to a given snapshot on startup,\nusing the Pub/Sub `Seek` API.\n\nThis option is mutually exclusive with the `timestamp` option.",
"nullable": true
},
"subscription": {
"type": "string",
"description": "Subscription name."
},
"timeout_seconds": {
"type": "integer",
"format": "int32",
"description": "gRPC request timeout.",
"nullable": true,
"minimum": 0
},
"timestamp": {
"type": "string",
"description": "Reset subscription's backlog to a given timestamp on startup,\nusing the Pub/Sub `Seek` API.\n\nThe value of this option is an ISO 8601-encoded UTC time, e.g., \"2024-08-17T16:39:57-08:00\".\n\nThis option is mutually exclusive with the `snapshot` option.",
"nullable": true
}
}
},
"RedisOutputConfig": {
"type": "object",
"description": "Redis output connector configuration.",
"required": [
"connection_string"
],
"properties": {
"connection_string": {
"type": "string",
"description": "The URL format: `redis://[<username>][:<password>@]<hostname>[:port][/[<db>][?protocol=<protocol>]]`\nThis is parsed by the [redis](https://docs.rs/redis/latest/redis/#connection-parameters) crate."
},
"key_separator": {
"type": "string",
"description": "Separator used to join multiple components into a single key.\n\":\" by default."
}
}
},
"Relation": {
"allOf": [
{
"$ref": "#/components/schemas/SqlIdentifier"
},
{
"type": "object",
"required": [
"fields"
],
"properties": {
"fields": {
"type": "array",
"items": {
"$ref": "#/components/schemas/Field"
}
},
"materialized": {
"type": "boolean"
},
"properties": {
"type": "object",
"additionalProperties": {
"$ref": "#/components/schemas/PropertyValue"
}
}
}
}
],
"description": "A SQL table or view. It has a name and a list of fields.\n\nMatches the Calcite JSON format."
},
"ResourceConfig": {
"type": "object",
"properties": {
"cpu_cores_max": {
"type": "integer",
"format": "int64",
"description": "The maximum number of CPU cores to reserve\nfor an instance of this pipeline",
"default": null,
"nullable": true,
"minimum": 0
},
"cpu_cores_min": {
"type": "integer",
"format": "int64",
"description": "The minimum number of CPU cores to reserve\nfor an instance of this pipeline",
"default": null,
"nullable": true,
"minimum": 0
},
"memory_mb_max": {
"type": "integer",
"format": "int64",
"description": "The maximum memory in Megabytes to reserve\nfor an instance of this pipeline",
"default": null,
"nullable": true,
"minimum": 0
},
"memory_mb_min": {
"type": "integer",
"format": "int64",
"description": "The minimum memory in Megabytes to reserve\nfor an instance of this pipeline",
"default": null,
"nullable": true,
"minimum": 0
},
"storage_class": {
"type": "string",
"description": "Storage class to use for an instance of this pipeline.\nThe class determines storage performance such as IOPS and throughput.",
"default": null,
"nullable": true
},
"storage_mb_max": {
"type": "integer",
"format": "int64",
"description": "The total storage in Megabytes to reserve\nfor an instance of this pipeline",
"default": null,
"nullable": true,
"minimum": 0
}
}
},
"RestCatalogConfig": {
"type": "object",
"description": "Iceberg REST catalog config.",
"properties": {
"rest.audience": {
"type": "string",
"description": "Logical name of target resource or service.",
"nullable": true
},
"rest.credential": {
"type": "string",
"description": "Credential to use for OAuth2 credential flow when initializing the catalog.\n\nA key and secret pair separated by \":\" (key is optional).",
"nullable": true
},
"rest.headers": {
"type": "array",
"items": {
"type": "array",
"items": {
"allOf": [
{
"type": "string"
},
{
"type": "string"
}
]
}
},
"description": "Additional HTTP request headers added to each catalog REST API call.",
"nullable": true
},
"rest.oauth2-server-uri": {
"type": "string",
"description": "Authentication URL to use for client credentials authentication (default: uri + 'v1/oauth/tokens')",
"nullable": true
},
"rest.prefix": {
"type": "string",
"description": "Customize table storage paths.\n\nWhen combined with the `warehouse` property, the prefix determines\nhow table data is organized within the storage.",
"nullable": true
},
"rest.resource": {
"type": "string",
"description": "URI for the target resource or service.",
"nullable": true
},
"rest.scope": {
"type": "string",
"nullable": true
},
"rest.token": {
"type": "string",
"description": "Bearer token value to use for `Authorization` header.",
"nullable": true
},
"rest.uri": {
"type": "string",
"description": "URI identifying the REST catalog server.",
"nullable": true
},
"rest.warehouse": {
"type": "string",
"description": "The default location for managed tables created by the catalog.",
"nullable": true
}
}
},
"RngFieldSettings": {
"type": "object",
"description": "Configuration for generating random data for a field of a table.",
"properties": {
"e": {
"type": "integer",
"format": "int64",
"description": "The frequency rank exponent for the Zipf distribution.\n\n- This value is only used if the strategy is set to `Zipf`.\n- The default value is 1.0.",
"default": 1
},
"fields": {
"type": "object",
"description": "Specifies the values that the generator should produce in case the field is a struct type.",
"default": null,
"additionalProperties": {
"$ref": "#/components/schemas/RngFieldSettings"
},
"nullable": true
},
"key": {
"allOf": [
{
"$ref": "#/components/schemas/RngFieldSettings"
}
],
"default": null,
"nullable": true
},
"null_percentage": {
"type": "integer",
"description": "Percentage of records where this field should be set to NULL.\n\nIf not set, the generator will produce only records with non-NULL values.\nIf set to `1..=100`, the generator will produce records with NULL values with the specified percentage.",
"default": null,
"nullable": true,
"minimum": 0
},
"range": {
"type": "object",
"description": "An optional, exclusive range [a, b) to limit the range of values the generator should produce.\n\n- For integer/floating point types specifies min/max values as an integer.\nIf not set, the generator will produce values for the entire range of the type for number types.\n- For string/binary types specifies min/max length as an integer, values are required to be >=0.\nIf not set, a range of [0, 25) is used by default.\n- For timestamp types specifies the min/max as two strings in the RFC 3339 format\n(e.g., [\"2021-01-01T00:00:00Z\", \"2022-01-02T00:00:00Z\"]).\nAlternatively, the range values can be specified as a number of non-leap\nmilliseconds since January 1, 1970 0:00:00.000 UTC (aka “UNIX timestamp”).\nIf not set, a range of [\"1970-01-01T00:00:00Z\", \"2100-01-01T00:00:00Z\") or [0, 4102444800000)\nis used by default.\n- For time types specifies the min/max as two strings in the \"HH:MM:SS\" format.\nAlternatively, the range values can be specified in milliseconds as two positive integers.\nIf not set, the range is 24h.\n- For date types, the min/max range is specified as two strings in the \"YYYY-MM-DD\" format.\nAlternatively, two integers that represent number of days since January 1, 1970 can be used.\nIf not set, a range of [\"1970-01-01\", \"2100-01-01\") or [0, 54787) is used by default.\n- For array types specifies the min/max number of elements as an integer.\nIf not set, a range of [0, 5) is used by default. Range values are required to be >=0.\n- For map types specifies the min/max number of key-value pairs as an integer.\nIf not set, a range of [0, 5) is used by default.\n- For struct/boolean/null types `range` is ignored."
},
"scale": {
"type": "integer",
"format": "int64",
"description": "A scale factor to apply a multiplier to the generated value.\n\n- For integer/floating point types, the value is multiplied by the scale factor.\n- For timestamp types, the generated value (milliseconds) is multiplied by the scale factor.\n- For time types, the generated value (milliseconds) is multiplied by the scale factor.\n- For date types, the generated value (days) is multiplied by the scale factor.\n- For string/binary/array/map/struct/boolean/null types, the scale factor is ignored.\n\n- If `values` is specified, the scale factor is ignored.\n- If `range` is specified and the range is required to be positive (struct, map, array etc.)\nthe scale factor is required to be positive too.\n\nThe default scale factor is 1.",
"default": 1
},
"strategy": {
"allOf": [
{
"$ref": "#/components/schemas/DatagenStrategy"
}
],
"default": "increment"
},
"value": {
"allOf": [
{
"$ref": "#/components/schemas/RngFieldSettings"
}
],
"default": null,
"nullable": true
},
"values": {
"type": "array",
"items": {
"type": "object"
},
"description": "An optional set of values the generator will pick from.\n\nIf set, the generator will pick values from the specified set.\nIf not set, the generator will produce values according to the specified range.\nIf set to an empty set, the generator will produce NULL values.\nIf set to a single value, the generator will produce only that value.\n\nNote that `range` is ignored if `values` is set.",
"default": null,
"nullable": true
}
},
"additionalProperties": false
},
"RuntimeConfig": {
"type": "object",
"description": "Global pipeline configuration settings. This is the publicly\nexposed type for users to configure pipelines.",
"properties": {
"clock_resolution_usecs": {
"type": "integer",
"format": "int64",
"description": "Real-time clock resolution in microseconds.\n\nThis parameter controls the execution of queries that use the `NOW()` function. The output of such\nqueries depends on the real-time clock and can change over time without any external\ninputs. The pipeline will update the clock value and trigger incremental recomputation\nat most each `clock_resolution_usecs` microseconds.\n\nIt is set to 100 milliseconds (100,000 microseconds) by default.\n\nSet to `null` to disable periodic clock updates.",
"default": 100000,
"nullable": true,
"minimum": 0
},
"cpu_profiler": {
"type": "boolean",
"description": "Enable CPU profiler.\n\nThe default value is `true`.",
"default": true
},
"fault_tolerance": {
"allOf": [
{
"$ref": "#/components/schemas/FtConfig"
}
],
"default": null,
"nullable": true
},
"max_buffering_delay_usecs": {
"type": "integer",
"format": "int64",
"description": "Maximal delay in microseconds to wait for `min_batch_size_records` to\nget buffered by the controller, defaults to 0.",
"default": 0,
"minimum": 0
},
"max_parallel_connector_init": {
"type": "integer",
"format": "int64",
"description": "The maximum number of connectors initialized in parallel during pipeline\nstartup.\n\nAt startup, the pipeline must initialize all of its input and output connectors.\nDepending on the number and types of connectors, this can take a long time.\nTo accelerate the process, multiple connectors are initialized concurrently.\nThis option controls the maximum number of connectors that can be intitialized\nin parallel.\n\nThe default is 10.",
"default": null,
"nullable": true,
"minimum": 0
},
"min_batch_size_records": {
"type": "integer",
"format": "int64",
"description": "Minimal input batch size.\n\nThe controller delays pushing input records to the circuit until at\nleast `min_batch_size_records` records have been received (total\nacross all endpoints) or `max_buffering_delay_usecs` microseconds\nhave passed since at least one input records has been buffered.\nDefaults to 0.",
"default": 0,
"minimum": 0
},
"pin_cpus": {
"type": "array",
"items": {
"type": "integer",
"minimum": 0
},
"description": "Optionally, a list of CPU numbers for CPUs to which the pipeline may pin\nits worker threads. Specify at least twice as many CPU numbers as\nworkers. CPUs are generally numbered starting from 0. The pipeline\nmight not be able to honor CPU pinning requests.\n\nCPU pinning can make pipelines run faster and perform more consistently,\nas long as different pipelines running on the same machine are pinned to\ndifferent CPUs.",
"default": []
},
"provisioning_timeout_secs": {
"type": "integer",
"format": "int64",
"description": "Timeout in seconds for the `Provisioning` phase of the pipeline.\nSetting this value will override the default of the runner.",
"default": null,
"nullable": true,
"minimum": 0
},
"resources": {
"allOf": [
{
"$ref": "#/components/schemas/ResourceConfig"
}
],
"default": {
"cpu_cores_min": null,
"cpu_cores_max": null,
"memory_mb_min": null,
"memory_mb_max": null,
"storage_mb_max": null,
"storage_class": null
}
},
"storage": {
"allOf": [
{
"$ref": "#/components/schemas/StorageOptions"
}
],
"default": {
"backend": {
"name": "default"
},
"min_storage_bytes": null,
"compression": "default",
"cache_mib": null
},
"nullable": true
},
"tracing": {
"type": "boolean",
"description": "Enable pipeline tracing.",
"default": false
},
"tracing_endpoint_jaeger": {
"type": "string",
"description": "Jaeger tracing endpoint to send tracing information to.",
"default": "127.0.0.1:6831"
},
"workers": {
"type": "integer",
"format": "int32",
"description": "Number of DBSP worker threads.\n\nEach DBSP \"foreground\" worker thread is paired with a \"background\"\nthread for LSM merging, making the total number of threads twice the\nspecified number.",
"default": 8,
"minimum": 0
}
}
},
"RustCompilationInfo": {
"type": "object",
"description": "Rust compilation information.",
"required": [
"exit_code",
"stdout",
"stderr"
],
"properties": {
"exit_code": {
"type": "integer",
"format": "int32",
"description": "Exit code of the `cargo` compilation command."
},
"stderr": {
"type": "string",
"description": "Output printed to stderr by the `cargo` compilation command."
},
"stdout": {
"type": "string",
"description": "Output printed to stdout by the `cargo` compilation command."
}
}
},
"S3InputConfig": {
"type": "object",
"description": "Configuration for reading data from AWS S3.",
"required": [
"region",
"bucket_name"
],
"properties": {
"aws_access_key_id": {
"type": "string",
"description": "AWS Access Key ID. This property must be specified unless `no_sign_request` is set to `true`.",
"nullable": true
},
"aws_secret_access_key": {
"type": "string",
"description": "Secret Access Key. This property must be specified unless `no_sign_request` is set to `true`.",
"nullable": true
},
"bucket_name": {
"type": "string",
"description": "S3 bucket name to access."
},
"endpoint_url": {
"type": "string",
"description": "The endpoint URL used to communicate with this service. Can be used to make this connector\ntalk to non-AWS services with an S3 API.",
"nullable": true
},
"key": {
"type": "string",
"description": "Read a single object specified by a key.",
"nullable": true
},
"no_sign_request": {
"type": "boolean",
"description": "Do not sign requests. This is equivalent to the `--no-sign-request` flag in the AWS CLI."
},
"prefix": {
"type": "string",
"description": "Read all objects whose keys match a prefix. Set to an empty string to read all objects in the bucket.",
"nullable": true
},
"region": {
"type": "string",
"description": "AWS region."
}
}
},
"SourcePosition": {
"type": "object",
"required": [
"start_line_number",
"start_column",
"end_line_number",
"end_column"
],
"properties": {
"end_column": {
"type": "integer",
"minimum": 0
},
"end_line_number": {
"type": "integer",
"minimum": 0
},
"start_column": {
"type": "integer",
"minimum": 0
},
"start_line_number": {
"type": "integer",
"minimum": 0
}
}
},
"SqlCompilationInfo": {
"type": "object",
"description": "SQL compilation information.",
"required": [
"exit_code",
"messages"
],
"properties": {
"exit_code": {
"type": "integer",
"format": "int32",
"description": "Exit code of the SQL compiler."
},
"messages": {
"type": "array",
"items": {
"$ref": "#/components/schemas/SqlCompilerMessage"
},
"description": "Messages (warnings and errors) generated by the SQL compiler."
}
}
},
"SqlCompilerMessage": {
"type": "object",
"description": "A SQL compiler error.\n\nThe SQL compiler returns a list of errors in the following JSON format if\nit's invoked with the `-je` option.\n\n```ignore\n[ {\n\"start_line_number\" : 2,\n\"start_column\" : 4,\n\"end_line_number\" : 2,\n\"end_column\" : 8,\n\"warning\" : false,\n\"error_type\" : \"PRIMARY KEY cannot be nullable\",\n\"message\" : \"PRIMARY KEY column 'C' has type INTEGER, which is nullable\",\n\"snippet\" : \" 2| c INT PRIMARY KEY\\n ^^^^^\\n 3|);\\n\"\n} ]\n```",
"required": [
"start_line_number",
"start_column",
"end_line_number",
"end_column",
"warning",
"error_type",
"message"
],
"properties": {
"end_column": {
"type": "integer",
"minimum": 0
},
"end_line_number": {
"type": "integer",
"minimum": 0
},
"error_type": {
"type": "string"
},
"message": {
"type": "string"
},
"snippet": {
"type": "string",
"nullable": true
},
"start_column": {
"type": "integer",
"minimum": 0
},
"start_line_number": {
"type": "integer",
"minimum": 0
},
"warning": {
"type": "boolean"
}
}
},
"SqlIdentifier": {
"type": "object",
"description": "An SQL identifier.\n\nThis struct is used to represent SQL identifiers in a canonical form.\nWe store table names or field names as identifiers in the schema.",
"required": [
"name",
"case_sensitive"
],
"properties": {
"case_sensitive": {
"type": "boolean"
},
"name": {
"type": "string"
}
}
},
"SqlType": {
"oneOf": [
{
"type": "string",
"description": "SQL `BOOLEAN` type.",
"enum": [
"Boolean"
]
},
{
"type": "string",
"description": "SQL `TINYINT` type.",
"enum": [
"TinyInt"
]
},
{
"type": "string",
"description": "SQL `SMALLINT` or `INT2` type.",
"enum": [
"SmallInt"
]
},
{
"type": "string",
"description": "SQL `INTEGER`, `INT`, `SIGNED`, `INT4` type.",
"enum": [
"Int"
]
},
{
"type": "string",
"description": "SQL `BIGINT` or `INT64` type.",
"enum": [
"BigInt"
]
},
{
"type": "string",
"description": "SQL `REAL` or `FLOAT4` or `FLOAT32` type.",
"enum": [
"Real"
]
},
{
"type": "string",
"description": "SQL `DOUBLE` or `FLOAT8` or `FLOAT64` type.",
"enum": [
"Double"
]
},
{
"type": "string",
"description": "SQL `DECIMAL` or `DEC` or `NUMERIC` type.",
"enum": [
"Decimal"
]
},
{
"type": "string",
"description": "SQL `CHAR(n)` or `CHARACTER(n)` type.",
"enum": [
"Char"
]
},
{
"type": "string",
"description": "SQL `VARCHAR`, `CHARACTER VARYING`, `TEXT`, or `STRING` type.",
"enum": [
"Varchar"
]
},
{
"type": "string",
"description": "SQL `BINARY(n)` type.",
"enum": [
"Binary"
]
},
{
"type": "string",
"description": "SQL `VARBINARY` or `BYTEA` type.",
"enum": [
"Varbinary"
]
},
{
"type": "string",
"description": "SQL `TIME` type.",
"enum": [
"Time"
]
},
{
"type": "string",
"description": "SQL `DATE` type.",
"enum": [
"Date"
]
},
{
"type": "string",
"description": "SQL `TIMESTAMP` type.",
"enum": [
"Timestamp"
]
},
{
"type": "object",
"required": [
"Interval"
],
"properties": {
"Interval": {
"$ref": "#/components/schemas/IntervalUnit"
}
}
},
{
"type": "string",
"description": "SQL `ARRAY` type.",
"enum": [
"Array"
]
},
{
"type": "string",
"description": "A complex SQL struct type (`CREATE TYPE x ...`).",
"enum": [
"Struct"
]
},
{
"type": "string",
"description": "SQL `MAP` type.",
"enum": [
"Map"
]
},
{
"type": "string",
"description": "SQL `NULL` type.",
"enum": [
"Null"
]
},
{
"type": "string",
"description": "SQL `UUID` type.",
"enum": [
"Uuid"
]
},
{
"type": "string",
"description": "SQL `VARIANT` type.",
"enum": [
"Variant"
]
}
],
"description": "The available SQL types as specified in `CREATE` statements."
},
"StorageBackendConfig": {
"oneOf": [
{
"type": "object",
"required": [
"name"
],
"properties": {
"name": {
"type": "string",
"enum": [
"default"
]
}
}
},
{
"type": "object",
"required": [
"name",
"config"
],
"properties": {
"config": {
"$ref": "#/components/schemas/ObjectStorageConfig"
},
"name": {
"type": "string",
"enum": [
"object"
]
}
}
}
],
"description": "Backend storage configuration.",
"discriminator": {
"propertyName": "name"
}
},
"StorageCacheConfig": {
"type": "string",
"description": "How to cache access to storage within a Feldera pipeline.",
"enum": [
"page_cache",
"feldera_cache"
]
},
"StorageCompression": {
"type": "string",
"description": "Storage compression algorithm.",
"enum": [
"default",
"none",
"snappy"
]
},
"StorageConfig": {
"type": "object",
"description": "Configuration for persistent storage in a [`PipelineConfig`].",
"required": [
"path"
],
"properties": {
"cache": {
"$ref": "#/components/schemas/StorageCacheConfig"
},
"path": {
"type": "string",
"description": "A directory to keep pipeline state, as a path on the filesystem of the\nmachine or container where the pipeline will run.\n\nWhen storage is enabled, this directory stores the data for\nthe `default` storage backend.\n\nWhen fault tolerance is enabled, this directory stores checkpoints and\nthe log."
}
}
},
"StorageOptions": {
"type": "object",
"description": "Storage configuration for a pipeline.",
"properties": {
"backend": {
"allOf": [
{
"$ref": "#/components/schemas/StorageBackendConfig"
}
],
"default": {
"name": "default"
}
},
"cache_mib": {
"type": "integer",
"description": "The maximum size of the in-memory storage cache, in MiB.\n\nIf set, the specified cache size is spread across all the foreground and\nbackground threads. If unset, each foreground or background thread cache\nis limited to 256 MiB.",
"default": null,
"nullable": true,
"minimum": 0
},
"compression": {
"allOf": [
{
"$ref": "#/components/schemas/StorageCompression"
}
],
"default": "default"
},
"min_storage_bytes": {
"type": "integer",
"description": "The minimum estimated number of bytes in a batch of data to write it to\nstorage. This is provided for debugging and fine-tuning and should\nordinarily be left unset.\n\nA value of 0 will write even empty batches to storage, and nonzero\nvalues provide a threshold. `usize::MAX` would effectively disable\nstorage.\n\nThe default is 1,048,576 (1 MiB).",
"default": null,
"nullable": true,
"minimum": 0
}
}
},
"TransportConfig": {
"oneOf": [
{
"type": "object",
"required": [
"name",
"config"
],
"properties": {
"config": {
"$ref": "#/components/schemas/FileInputConfig"
},
"name": {
"type": "string",
"enum": [
"file_input"
]
}
}
},
{
"type": "object",
"required": [
"name",
"config"
],
"properties": {
"config": {
"$ref": "#/components/schemas/FileOutputConfig"
},
"name": {
"type": "string",
"enum": [
"file_output"
]
}
}
},
{
"type": "object",
"required": [
"name",
"config"
],
"properties": {
"config": {
"$ref": "#/components/schemas/KafkaInputConfig"
},
"name": {
"type": "string",
"enum": [
"kafka_input"
]
}
}
},
{
"type": "object",
"required": [
"name",
"config"
],
"properties": {
"config": {
"$ref": "#/components/schemas/KafkaOutputConfig"
},
"name": {
"type": "string",
"enum": [
"kafka_output"
]
}
}
},
{
"type": "object",
"required": [
"name",
"config"
],
"properties": {
"config": {
"$ref": "#/components/schemas/PubSubInputConfig"
},
"name": {
"type": "string",
"enum": [
"pub_sub_input"
]
}
}
},
{
"type": "object",
"required": [
"name",
"config"
],
"properties": {
"config": {
"$ref": "#/components/schemas/UrlInputConfig"
},
"name": {
"type": "string",
"enum": [
"url_input"
]
}
}
},
{
"type": "object",
"required": [
"name",
"config"
],
"properties": {
"config": {
"$ref": "#/components/schemas/S3InputConfig"
},
"name": {
"type": "string",
"enum": [
"s3_input"
]
}
}
},
{
"type": "object",
"required": [
"name",
"config"
],
"properties": {
"config": {
"$ref": "#/components/schemas/DeltaTableReaderConfig"
},
"name": {
"type": "string",
"enum": [
"delta_table_input"
]
}
}
},
{
"type": "object",
"required": [
"name",
"config"
],
"properties": {
"config": {
"$ref": "#/components/schemas/DeltaTableWriterConfig"
},
"name": {
"type": "string",
"enum": [
"delta_table_output"
]
}
}
},
{
"type": "object",
"required": [
"name",
"config"
],
"properties": {
"config": {
"$ref": "#/components/schemas/RedisOutputConfig"
},
"name": {
"type": "string",
"enum": [
"redis_output"
]
}
}
},
{
"type": "object",
"required": [
"name",
"config"
],
"properties": {
"config": {
"$ref": "#/components/schemas/IcebergReaderConfig"
},
"name": {
"type": "string",
"enum": [
"iceberg_input"
]
}
}
},
{
"type": "object",
"required": [
"name",
"config"
],
"properties": {
"config": {
"$ref": "#/components/schemas/PostgresReaderConfig"
},
"name": {
"type": "string",
"enum": [
"postgres_input"
]
}
}
},
{
"type": "object",
"required": [
"name",
"config"
],
"properties": {
"config": {
"$ref": "#/components/schemas/DatagenInputConfig"
},
"name": {
"type": "string",
"enum": [
"datagen"
]
}
}
},
{
"type": "object",
"required": [
"name",
"config"
],
"properties": {
"config": {
"$ref": "#/components/schemas/NexmarkInputConfig"
},
"name": {
"type": "string",
"enum": [
"nexmark"
]
}
}
},
{
"type": "object",
"required": [
"name",
"config"
],
"properties": {
"config": {
"$ref": "#/components/schemas/HttpInputConfig"
},
"name": {
"type": "string",
"enum": [
"http_input"
]
}
}
},
{
"type": "object",
"required": [
"name"
],
"properties": {
"name": {
"type": "string",
"enum": [
"http_output"
]
}
}
},
{
"type": "object",
"required": [
"name",
"config"
],
"properties": {
"config": {
"$ref": "#/components/schemas/AdHocInputConfig"
},
"name": {
"type": "string",
"enum": [
"ad_hoc_input"
]
}
}
}
],
"description": "Transport-specific endpoint configuration passed to\n`crate::OutputTransport::new_endpoint`\nand `crate::InputTransport::new_endpoint`.",
"discriminator": {
"propertyName": "name"
}
},
"UpdateInformation": {
"type": "object",
"required": [
"latest_version",
"is_latest_version",
"instructions_url",
"remind_schedule"
],
"properties": {
"instructions_url": {
"type": "string",
"description": "URL that navigates the user to instructions on how to update their deployment's version"
},
"is_latest_version": {
"type": "boolean",
"description": "Whether the current version matches the latest version"
},
"latest_version": {
"type": "string",
"description": "Latest version corresponding to the edition"
},
"remind_schedule": {
"$ref": "#/components/schemas/DisplaySchedule"
}
}
},
"UrlInputConfig": {
"type": "object",
"description": "Configuration for reading data from an HTTP or HTTPS URL with\n`UrlInputTransport`.",
"required": [
"path"
],
"properties": {
"path": {
"type": "string",
"description": "URL."
},
"pause_timeout": {
"type": "integer",
"format": "int32",
"description": "Timeout before disconnection when paused, in seconds.\n\nIf the pipeline is paused, or if the input adapter reads data faster\nthan the pipeline can process it, then the controller will pause the\ninput adapter. If the input adapter stays paused longer than this\ntimeout, it will drop the network connection to the server. It will\nautomatically reconnect when the input adapter starts running again.",
"minimum": 0
}
}
},
"Version": {
"type": "integer",
"format": "int64",
"description": "Version number."
}
},
"securitySchemes": {
"JSON web token (JWT) or API key": {
"type": "http",
"scheme": "bearer",
"bearerFormat": "JWT",
"description": "Use a JWT token obtained via an OAuth2/OIDC\n login workflow or an API key obtained via\n the `/v0/api_keys` endpoint."
}
}
},
"tags": [
{
"name": "Pipeline management",
"description": "Create, retrieve, update, delete and deploy pipelines."
},
{
"name": "Pipeline interaction",
"description": "Interact with deployed pipelines."
},
{
"name": "Configuration",
"description": "Retrieve configuration."
},
{
"name": "API keys",
"description": "Create, retrieve and delete API keys."
},
{
"name": "Metrics",
"description": "Retrieve metrics across pipelines."
}
]
}