#![allow(unexpected_cfgs)]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(docsrs, allow(unused_attributes))]
use crate::error::{ConfigError, Error, Result};
pub use engine::PluginEngine;
pub use engine::QueryBuilder;
pub use hipcheck_common::types::LogLevel;
use schemars::Schema as JsonSchema;
use serde_json::Value as JsonValue;
pub use server::PluginServer;
use std::result::Result as StdResult;
use std::str::FromStr;
#[cfg(feature = "macros")]
#[cfg_attr(docsrs, doc(cfg(feature = "macros")))]
/// Procedural macros for plugin authors, re-exported from the
/// `hipcheck_sdk_macros` crate (gated behind the `macros` feature).
pub mod macros {
	pub use hipcheck_sdk_macros::*;
}
/// Trait for a plugin's configuration type, converting the raw JSON
/// configuration object into a typed value.
///
/// NOTE(review): the `'de` lifetime parameter is not used by this
/// signature — presumably reserved for borrowed deserialization by
/// implementors; confirm before removing.
pub trait PluginConfig<'de> {
	/// Build the configuration from the JSON value supplied by Hipcheck
	/// core, or report a `ConfigError` describing what was invalid.
	fn deserialize(config: &serde_json::Value) -> StdResult<Self, ConfigError>
	where
		Self: Sized;
}
#[cfg(feature = "print-timings")]
mod benchmarking;
mod engine;
pub mod error;
mod server;
#[cfg(feature = "mock_engine")]
#[cfg_attr(docsrs, doc(cfg(feature = "mock_engine")))]
/// Test-only helpers for mocking plugin engine responses
/// (gated behind the `mock_engine` feature).
pub mod mock {
	pub use crate::engine::MockResponses;
}
pub mod types;
/// Convenience re-exports: `use hipcheck_sdk::prelude::*;` brings in
/// everything a typical plugin implementation needs.
pub mod prelude {
	pub use crate::deps::*;
	pub use crate::engine::{PluginEngine, QueryBuilder};
	pub use crate::error::{ConfigError, ConfigResult, Error, Result};
	pub use crate::server::{Host, PluginServer, QueryResult};
	pub use crate::types::{KnownRemote, RemoteGitRepo};
	pub use crate::{DynQuery, NamedQuery, Plugin, Query, QuerySchema, QueryTarget};
	// Only available when the corresponding feature is enabled.
	#[cfg(feature = "macros")]
	#[cfg_attr(docsrs, doc(cfg(feature = "macros")))]
	pub use crate::macros::{queries, query};
	#[cfg(feature = "mock_engine")]
	#[cfg_attr(docsrs, doc(cfg(feature = "mock_engine")))]
	pub use crate::engine::MockResponses;
}
/// Re-exports of third-party items used in the SDK's public API, so
/// plugin crates don't need to declare these dependencies themselves.
pub mod deps {
	pub use jiff::{Span, Zoned};
	pub use schemars::{Schema as JsonSchema, schema_for};
	pub use serde_json::{Value, from_str, from_value, to_value};
	pub use tonic::async_trait;
}
/// Identifies a query endpoint, written as `publisher/plugin` or
/// `publisher/plugin/query` (see the `FromStr`/`Display` impls below).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct QueryTarget {
	// Namespace of the plugin's publisher.
	pub publisher: String,
	// Name of the plugin.
	pub plugin: String,
	// Specific query endpoint; `None` addresses the plugin's default query.
	pub query: Option<String>,
}
impl FromStr for QueryTarget {
type Err = Error;
fn from_str(s: &str) -> StdResult<Self, Self::Err> {
let parts: Vec<&str> = s.split('/').collect();
match parts.as_slice() {
[publisher, plugin, query] => Ok(Self {
publisher: publisher.to_string(),
plugin: plugin.to_string(),
query: Some(query.to_string()),
}),
[publisher, plugin] => Ok(Self {
publisher: publisher.to_string(),
plugin: plugin.to_string(),
query: None,
}),
_ => Err(Error::InvalidQueryTargetFormat),
}
}
}
impl std::fmt::Display for QueryTarget {
	/// Render as `publisher/plugin`, appending `/query` when a specific
	/// query is set — the inverse of the `FromStr` parse above.
	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
		write!(f, "{}/{}", self.publisher, self.plugin)?;
		if let Some(query) = &self.query {
			write!(f, "/{}", query)?;
		}
		Ok(())
	}
}
impl TryInto<QueryTarget> for &str {
type Error = Error;
fn try_into(self) -> StdResult<QueryTarget, Self::Error> {
QueryTarget::from_str(self)
}
}
/// JSON Schemas describing one query endpoint's input and output,
/// as reported by `Plugin::schemas`.
pub struct QuerySchema {
	// Endpoint name; empty string denotes the plugin's default query.
	query_name: &'static str,
	// Schema the query's input must satisfy.
	input_schema: JsonSchema,
	// Schema the query's output conforms to.
	output_schema: JsonSchema,
}
/// A boxed, type-erased query endpoint implementation.
pub type DynQuery = Box<dyn Query>;

/// A query endpoint paired with the name it is registered under.
pub struct NamedQuery {
	// Registered endpoint name; empty string marks the default query.
	pub name: &'static str,
	// The endpoint implementation itself.
	pub inner: DynQuery,
}
impl NamedQuery {
	/// Whether this is the plugin's default query, signalled by an
	/// empty name (see `Plugin::default_query`).
	fn is_default(&self) -> bool {
		self.name.is_empty()
	}
}
/// A single query endpoint exposed by a plugin.
#[tonic::async_trait]
pub trait Query: Send {
	/// JSON Schema the query's input value must satisfy.
	fn input_schema(&self) -> JsonSchema;
	/// JSON Schema the query's output value conforms to.
	fn output_schema(&self) -> JsonSchema;
	/// Execute the query on `input`, using `engine` to issue any
	/// sub-queries to other plugins.
	async fn run(&self, engine: &mut PluginEngine, input: JsonValue) -> Result<JsonValue>;
}
/// The central trait a Hipcheck plugin implements: identity, configuration,
/// and the set of query endpoints it serves.
pub trait Plugin: Send + Sync + 'static {
	/// Namespace of the entity publishing this plugin.
	const PUBLISHER: &'static str;

	/// Name of the plugin itself.
	const NAME: &'static str;

	/// Apply the JSON configuration object received from Hipcheck core.
	fn set_config(&self, config: JsonValue) -> StdResult<(), ConfigError>;

	/// Policy expression to use when the policy file supplies none.
	/// The default implementation returns an empty string.
	fn default_policy_expr(&self) -> Result<String> {
		Ok(String::new())
	}

	/// Human-readable explanation of the default query, if any.
	fn explain_default_query(&self) -> Result<Option<String>> {
		Ok(None)
	}

	/// All query endpoints this plugin serves.
	fn queries(&self) -> impl Iterator<Item = NamedQuery>;

	/// The default query endpoint (registered under the empty name),
	/// if one exists.
	fn default_query(&self) -> Option<DynQuery> {
		for named in self.queries() {
			if named.is_default() {
				return Some(named.inner);
			}
		}
		None
	}

	/// Input/output schemas for every query endpoint, in the same order
	/// as `queries()`.
	fn schemas(&self) -> impl Iterator<Item = QuerySchema> {
		self.queries().map(|named| {
			let input_schema = named.inner.input_schema();
			let output_schema = named.inner.output_schema();
			QuerySchema {
				query_name: named.name,
				input_schema,
				output_schema,
			}
		})
	}
}
/// Install a JSON-formatted `tracing` subscriber as the global default,
/// writing to stderr and filtered by `log_level`
/// (gated behind the `log_forwarding` feature).
#[cfg(feature = "log_forwarding")]
pub fn init_tracing_logger(log_level: LogLevel) {
	// The level's string form doubles as the env-filter directive.
	let filter = log_level.to_string();
	tracing_subscriber::fmt()
		.json()
		.with_writer(std::io::stderr)
		.with_env_filter(filter)
		.init();
}