use std::{collections::HashMap, sync::Arc};
use moka::sync::Cache as MokaCache;
use super::{
QueryType,
relay::{RelayDispatch, RelayDispatchImpl},
};
use crate::{
db::{RelayDatabaseAdapter, traits::DatabaseAdapter, types::PoolMetrics},
graphql::ParsedQuery,
runtime::{QueryMatcher, QueryPlanner, RuntimeConfig},
schema::{CompiledSchema, IntrospectionResponses},
};
// Capacity of the per-executor parse cache (keyed by query hash).
// NOTE(review): moka interprets this as a max entry count by default — confirm
// no custom weigher is intended.
const PARSE_CACHE_CAPACITY: u64 = 1_024;
/// GraphQL query executor, generic over the database adapter `A`.
///
/// Constructed via `Executor::new` / `with_config` (and the `_with_relay`
/// variants when `A` also implements `RelayDatabaseAdapter`).
pub struct Executor<A: DatabaseAdapter> {
    // Compiled schema this executor serves.
    pub(super) schema: CompiledSchema,
    // Shared handle to the database adapter.
    pub(super) adapter: Arc<A>,
    // Relay `node`-style dispatch; `None` unless built with a relay constructor.
    pub(super) relay: Option<Arc<dyn RelayDispatch>>,
    // Matches incoming queries against the compiled schema.
    pub(super) matcher: QueryMatcher,
    // Produces execution plans (plan caching controlled by config.cache_query_plans).
    pub(super) planner: QueryPlanner,
    // Runtime tuning knobs.
    pub(super) config: RuntimeConfig,
    // Pre-built introspection responses derived from the schema.
    pub(super) introspection: IntrospectionResponses,
    // Maps a query's return-type name to its SQL source (first query per type wins).
    pub(super) node_type_index: HashMap<String, Arc<str>>,
    // Caches parse results keyed by a u64 hash of the query text.
    pub(super) parse_cache: MokaCache<u64, Arc<(QueryType, Option<ParsedQuery>)>>,
    // Optional response-level cache; installed via `with_response_cache`.
    pub(super) response_cache: Option<Arc<crate::cache::ResponseCache>>,
}
impl<A: DatabaseAdapter> Executor<A> {
#[must_use]
pub fn new(schema: CompiledSchema, adapter: Arc<A>) -> Self {
Self::with_config(schema, adapter, RuntimeConfig::default())
}
#[must_use]
pub fn with_config(schema: CompiledSchema, adapter: Arc<A>, config: RuntimeConfig) -> Self {
let matcher = QueryMatcher::new(schema.clone());
let planner = QueryPlanner::new(config.cache_query_plans);
let introspection = IntrospectionResponses::build(&schema);
let mut node_type_index: HashMap<String, Arc<str>> = HashMap::new();
for q in &schema.queries {
if let Some(src) = q.sql_source.as_deref() {
node_type_index.entry(q.return_type.clone()).or_insert_with(|| Arc::from(src));
}
}
Self {
schema,
adapter,
relay: None,
matcher,
planner,
config,
introspection,
node_type_index,
parse_cache: MokaCache::new(PARSE_CACHE_CAPACITY),
response_cache: None,
}
}
pub fn pool_metrics(&self) -> PoolMetrics {
self.adapter.pool_metrics()
}
#[must_use]
pub const fn schema(&self) -> &CompiledSchema {
&self.schema
}
#[must_use]
pub const fn config(&self) -> &RuntimeConfig {
&self.config
}
#[must_use]
pub const fn adapter(&self) -> &Arc<A> {
&self.adapter
}
#[cfg(test)]
#[must_use]
pub fn parse_cache_entry_count(&self) -> u64 {
self.parse_cache.entry_count()
}
#[must_use]
pub fn with_response_cache(mut self, cache: Arc<crate::cache::ResponseCache>) -> Self {
self.response_cache = Some(cache);
self
}
#[must_use]
pub const fn response_cache(&self) -> Option<&Arc<crate::cache::ResponseCache>> {
self.response_cache.as_ref()
}
}
impl<A: DatabaseAdapter + RelayDatabaseAdapter + 'static> Executor<A> {
    /// Creates a Relay-enabled executor with the default runtime configuration.
    #[must_use]
    pub fn new_with_relay(schema: CompiledSchema, adapter: Arc<A>) -> Self {
        Self::with_config_and_relay(schema, adapter, RuntimeConfig::default())
    }

    /// Creates a Relay-enabled executor with an explicit configuration.
    ///
    /// Delegates to [`Executor::with_config`] for the shared construction
    /// path (matcher, planner, introspection, node-type index, caches) and
    /// then installs the Relay dispatcher, instead of duplicating that setup.
    #[must_use]
    pub fn with_config_and_relay(
        schema: CompiledSchema,
        adapter: Arc<A>,
        config: RuntimeConfig,
    ) -> Self {
        // The dispatcher keeps its own handle to the adapter; cloning the Arc
        // is just a refcount bump.
        let relay: Arc<dyn RelayDispatch> = Arc::new(RelayDispatchImpl(adapter.clone()));
        let mut executor = Self::with_config(schema, adapter, config);
        executor.relay = Some(relay);
        executor
    }
}