use std::num::NonZeroUsize;
use std::ops::ControlFlow;
use std::sync::Arc;
use apollo_compiler::executable::Selection;
use serde_json_bytes::json;
use sha2::Digest;
use sha2::Sha256;
use crate::Configuration;
use crate::cache::storage::CacheStorage;
use crate::compute_job;
use crate::compute_job::ComputeBackPressureError;
use crate::compute_job::ComputeJobType;
use crate::graphql;
use crate::json_ext::Object;
use crate::query_planner::QueryKey;
use crate::services::layers::query_analysis::ParsedDocument;
use crate::spec;
/// Capacity (number of entries) of the in-memory introspection response cache.
/// Kept small: presumably clients issue few distinct introspection queries —
/// TODO confirm this sizing rationale.
const DEFAULT_INTROSPECTION_CACHE_CAPACITY: NonZeroUsize = NonZeroUsize::new(5).unwrap();
/// Cache of GraphQL responses to schema-introspection queries.
///
/// Cloning is cheap and shares the underlying storage (it is held in an
/// `Arc` inside [`Mode::Enabled`]).
#[derive(Clone)]
pub(crate) struct IntrospectionCache(Mode);
/// Whether introspection is served, and with what resources.
#[derive(Clone)]
enum Mode {
    // Introspection queries get an `INTROSPECTION_DISABLED` error response
    // (see `cached_introspection`).
    Disabled,
    Enabled {
        // Responses keyed by the string built in `introspection_cache_key`.
        storage: Arc<CacheStorage<String, graphql::Response>>,
        // Whether to enforce the introspection depth limit before executing.
        max_depth: MaxDepth,
    },
}
/// Whether to enforce apollo-compiler's introspection depth limit.
#[derive(Copy, Clone)]
enum MaxDepth {
    // Run `apollo_compiler::introspection::check_max_depth` before executing.
    Check,
    // Skip the depth check entirely.
    Ignore,
}
impl IntrospectionCache {
    /// Creates a cache whose mode mirrors router configuration:
    /// [`Mode::Enabled`] when `supergraph.introspection` is on (with
    /// depth checking controlled by `limits.introspection_max_depth`),
    /// [`Mode::Disabled`] otherwise.
    pub(crate) fn new(configuration: &Configuration) -> Self {
        if configuration.supergraph.introspection {
            let storage = Arc::new(CacheStorage::new_in_memory(
                DEFAULT_INTROSPECTION_CACHE_CAPACITY,
                "introspection",
            ));
            // NOTE(review): presumably registers the cache with metrics or
            // similar instrumentation — confirm against `CacheStorage::activate`.
            storage.activate();
            Self(Mode::Enabled {
                storage,
                max_depth: if configuration.limits.introspection_max_depth {
                    MaxDepth::Check
                } else {
                    MaxDepth::Ignore
                },
            })
        } else {
            Self(Mode::Disabled)
        }
    }

    /// Re-activates the underlying cache storage; a no-op when introspection
    /// is disabled.
    pub(crate) fn activate(&self) {
        match &self.0 {
            Mode::Disabled => {}
            Mode::Enabled { storage, .. } => storage.activate(),
        }
    }

    /// Intercepts operations that can be answered by introspection alone.
    ///
    /// Returns `ControlFlow::Break(response)` when the operation is fully
    /// handled here:
    /// * a query whose single selection is an undirected `__typename`
    ///   (answered inline by `maybe_lone_root_typename`),
    /// * a pure schema-introspection query (served from the cache or
    ///   executed on the compute-job pool),
    /// * a query mixing schema introspection with concrete root fields
    ///   (rejected with `mixed_fields_error`),
    /// * a query with neither schema introspection nor explicit root
    ///   fields (executed directly, skipping cache and depth check).
    ///
    /// Returns `ControlFlow::Continue(())` when the caller should proceed
    /// with normal query planning and execution.
    pub(crate) async fn maybe_execute(
        &self,
        schema: &Arc<spec::Schema>,
        key: &QueryKey,
        doc: &ParsedDocument,
        variables: Object,
    ) -> ControlFlow<Result<graphql::Response, ComputeBackPressureError>, ()> {
        // `?` on `ControlFlow` propagates `Break` and unwraps `Continue`.
        Self::maybe_lone_root_typename(schema, doc)?;
        if doc.operation.is_query() {
            if doc.has_schema_introspection {
                if doc.has_explicit_root_fields {
                    // Mixing introspection and concrete fields is unsupported;
                    // see the issue linked in the error message.
                    ControlFlow::Break(Ok(Self::mixed_fields_error()))?;
                } else {
                    ControlFlow::Break(
                        self.cached_introspection(schema, key, doc, variables).await,
                    )?
                }
            } else if !doc.has_explicit_root_fields {
                // Only meta-fields remain at the root (e.g. several aliased
                // `__typename`s — the single-field case broke out above).
                // Cheap to execute, so skip the depth check and the cache.
                let max_depth = MaxDepth::Ignore;
                ControlFlow::Break(Ok(Self::execute_introspection(
                    max_depth, schema, doc, variables,
                )))?
            }
        }
        ControlFlow::Continue(())
    }

    /// Answers a query whose entire selection set is a single, undirected
    /// `__typename` field without any planning or execution: the result is
    /// simply the name of the root operation type.
    ///
    /// Breaks with the ready response in that case; continues otherwise.
    fn maybe_lone_root_typename(
        schema: &Arc<spec::Schema>,
        doc: &ParsedDocument,
    ) -> ControlFlow<Result<graphql::Response, ComputeBackPressureError>, ()> {
        if doc.operation.selection_set.selections.len() == 1
            && let Selection::Field(field) = &doc.operation.selection_set.selections[0]
            && field.name == "__typename"
            // Directives (e.g. `@skip`/`@include`) could alter the result,
            // so fall through to normal execution when any are present.
            && field.directives.is_empty()
        {
            // Honor a field alias if one was used.
            let key = field.response_key().as_str();
            let object_type_name = schema
                .api_schema()
                .root_operation(doc.operation.operation_type)
                .expect("validation should have caught undefined root operation")
                .as_str();
            let data = json!({key: object_type_name});
            ControlFlow::Break(Ok(graphql::Response::builder().data(data).build()))?
        }
        ControlFlow::Continue(())
    }

    /// Builds the error response returned for queries that combine schema
    /// introspection with concrete root fields, which is not supported.
    fn mixed_fields_error() -> graphql::Response {
        let error = graphql::Error::builder()
            .message(
                "\
                Mixed queries with both schema introspection and concrete fields \
                are not supported yet: https://github.com/apollographql/router/issues/2789\
                ",
            )
            .extension_code("MIXED_INTROSPECTION")
            .build();
        graphql::Response::builder().error(error).build()
    }

    /// Computes the cache key for an introspection response:
    /// `"<query text>:<hex sha256 of the JSON-serialized variables>"`.
    /// The query text is embedded verbatim; only the variables are hashed.
    ///
    /// Returns `None` if the variables cannot be serialized, in which case
    /// the caller skips the cache entirely (after logging a warning).
    fn introspection_cache_key(query: &str, variables: Object) -> Option<String> {
        if let Ok(variable_key) = serde_json::to_string(&variables) {
            let mut hasher = Sha256::new();
            hasher.update(variable_key);
            Some(format!("{query}:{:x}", hasher.finalize()))
        } else {
            tracing::warn!(
                "Failed to serialize variables for introspection cache key, skipping cache: {:?}",
                variables
            );
            None
        }
    }

    /// Serves a schema-introspection query, preferring a cached response.
    ///
    /// On a cache miss the query is executed on the compute-job pool (it
    /// can be CPU-heavy) and the response is inserted into the cache
    /// afterwards. If introspection is disabled, an
    /// `INTROSPECTION_DISABLED` error response is returned instead.
    ///
    /// # Errors
    /// Returns [`ComputeBackPressureError`] when `compute_job::execute`
    /// rejects the work.
    async fn cached_introspection(
        &self,
        schema: &Arc<spec::Schema>,
        key: &QueryKey,
        doc: &ParsedDocument,
        variables: Object,
    ) -> Result<graphql::Response, ComputeBackPressureError> {
        let (storage, max_depth) = match &self.0 {
            Mode::Enabled { storage, max_depth } => (storage, *max_depth),
            Mode::Disabled => {
                let error = graphql::Error::builder()
                    .message(String::from("introspection has been disabled"))
                    .extension_code("INTROSPECTION_DISABLED")
                    .build();
                return Ok(graphql::Response::builder().error(error).build());
            }
        };
        // `variables` is cloned here because on a cache miss it is still
        // needed (moved) by the compute job below.
        let cache_key = Self::introspection_cache_key(&key.filtered_query, variables.clone());
        if let Some(cache_key) = &cache_key
            // NOTE(review): the `get` callback looks like a miss/init hook
            // that is never expected to run for this storage — confirm
            // against `CacheStorage::get`.
            && let Some(response) = storage.get(cache_key, |_| unreachable!()).await
        {
            return Ok(response);
        }
        let schema = schema.clone();
        let doc = doc.clone();
        let response = compute_job::execute(ComputeJobType::Introspection, move |_| {
            Self::execute_introspection(max_depth, &schema, &doc, variables)
        })?
        .await;
        // Even error responses are cached; they are deterministic for a
        // given query + variables against a fixed schema.
        if let Some(cache_key) = cache_key {
            storage.insert(cache_key, response.clone()).await;
        }
        Ok(response)
    }

    /// Executes an introspection query synchronously against the API schema
    /// using apollo-compiler's partial executor.
    ///
    /// Pipeline: optional depth check (per `max_depth`), variable value
    /// coercion, then partial execution. Any failure along the way is
    /// converted into a GraphQL error response rather than propagated.
    fn execute_introspection(
        max_depth: MaxDepth,
        schema: &spec::Schema,
        doc: &ParsedDocument,
        variables: Object,
    ) -> graphql::Response {
        let api_schema = schema.api_schema();
        let operation = &doc.operation;
        let max_depth_result = match max_depth {
            MaxDepth::Check => {
                apollo_compiler::introspection::check_max_depth(&doc.executable, operation)
            }
            MaxDepth::Ignore => Ok(()),
        };
        let result = max_depth_result
            .and_then(|()| {
                apollo_compiler::request::coerce_variable_values(api_schema, operation, &variables)
            })
            .and_then(|variable_values| {
                apollo_compiler::introspection::partial_execute(
                    api_schema,
                    &schema.implementers_map,
                    &doc.executable,
                    operation,
                    &variable_values,
                )
            });
        match result {
            Ok(response) => response.into(),
            Err(e) => {
                // Attach source locations from the parsed document so the
                // error points at the offending part of the query.
                let error = e.to_graphql_error(&doc.executable.sources);
                graphql::Response::builder().error(error).build()
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use serde_json_bytes::json;

    use crate::introspection::IntrospectionCache;

    /// The cache key for a given query + variables must be stable: the
    /// query text verbatim, a `:` separator, then the hex SHA-256 of the
    /// serialized variables (which serialize in normalized key order).
    #[test]
    fn test_variable_normalization_key() {
        let query = "query { __typename }";
        let vars = json!({
            "e": true,
            "a": "John Doe",
            "b": 30,
            "f": null,
            "d": {
                "b": 1,
                "a": 2,
            },
            "c": [1, "Hello", { "d": "World","a": 3 }],
        });
        let object = vars.as_object().unwrap().clone();

        let cache_key = IntrospectionCache::introspection_cache_key(query, object)
            .expect("variables should serialize");

        let expected =
            "query { __typename }:618d257f0ab1069a2374274c8c6e56f6b6528a839045647cedcfd147bc5dd9cf";
        assert_eq!(cache_key, expected);
    }
}