use crate::ast;
use crate::ast::from_cst::Convert;
use crate::ast::Document;
use crate::collections::IndexMap;
use crate::executable;
use crate::schema::SchemaBuilder;
use crate::validation::Details;
use crate::validation::DiagnosticList;
use crate::validation::Valid;
use crate::validation::WithErrors;
use crate::ExecutableDocument;
use crate::Schema;
use apollo_parser::SyntaxNode;
use rowan::TextRange;
use serde::Deserialize;
use serde::Serialize;
use std::num::NonZeroU64;
use std::ops::Range;
use std::path::Path;
use std::path::PathBuf;
use std::sync::atomic;
use std::sync::atomic::AtomicU64;
use std::sync::Arc;
use std::sync::OnceLock;
/// Configurable GraphQL parser.
///
/// Limits are optional; when unset, the underlying `apollo-parser` defaults
/// apply. After each parse, the `*_reached` fields record how much of each
/// limit was actually used (see [`Parser::recursion_reached`] and
/// [`Parser::tokens_reached`]).
#[derive(Default, Debug, Clone)]
pub struct Parser {
    // Maximum recursion depth allowed while parsing, if configured.
    recursion_limit: Option<usize>,
    // Maximum number of tokens allowed while parsing, if configured.
    token_limit: Option<usize>,
    // Highest recursion depth observed during the most recent parse.
    recursion_reached: usize,
    // Number of tokens consumed during the most recent parse.
    tokens_reached: usize,
}
/// A parsed input file: its path, full text, and a lazily-built
/// [`ariadne::Source`] used for line/column lookups and diagnostics.
#[derive(Clone)]
pub struct SourceFile {
    // Path the source was parsed from (may be a synthetic name).
    pub(crate) path: PathBuf,
    // The complete source text.
    pub(crate) source_text: String,
    // Lazily-initialized ariadne view of `source_text`; built on first use.
    pub(crate) source: OnceLock<ariadne::Source>,
}
/// Map from [`FileId`] to its [`SourceFile`], shared cheaply via `Arc`.
pub type SourceMap = Arc<IndexMap<FileId, Arc<SourceFile>>>;
/// Integer identifier for a parsed source file.
///
/// `NonZeroU64` gives `Option<FileId>` a niche, so it is the same size as
/// `FileId` itself.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Ord, PartialOrd)]
pub struct FileId {
    id: NonZeroU64,
}
/// A [`FileId`] with one extra boolean packed into the (otherwise unused)
/// high bit — see `TAG` / `ID_MASK` and [`TaggedFileId::pack`].
#[derive(Copy, Clone)]
pub(crate) struct TaggedFileId {
    tag_and_id: NonZeroU64,
}
/// A byte range within one source file, used as a diagnostic location.
#[derive(Clone, Copy, Hash, PartialEq, Eq)]
pub struct SourceSpan {
    // Which file the range refers to.
    pub(crate) file_id: FileId,
    // Byte offsets within that file's source text.
    pub(crate) text_range: TextRange,
}
/// A human-oriented source position; both `line` and `column` are 1-indexed
/// (see [`SourceFile::get_line_column`], which adds 1 to ariadne's
/// zero-indexed values).
#[derive(Clone, Copy, Hash, PartialEq, Eq, Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct LineColumn {
    pub line: usize,
    pub column: usize,
}
impl std::fmt::Debug for LineColumn {
    /// Compact `line:column` rendering, e.g. `3:14`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let Self { line, column } = self;
        write!(f, "{line}:{column}")
    }
}
/// Parse a document containing both type-system and executable definitions,
/// then validate both, using a default-configured [`Parser`].
pub fn parse_mixed_validate(
    source_text: impl Into<String>,
    path: impl AsRef<Path>,
) -> Result<(Valid<Schema>, Valid<ExecutableDocument>), DiagnosticList> {
    let mut parser = Parser::new();
    parser.parse_mixed_validate(source_text, path)
}
impl Parser {
    /// Create a parser with default configuration (no explicit limits).
    pub fn new() -> Self {
        Self::default()
    }

    /// Set the maximum recursion depth; exceeding it produces a
    /// `Details::ParserLimit` diagnostic.
    pub fn recursion_limit(mut self, value: usize) -> Self {
        self.recursion_limit = Some(value);
        self
    }

    /// Set the maximum number of tokens; exceeding it produces a
    /// `Details::ParserLimit` diagnostic.
    pub fn token_limit(mut self, value: usize) -> Self {
        self.token_limit = Some(value);
        self
    }

    /// Parse `source_text` into an AST [`Document`], without validation.
    ///
    /// On syntax errors, returns `Err(WithErrors)` carrying the partial
    /// document together with the diagnostics.
    pub fn parse_ast(
        &mut self,
        source_text: impl Into<String>,
        path: impl AsRef<Path>,
    ) -> Result<Document, WithErrors<Document>> {
        let mut errors = DiagnosticList::new(Default::default());
        let ast = self.parse_ast_inner(source_text, path, FileId::new(), &mut errors);
        errors.into_result_with(ast)
    }

    /// Like [`parse_ast`][Self::parse_ast] but with a caller-provided file id
    /// and diagnostic list, so callers can accumulate across inputs.
    pub(crate) fn parse_ast_inner(
        &mut self,
        source_text: impl Into<String>,
        path: impl AsRef<Path>,
        file_id: FileId,
        errors: &mut DiagnosticList,
    ) -> Document {
        let tree = self.parse_common(
            source_text.into(),
            path.as_ref().to_owned(),
            file_id,
            errors,
            |parser| parser.parse(),
        );
        // `parse_common` just inserted this file into `errors.sources`;
        // share the updated map with the resulting document.
        let sources = errors.sources.clone();
        Document::from_cst(tree.document(), file_id, sources)
    }

    /// Run `apollo-parser` with the configured limits, register the source
    /// file in `errors.sources`, and convert parser errors to diagnostics.
    ///
    /// `parse` selects the grammar entry point (whole document, selection
    /// set, or type).
    pub(crate) fn parse_common<T: apollo_parser::cst::CstNode>(
        &mut self,
        source_text: String,
        path: PathBuf,
        file_id: FileId,
        errors: &mut DiagnosticList,
        parse: impl FnOnce(apollo_parser::Parser) -> apollo_parser::SyntaxTree<T>,
    ) -> apollo_parser::SyntaxTree<T> {
        let mut parser = apollo_parser::Parser::new(&source_text);
        if let Some(value) = self.recursion_limit {
            parser = parser.recursion_limit(value)
        }
        if let Some(value) = self.token_limit {
            parser = parser.token_limit(value)
        }
        let tree = parse(parser);
        // Record how much of each limit this parse actually used.
        self.recursion_reached = tree.recursion_limit().high;
        self.tokens_reached = tree.token_limit().high;
        let source_file = Arc::new(SourceFile {
            path,
            source_text,
            source: OnceLock::new(),
        });
        Arc::make_mut(&mut errors.sources).insert(file_id, source_file);
        for parser_error in tree.errors() {
            // Errors whose offset or length does not fit the u32-based
            // `TextRange` are skipped rather than reported at a wrong span.
            let Ok(index) = parser_error.index().try_into() else {
                continue;
            };
            let Ok(len) = parser_error.data().len().try_into() else {
                continue;
            };
            let location = Some(SourceSpan {
                file_id,
                text_range: rowan::TextRange::at(index, len),
            });
            let details = if parser_error.is_limit() {
                Details::ParserLimit {
                    message: parser_error.message().to_owned(),
                }
            } else {
                Details::SyntaxError {
                    message: parser_error.message().to_owned(),
                }
            };
            errors.push(location, details)
        }
        tree
    }

    /// Parse `source_text` as a type-system (schema) document, without
    /// validation.
    pub fn parse_schema(
        &mut self,
        source_text: impl Into<String>,
        path: impl AsRef<Path>,
    ) -> Result<Schema, WithErrors<Schema>> {
        let mut builder = Schema::builder();
        self.parse_into_schema_builder(source_text, path, &mut builder);
        builder.build()
    }

    /// Parse a schema document and feed its definitions into an existing
    /// [`SchemaBuilder`], so a schema can be assembled from several inputs.
    pub fn parse_into_schema_builder(
        &mut self,
        source_text: impl Into<String>,
        path: impl AsRef<Path>,
        builder: &mut SchemaBuilder,
    ) {
        let ast = self.parse_ast_inner(source_text, path, FileId::new(), &mut builder.errors);
        // In a schema-only input, executable definitions are diagnosed.
        let executable_definitions_are_errors = true;
        builder.add_ast_document_not_adding_sources(&ast, executable_definitions_are_errors);
    }

    /// Parse an executable document against `schema`, without validation.
    pub fn parse_executable(
        &mut self,
        schema: &Valid<Schema>,
        source_text: impl Into<String>,
        path: impl AsRef<Path>,
    ) -> Result<ExecutableDocument, WithErrors<ExecutableDocument>> {
        let (document, errors) = self.parse_executable_inner(schema, source_text, path);
        errors.into_result_with(document)
    }

    /// Like [`parse_executable`][Self::parse_executable] but returning the
    /// document and diagnostics separately.
    pub(crate) fn parse_executable_inner(
        &mut self,
        schema: &Valid<Schema>,
        source_text: impl Into<String>,
        path: impl AsRef<Path>,
    ) -> (ExecutableDocument, DiagnosticList) {
        let mut errors = DiagnosticList::new(Default::default());
        let ast = self.parse_ast_inner(source_text, path, FileId::new(), &mut errors);
        let document = ast.to_executable_inner(schema, &mut errors);
        (document, errors)
    }

    /// Parse a document that mixes type-system and executable definitions,
    /// then validate the schema and the executable document against it.
    pub fn parse_mixed_validate(
        &mut self,
        source_text: impl Into<String>,
        path: impl AsRef<Path>,
    ) -> Result<(Valid<Schema>, Valid<ExecutableDocument>), DiagnosticList> {
        let mut builder = SchemaBuilder::new();
        let ast = self.parse_ast_inner(source_text, path, FileId::new(), &mut builder.errors);
        // In a mixed document, neither kind of definition is out of place.
        let executable_definitions_are_errors = false;
        let type_system_definitions_are_errors = false;
        builder.add_ast_document_not_adding_sources(&ast, executable_definitions_are_errors);
        let (mut schema, mut errors) = builder.build_inner();
        let executable = crate::executable::from_ast::document_from_ast(
            Some(&schema),
            &ast,
            &mut errors,
            type_system_definitions_are_errors,
        );
        crate::schema::validation::validate_schema(&mut errors, &mut schema);
        crate::executable::validation::validate_executable_document(
            &mut errors,
            &schema,
            &executable,
        );
        errors
            .into_result()
            .map(|()| (Valid(schema), Valid(executable)))
    }

    /// Parse `source_text` as a selection set over `type_name`, producing a
    /// [`executable::FieldSet`] (as used in schema coordinates/directives).
    pub fn parse_field_set(
        &mut self,
        schema: &Valid<Schema>,
        type_name: ast::NamedType,
        source_text: impl Into<String>,
        path: impl AsRef<Path>,
    ) -> Result<executable::FieldSet, WithErrors<executable::FieldSet>> {
        let (field_set, errors) = self.parse_field_set_inner(schema, type_name, source_text, path);
        errors.into_result_with(field_set)
    }

    /// Like [`parse_field_set`][Self::parse_field_set] but returning the
    /// field set and diagnostics separately.
    pub(crate) fn parse_field_set_inner(
        &mut self,
        schema: &Valid<Schema>,
        type_name: ast::NamedType,
        source_text: impl Into<String>,
        path: impl AsRef<Path>,
    ) -> (executable::FieldSet, DiagnosticList) {
        let file_id = FileId::new();
        let mut errors = DiagnosticList::new(Default::default());
        let tree = self.parse_common(
            source_text.into(),
            path.as_ref().to_owned(),
            file_id,
            errors,
            |parser| parser.parse_selection_set(),
        );
        let ast = ast::from_cst::convert_selection_set(&tree.field_set(), file_id);
        let mut selection_set = executable::SelectionSet::new(type_name);
        // Build errors are reported as if inside an anonymous query, since a
        // bare field set has no real operation to anchor the path to.
        let mut build_errors = executable::from_ast::BuildErrors {
            errors: &mut errors,
            path: executable::SelectionPath {
                nested_fields: Vec::new(),
                root: executable::ExecutableDefinitionName::AnonymousOperation(
                    ast::OperationType::Query,
                ),
            },
        };
        selection_set.extend_from_ast(Some(schema), &mut build_errors, &ast);
        let field_set = executable::FieldSet {
            sources: errors.sources.clone(),
            selection_set,
        };
        (field_set, errors)
    }

    /// Parse `source_text` as a single GraphQL type reference
    /// (e.g. `[Int!]`).
    pub fn parse_type(
        &mut self,
        source_text: impl Into<String>,
        path: impl AsRef<Path>,
    ) -> Result<ast::Type, DiagnosticList> {
        let mut errors = DiagnosticList::new(Default::default());
        let file_id = FileId::new();
        let tree = self.parse_common(
            source_text.into(),
            path.as_ref().to_owned(),
            file_id,
            &mut errors,
            |parser| parser.parse_type(),
        );
        errors.into_result().map(|()| {
            tree.ty()
                .convert(file_id)
                .expect("conversion should be infallible if there were no syntax errors")
        })
    }

    /// Highest recursion depth reached during the most recent parse.
    pub fn recursion_reached(&self) -> usize {
        self.recursion_reached
    }

    /// Number of tokens consumed during the most recent parse.
    pub fn tokens_reached(&self) -> usize {
        self.tokens_reached
    }
}
impl SourceFile {
    /// The path (or synthetic name) this source was parsed from.
    pub fn path(&self) -> &Path {
        self.path.as_path()
    }

    /// The full source text.
    pub fn source_text(&self) -> &str {
        self.source_text.as_str()
    }

    /// The ariadne view of this source, built on first access and cached.
    pub(crate) fn ariadne(&self) -> &ariadne::Source {
        self.source
            .get_or_init(|| ariadne::Source::from(self.source_text.clone()))
    }

    /// Convert a byte offset into a 1-indexed line/column position, or
    /// `None` if the offset is out of bounds.
    pub(crate) fn get_line_column(&self, index: usize) -> Option<LineColumn> {
        let (_, line_zero_based, column_zero_based) = self.ariadne().get_byte_line(index)?;
        Some(LineColumn {
            line: line_zero_based + 1,
            column: column_zero_based + 1,
        })
    }
}
impl std::fmt::Debug for SourceFile {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let Self {
            path,
            source_text,
            source: _,
        } = self;
        let mut debug = f.debug_struct("SourceFile");
        debug.field("path", path);
        // The built-in schema's text is a constant include; elide it.
        if path == std::path::Path::new("built_in.graphql") {
            debug.field(
                "source_text",
                &format_args!("include_str!(\"built_in.graphql\")"),
            );
        } else {
            debug.field("source_text", source_text);
        }
        debug.finish()
    }
}
impl std::fmt::Debug for FileId {
    /// Delegates to the inner integer's `Debug` representation.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Debug::fmt(&self.id, f)
    }
}
/// Counter handing out fresh ids; see [`FileId::new`].
static NEXT: AtomicU64 = AtomicU64::new(INITIAL);
/// First dynamically-allocated id: 1 and 2 are reserved for
/// `FileId::BUILT_IN` and `FileId::NONE`.
// Was `static`; a plain immutable value is idiomatically a `const`,
// which is also usable in const contexts.
const INITIAL: u64 = 3;
/// High bit used by `TaggedFileId` to pack a boolean next to the id.
const TAG: u64 = 1 << 63;
/// Mask selecting the id bits (everything except `TAG`).
const ID_MASK: u64 = !TAG;
#[allow(clippy::assertions_on_constants)]
const _: () = {
    assert!(TAG == 0x8000_0000_0000_0000);
    assert!(ID_MASK == 0x7FFF_FFFF_FFFF_FFFF);
};
impl FileId {
    /// Reserved id for the built-in schema definitions.
    pub const BUILT_IN: Self = Self::const_new(1);
    /// Reserved id meaning "no file".
    pub(crate) const NONE: Self = Self::const_new(2);

    /// Allocate a fresh id from the global counter.
    #[allow(clippy::new_without_default)]
    pub fn new() -> Self {
        loop {
            let id = NEXT.fetch_add(1, atomic::Ordering::AcqRel);
            // If the counter has grown into the tag bit, wind it back and
            // try again so ids always fit in `ID_MASK`.
            if id & TAG != 0 {
                Self::reset();
                continue;
            }
            return Self {
                id: NonZeroU64::new(id).unwrap(),
            };
        }
    }

    #[doc(hidden)]
    pub fn reset() {
        NEXT.store(INITIAL, atomic::Ordering::Release)
    }

    /// Compile-time constructor; panics if `id` is zero or uses the tag bit.
    const fn const_new(id: u64) -> Self {
        assert!(id & ID_MASK == id);
        match NonZeroU64::new(id) {
            Some(id) => Self { id },
            None => panic!(),
        }
    }
}
impl TaggedFileId {
    /// Pack a boolean `tag` into the high bit of `id`, which `FileId`
    /// guarantees is zero.
    pub(crate) const fn pack(tag: bool, id: FileId) -> Self {
        debug_assert!((id.id.get() & TAG) == 0);
        let tag_and_id = if tag {
            let packed = id.id.get() | TAG;
            // SAFETY: `id.id` is a `NonZeroU64`, and OR-ing in an extra bit
            // cannot make the value zero.
            unsafe { NonZeroU64::new_unchecked(packed) }
        } else {
            id.id
        };
        Self { tag_and_id }
    }

    /// The packed boolean.
    pub(crate) fn tag(self) -> bool {
        (self.tag_and_id.get() & TAG) != 0
    }

    /// The packed id, with the tag bit cleared.
    pub(crate) fn file_id(self) -> FileId {
        let unpacked = self.tag_and_id.get() & ID_MASK;
        // SAFETY: `pack` stores either the id unchanged, or the id with only
        // the `TAG` bit added; the original id was non-zero within `ID_MASK`,
        // so masking `TAG` off leaves a non-zero value.
        let id = unsafe { NonZeroU64::new_unchecked(unpacked) };
        FileId { id }
    }
}
impl SourceSpan {
    /// Span covering `node` within the file identified by `file_id`.
    pub(crate) fn new(file_id: FileId, node: &'_ SyntaxNode) -> Self {
        let text_range = node.text_range();
        Self { file_id, text_range }
    }

    /// The file this span points into.
    pub fn file_id(&self) -> FileId {
        self.file_id
    }

    /// Starting byte offset of the span.
    pub fn offset(&self) -> usize {
        self.text_range.start().into()
    }

    /// Ending byte offset (exclusive) of the span.
    pub fn end_offset(&self) -> usize {
        self.text_range.end().into()
    }

    /// Length of the span in bytes.
    pub fn node_len(&self) -> usize {
        self.text_range.len().into()
    }

    /// Combine the start of one optional span with the end of another.
    ///
    /// If only one span is present it is returned unchanged; if both are
    /// present but in different files, the `end_of` span wins.
    pub fn recompose(start_of: Option<Self>, end_of: Option<Self>) -> Option<Self> {
        let (start, end) = match (start_of, end_of) {
            (None, None) => return None,
            (Some(single), None) | (None, Some(single)) => return Some(single),
            (Some(start), Some(end)) => (start, end),
        };
        if start.file_id == end.file_id {
            Some(SourceSpan {
                file_id: start.file_id,
                text_range: TextRange::new(start.text_range.start(), end.text_range.end()),
            })
        } else {
            // Spans from different files cannot be merged; keep the later one.
            Some(end)
        }
    }

    /// Line/column of the span's start, if the file is in `sources`.
    pub fn line_column(&self, sources: &SourceMap) -> Option<LineColumn> {
        sources.get(&self.file_id)?.get_line_column(self.offset())
    }

    /// Line/column range covering the whole span, if the file is in `sources`.
    pub fn line_column_range(&self, sources: &SourceMap) -> Option<Range<LineColumn>> {
        let source = sources.get(&self.file_id)?;
        Some(Range {
            start: source.get_line_column(self.offset())?,
            end: source.get_line_column(self.end_offset())?,
        })
    }
}
impl std::fmt::Debug for SourceSpan {
    /// Renders as `start..end @file_id`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let start = self.offset();
        let end = self.end_offset();
        write!(f, "{start}..{end} @{:?}", self.file_id)
    }
}