//! apollo-language-server 0.7.0
//!
//! A GraphQL language server with first-class support for Apollo Federation.
use crate::{
    diagnostics::map_diagnostics_for_lsp,
    federation::link::ParsedLink,
    semantic_tokens::IncompleteSemanticToken,
    specs::{
        federation::{FEDERATION_SPEC_NAME, KEY_DIRECTIVE, PROVIDES_DIRECTIVE, REQUIRES_DIRECTIVE},
        KNOWN_SPECS,
    },
    utils::lsp_range_from_cst_textrange::lsp_range_from_cst_textrange,
};
use apollo_compiler::{
    ast::DirectiveDefinition, parser::SourceSpan, schema::ExtendedType, Node, Schema,
};
use apollo_parser::{cst::CstNode, Parser, SyntaxKind, SyntaxTree};
use ropey::Rope;
use std::collections::HashMap;

use super::SchemaWithMetadata;

/// A single subgraph document tracked by the language server, pairing the
/// raw source text and its CST with the compiled schema and federation
/// metadata derived from it.
#[derive(Debug)]
pub(crate) struct Subgraph {
    /// LSP document URI this subgraph was loaded from.
    pub(crate) uri: lsp::Url,
    /// Subgraph name (as supplied by the caller of `Subgraph::new`).
    pub(crate) name: String,
    /// LSP document version; bumped by the client on each edit.
    pub(crate) version: i32,
    /// Full source text, stored as a rope for cheap positional slicing.
    pub(crate) source_text: Rope,
    /// Lossless concrete syntax tree parsed from `source_text`.
    pub(crate) cst: SyntaxTree,
    /// Field sets extracted from `@key`/`@provides`/`@requires` arguments.
    pub(crate) field_sets: Vec<FieldSet>,
    /// Directive names (including spec aliases) treated as builtins.
    pub(crate) builtins: Vec<String>,
    // Kept private; exposed piecemeal via `schema()`, `links()`, etc.
    schema_with_metadata: SchemaWithMetadata,
}

/// The `fields` argument of a federation directive (`@key`, `@provides`,
/// `@requires`), re-parsed as a standalone selection set so it can be
/// analyzed (e.g. for semantic tokens) independently of the schema document.
#[derive(Debug)]
pub(crate) struct FieldSet {
    // TODO: type name isn't used yet, but when we add support for completions, it will be useful
    _type_name: String,
    /// The field set text rewritten into brace-delimited form; same length
    /// as the original quoted source slice (see `from_field_string`).
    value: String,
    /// Location of the original `fields` string value in the schema source.
    location: SourceSpan,
    /// CST of the rewritten `value`, parsed as its own mini-document.
    cst: SyntaxTree,
}

/// Major federation version a subgraph targets (Federation 1 vs. 2).
pub enum SubgraphFederationVersion {
    V1,
    V2,
}

impl Subgraph {
    /// Builds a `Subgraph` from raw schema text: parses a CST, extracts the
    /// federation field sets, and records the builtin (aliased) directive
    /// names from the linked specs.
    pub(crate) fn new(
        name: String,
        uri: lsp::Url,
        source_text: String,
        version: i32,
        schema_with_metadata: SchemaWithMetadata,
    ) -> Subgraph {
        let cst = Parser::new(&source_text).parse();
        let source_text_as_rope: Rope = source_text.into();

        let field_sets = Subgraph::field_sets(
            &schema_with_metadata.schema,
            &source_text_as_rope,
            &schema_with_metadata.specs_with_aliases,
        );

        let builtins = Subgraph::builtins(&schema_with_metadata.specs_with_aliases);

        Subgraph {
            uri,
            name,
            version,
            source_text: source_text_as_rope,
            cst,
            field_sets,
            builtins,
            schema_with_metadata,
        }
    }

    /// True when any compilation stage (parse, build, validation) recorded
    /// errors for this subgraph.
    pub fn has_diagnostics(&self) -> bool {
        self.schema_with_metadata.parse_errors.is_some()
            || self.schema_with_metadata.build_errors.is_some()
            || self.schema_with_metadata.validation_errors.is_some()
    }

    /// Converts the stored parse/build/validation errors into LSP
    /// diagnostics positioned against the current source text.
    pub(crate) fn diagnostics(&self) -> Vec<lsp::Diagnostic> {
        map_diagnostics_for_lsp(
            self.schema_with_metadata.parse_errors.as_ref(),
            self.schema_with_metadata.build_errors.as_ref(),
            self.schema_with_metadata.validation_errors.as_ref(),
            self.source_text.to_string(),
        )
        .into()
    }

    /// Collects semantic tokens for every extracted field set; tokens are
    /// already offset into document coordinates by `FieldSet::to_semantic_tokens`.
    pub(crate) fn semantic_tokens(&self) -> Vec<IncompleteSemanticToken> {
        let mut tokens = vec![];

        for field_set in &self.field_sets {
            tokens.extend(field_set.to_semantic_tokens(&self.source_text));
        }

        tokens
    }

    /// Walks the schema's types and extracts every `fields` argument of
    /// `@provides`/`@requires` (on object fields) and `@key` (on type
    /// definitions), honoring any per-spec directive aliases.
    ///
    /// NOTE(review): `Rope::slice` takes *char* indices while `SourceSpan`
    /// offsets are assumed to be compatible with them — holds for ASCII
    /// schemas; TODO confirm for multi-byte source text.
    fn field_sets(
        schema: &Schema,
        source_text: &Rope,
        specs_with_aliases: &HashMap<String, String>,
    ) -> Vec<FieldSet> {
        schema
            .types
            .iter()
            .flat_map(|(type_name, ty)| {
                let mut field_sets_on_type = vec![];
                // `@provides`/`@requires` only appear on object type fields.
                if let ExtendedType::Object(ty) = ty {
                    ty.fields.iter().for_each(|(_, field)| {
                        field.directives.iter().for_each(|directive| {
                            // Compare against the aliased name if the spec
                            // was linked under an alias, else the default name.
                            if directive.name
                                == specs_with_aliases
                                    .get(PROVIDES_DIRECTIVE)
                                    .map_or(PROVIDES_DIRECTIVE, |alias| alias.as_str())
                                || directive.name
                                    == specs_with_aliases
                                        .get(REQUIRES_DIRECTIVE)
                                        .map_or(REQUIRES_DIRECTIVE, |alias| alias.as_str())
                            {
                                if let Some(field_set_node) =
                                    directive.specified_argument_by_name("fields")
                                {
                                    // NOTE(review): unwrap assumes arguments from a
                                    // parsed (non-synthetic) schema always carry a
                                    // source location — TODO confirm.
                                    let location = field_set_node.location().unwrap();
                                    let char_range = location.offset()..location.end_offset();
                                    let field_set = source_text.slice(char_range).to_string();
                                    field_sets_on_type.push(FieldSet::from_field_string(
                                        field_set, location, type_name,
                                    ))
                                }
                            }
                        });
                    });
                }

                // `@key` can appear on any type kind, so check type-level
                // directives regardless of the `ExtendedType` variant.
                field_sets_on_type.extend(ty.directives().iter().filter_map(|directive| {
                    if directive.name
                        == specs_with_aliases
                            .get(KEY_DIRECTIVE)
                            .map_or(KEY_DIRECTIVE, |alias| alias.as_str())
                    {
                        directive
                            .specified_argument_by_name("fields")
                            .map(|field_set_node| {
                                let location = field_set_node.location().unwrap();
                                let char_range = location.offset()..location.end_offset();
                                let field_set = source_text.slice(char_range).to_string();
                                FieldSet::from_field_string(field_set, location, type_name)
                            })
                    } else {
                        None
                    }
                }));

                field_sets_on_type
            })
            .collect::<Vec<_>>()
    }

    /// The effective (possibly aliased) directive names the linked specs
    /// introduce; these are treated as builtins rather than user errors.
    fn builtins(specs_with_aliases: &HashMap<String, String>) -> Vec<String> {
        specs_with_aliases.values().cloned().collect::<Vec<_>>()
    }

    /// The compiled schema for this subgraph.
    pub(crate) fn schema(&self) -> &Schema {
        &self.schema_with_metadata.schema
    }

    /// Map from canonical spec directive name to its aliased name in this document.
    pub(crate) fn specs_with_aliases(&self) -> &HashMap<String, String> {
        &self.schema_with_metadata.specs_with_aliases
    }

    /// Parsed `@link`s declared by this subgraph, keyed by spec name.
    pub(crate) fn links(&self) -> &HashMap<String, ParsedLink> {
        &self.schema_with_metadata.links
    }

    #[cfg(test)]
    pub(crate) fn schema_with_metadata(&self) -> &SchemaWithMetadata {
        &self.schema_with_metadata
    }
}

/// Checks that every federation directive definition the user wrote by hand
/// is compatible with the canonical Federation 1.0 definition of the same
/// name: identical arguments and locations, and no stricter repeatability.
///
/// # Panics
///
/// Panics if the Federation 1.0 spec is missing from `KNOWN_SPECS`, or if a
/// provided definition's name is not a known Federation 1.0 directive.
pub fn fed1_definitions_are_compatible(
    user_provided_fed1_definitions: Vec<&Node<DirectiveDefinition>>,
) -> bool {
    // Look up the canonical Federation 1.0 spec.
    let spec = KNOWN_SPECS
        .get(FEDERATION_SPEC_NAME)
        .expect("Federation spec not found")
        .get("1.0")
        .expect("Federation spec 1.0 not found");

    // Render the canonical directive definitions as SDL and re-parse them so
    // we can compare structured definitions rather than strings.
    let sdl = spec
        .directives
        .values()
        .map(ToString::to_string)
        .collect::<Vec<_>>()
        .join("\n");
    let known_definitions = Schema::parse(sdl, "fed1_definitions.graphql")
        .unwrap()
        .directive_definitions;

    // They may not have or use all of the directive definitions (i.e.
    // `@extends` is only used by subgraphs that can't use `extend`).
    for provided in &user_provided_fed1_definitions {
        let Some(expected) = known_definitions.get(provided.name.as_str()) else {
            panic!("Unknown directive definition: {}", provided.name.as_str())
        };

        // If ours is repeatable, theirs can be either and still be compatible.
        // If ours is not repeatable, theirs must also not be repeatable.
        let repeatability_ok = expected.repeatable || !provided.repeatable;

        if provided.arguments != expected.arguments
            || provided.locations != expected.locations
            || !repeatability_ok
        {
            return false;
        }
    }
    true
}

impl FieldSet {
    /// Rewrites a quoted field set string into a brace-delimited selection
    /// set and parses it as a standalone mini-document.
    ///
    /// Every rewrite below is length-preserving (`"""` -> `{  `/`  }`,
    /// `"` -> `{`/`}`, `\"` -> ` "`), so character offsets inside the parsed
    /// CST line up one-to-one with the original source span — this is what
    /// lets `to_semantic_tokens` relocate token ranges by simply adding the
    /// span's start offset.
    pub fn from_field_string(field_set: String, location: SourceSpan, type_name: &str) -> FieldSet {
        // The fieldset here is wrapped with double quotes or triple quotes in the case of a blockstring. We should replace these
        // with opening and closing braces to make the contents a valid graphql "operation" that we can parse.
        // i.e, "a b { c }" -> {a b { c }}
        let mut value = field_set;

        if value.starts_with("\"\"\"") {
            value = value.replacen("\"\"\"", "{  ", 1).replace("\"\"\"", "  }");
        } else {
            value = value.replacen('"', "{", 1);
            value.pop();
            value.push('}');
        }
        // If the user has any escaped quotations in the fieldset, we need to unescape them for the
        // fieldset to be parsed correctly
        value = value.replace("\\\"", " \"");

        let parsed = Parser::new(&value).parse();

        FieldSet {
            _type_name: type_name.to_string(),
            value,
            location,
            cst: parsed,
        }
    }

    /// Walks the field set's CST and emits semantic tokens positioned in
    /// *document* coordinates (shifted by the field set's source offset).
    ///
    /// NOTE(review): `token_type` values (0, 1, 3) index into the server's
    /// semantic-token legend defined elsewhere — presumably
    /// variable/operator/number or similar; verify against the legend.
    pub fn to_semantic_tokens(&self, source_text: &Rope) -> Vec<IncompleteSemanticToken> {
        let mut tokens = vec![];

        let field_set_offset = self.location.offset();

        // Because the CST is treated independent of the source text, we need to adjust the offsets to place
        // the tokens in the correct location in the source text.
        for node in self.cst.document().syntax().descendants_with_tokens() {
            match node.kind() {
                SyntaxKind::IDENT => {
                    tokens.push(IncompleteSemanticToken {
                        range: lsp_range_from_cst_textrange(
                            node.text_range(),
                            source_text,
                            Some(field_set_offset as u32),
                        ),
                        token_type: 0,
                    });
                }
                SyntaxKind::BOOLEAN_VALUE | SyntaxKind::INT_VALUE => {
                    tokens.push(IncompleteSemanticToken {
                        range: lsp_range_from_cst_textrange(
                            node.text_range(),
                            source_text,
                            Some(field_set_offset as u32),
                        ),
                        token_type: 3,
                    });
                }
                // On string values, we should color the escaped quotations so they appear distinct
                // from the quotations wrapping the string
                SyntaxKind::STRING_VALUE => {
                    let token_range = lsp_range_from_cst_textrange(
                        node.text_range(),
                        source_text,
                        Some(field_set_offset as u32),
                    );

                    // Two-char token over the opening `\"` pair: the CST sees
                    // the unescaped `"` but the document has a backslash one
                    // column earlier.
                    // NOTE(review): `start.character - 1` underflows u32 if
                    // the string starts at column 0 — presumably impossible
                    // inside a braced field set, but worth confirming.
                    tokens.push(IncompleteSemanticToken {
                        range: lsp::Range::new(
                            lsp::Position::new(
                                token_range.start.line,
                                token_range.start.character - 1,
                            ),
                            lsp::Position::new(
                                token_range.start.line,
                                token_range.start.character + 1,
                            ),
                        ),
                        token_type: 1,
                    });

                    // Matching two-char token over the closing `\"` pair.
                    tokens.push(IncompleteSemanticToken {
                        range: lsp::Range::new(
                            lsp::Position::new(token_range.end.line, token_range.end.character - 2),
                            lsp::Position::new(token_range.end.line, token_range.end.character),
                        ),
                        token_type: 1,
                    });
                }
                SyntaxKind::L_CURLY
                | SyntaxKind::R_CURLY
                | SyntaxKind::L_PAREN
                | SyntaxKind::R_PAREN
                | SyntaxKind::L_BRACK
                | SyntaxKind::R_BRACK
                | SyntaxKind::COLON
                | SyntaxKind::COMMA => {
                    let text_range = node.text_range();

                    // Translate CST-local offsets into document offsets.
                    let (offset_start, offset_end) = {
                        let start = usize::from(text_range.start()) + field_set_offset;
                        let end = usize::from(text_range.end()) + field_set_offset;
                        (start, end)
                    };

                    // Skip the braces we synthesized in `from_field_string` —
                    // in the document those positions hold quote characters,
                    // not real punctuation.
                    let is_start_or_end_curly = offset_start == field_set_offset
                        || offset_end == field_set_offset + self.value.len();

                    if !is_start_or_end_curly {
                        tokens.push(IncompleteSemanticToken {
                            range: lsp_range_from_cst_textrange(
                                text_range,
                                source_text,
                                Some(field_set_offset as u32),
                            ),
                            token_type: 1,
                        });
                    }
                }
                _ => {}
            }
        }

        tokens
    }
}