use forge_ir::{
AdditionalProperties, ArrayConstraints, ArrayType, Discriminator, EnumIntType, EnumIntValue,
EnumStringType, EnumStringValue, IntKind, NamedType, ObjectConstraints, ObjectType,
PrimitiveConstraints, PrimitiveKind, PrimitiveType, Property, TypeDef, TypeRef, UnionKind,
UnionType, UnionVariant, NULL_ID,
};
use serde_json::Value as J;
use crate::ctx::Ctx;
use crate::diag;
use crate::pointer::Ptr;
use crate::refs::{self, RefOutcome};
/// How the schema currently being parsed should be named in the IR.
#[derive(Debug, Clone)]
pub(crate) enum NameHint {
    /// The schema has an explicit component name (e.g. from
    /// `#/components/schemas/<name>`).
    Named(String),
    /// The schema is inline; its identifier is derived from the owning
    /// type's id plus the role it plays there (e.g. `items`,
    /// `property_foo`, `variant_0`).
    Inline { owner: String, role: String },
}
impl NameHint {
    /// Convenience constructor for an [`NameHint::Inline`] hint.
    pub fn inline(owner: impl Into<String>, role: impl Into<String>) -> Self {
        Self::Inline {
            owner: owner.into(),
            role: role.into(),
        }
    }

    /// Sanitised base identifier derived from this hint.
    fn base(&self) -> String {
        match self {
            Self::Inline { owner, role } => crate::sanitize::join(&[owner, role]),
            Self::Named(name) => crate::sanitize::ident(name),
        }
    }
}
/// Whether a schema admits `null`: either the OAS 3.0 spelling
/// (`nullable: true`) or the JSON Schema spelling (`"null"` listed in,
/// or equal to, the `type` keyword).
pub(crate) fn detect_nullable(map: &serde_json::Map<String, J>) -> bool {
    // OAS 3.0 form.
    if map.get("nullable").and_then(J::as_bool) == Some(true) {
        return true;
    }
    // JSON Schema 2020-12 forms: `type: "null"` or `"null"` in a type array.
    match map.get("type") {
        Some(J::String(s)) => s == "null",
        Some(J::Array(entries)) => entries.iter().any(|e| e.as_str() == Some("null")),
        _ => false,
    }
}
/// A JSON Schema 2020-12 keyword that is recognised but not yet surfaced in
/// the IR; each occurrence produces one "dropping" warning.
struct DeferredFeature {
    key: &'static str,  // the schema key that triggered the warning
    code: &'static str, // diagnostic code to emit
    msg: &'static str,  // human-readable warning text
}
/// Scan a schema map for 2020-12 keywords we accept but drop, returning one
/// entry per keyword actually present (in table order).
fn collect_deferred_features(map: &serde_json::Map<String, J>) -> Vec<DeferredFeature> {
    const ENTRIES: &[(&str, &str, &str)] = &[
        (
            "dependentRequired",
            diag::W_DEPENDENT_REQUIRED_DROPPED,
            "`dependentRequired` (JSON Schema 2020-12) is not yet surfaced; dropping",
        ),
        (
            "dependentSchemas",
            diag::W_DEPENDENT_SCHEMAS_DROPPED,
            "`dependentSchemas` (JSON Schema 2020-12) is not yet surfaced; dropping",
        ),
        (
            "unevaluatedProperties",
            diag::W_UNEVALUATED_PROPERTIES_DROPPED,
            "`unevaluatedProperties` (JSON Schema 2020-12) is not yet surfaced; dropping",
        ),
        (
            "$dynamicRef",
            diag::W_DYNAMIC_REF_DROPPED,
            "`$dynamicRef` (JSON Schema 2020-12) is not yet surfaced; dropping",
        ),
        (
            "$dynamicAnchor",
            diag::W_DYNAMIC_ANCHOR_DROPPED,
            "`$dynamicAnchor` (JSON Schema 2020-12) is not yet surfaced; dropping",
        ),
    ];
    ENTRIES
        .iter()
        .filter(|(key, _, _)| map.contains_key(*key))
        .map(|&(key, code, msg)| DeferredFeature { key, code, msg })
        .collect()
}
/// Extract the non-null entries of the `type` keyword, deduplicated and in
/// first-seen order. Returns `None` when no usable type remains (missing
/// `type`, `type: "null"`, non-string entries only, etc.).
fn extract_types(map: &serde_json::Map<String, J>) -> Option<Vec<String>> {
    match map.get("type") {
        // Single string: usable unless it is the literal "null".
        Some(J::String(s)) => (s != "null").then(|| vec![s.clone()]),
        // Type array: keep strings, skip "null", dedupe preserving order.
        Some(J::Array(entries)) => {
            let mut seen = std::collections::HashSet::new();
            let kept: Vec<String> = entries
                .iter()
                .filter_map(J::as_str)
                .filter(|s| *s != "null")
                .filter(|s| seen.insert(s.to_string()))
                .map(str::to_string)
                .collect();
            (!kept.is_empty()).then_some(kept)
        }
        _ => None,
    }
}
/// Lower one JSON Schema / OpenAPI schema object into a `TypeRef`,
/// registering any named types and diagnostics on `ctx`.
///
/// Dispatch order matters — first match wins: `$ref` (siblings dropped on
/// OAS 3.0) → deferred-feature warnings → `allOf` → `oneOf` (discriminated
/// or untagged) → `anyOf` → `not` (unsupported) → `const` → `enum` →
/// `type`. A schema with no usable `type` falls through to
/// `parse_freeform`. Returns `None` when a diagnostic was pushed and no
/// type could be built.
pub(crate) fn parse_schema(
    ctx: &mut Ctx,
    value: &J,
    ptr: &mut Ptr,
    hint: NameHint,
) -> Option<TypeRef> {
    let map = match value {
        J::Object(m) => m,
        _ => {
            ctx.push_diag(diag::err(
                diag::E_INVALID_TYPE,
                "schema must be an object",
                ptr.loc(ctx.file),
            ));
            return None;
        }
    };
    if let Some(r) = map.get("$ref") {
        // OAS 3.0 forbids sibling keys next to `$ref`; warn and drop them.
        // `x-` extension keys are tolerated silently.
        if ctx.is_oas_3_0 {
            let has_non_ref_keys = map.keys().any(|k| k != "$ref" && !k.starts_with("x-"));
            if has_non_ref_keys {
                let dropped: Vec<&str> = map
                    .keys()
                    .filter(|k| k.as_str() != "$ref" && !k.starts_with("x-"))
                    .map(|k| k.as_str())
                    .collect();
                ctx.push_diag(diag::warn(
                    diag::W_REF_SIBLINGS_3_0,
                    format!(
                        "schema declares `$ref` together with sibling keys ({}); OAS 3.0 \
                         forbids siblings on `$ref`. Dropping siblings — promote to OpenAPI \
                         3.1+ to keep them.",
                        dropped.join(", ")
                    ),
                    ptr.loc(ctx.file),
                ));
            }
        }
        return ptr.with_token("$ref", |ptr| resolve_ref(ctx, r, ptr, &hint));
    }
    // Warn once per 2020-12 keyword we accept but drop.
    for feature in collect_deferred_features(map) {
        ptr.with_token(feature.key, |ptr| {
            ctx.push_diag(diag::warn(feature.code, feature.msg, ptr.loc(ctx.file)));
        });
    }
    let nullable = detect_nullable(map);
    if map.contains_key("allOf") {
        return crate::normalize::parse_all_of(ctx, map, ptr, hint, nullable);
    }
    if map.contains_key("oneOf") {
        if map.contains_key("discriminator") {
            return parse_oneof_discriminated(ctx, map, ptr, hint, nullable);
        }
        return parse_untagged_union(ctx, map, ptr, hint, nullable, "oneOf", UnionKind::OneOf);
    }
    if map.contains_key("anyOf") {
        return parse_untagged_union(ctx, map, ptr, hint, nullable, "anyOf", UnionKind::AnyOf);
    }
    if map.contains_key("not") {
        ptr.with_token("not", |ptr| {
            ctx.push_diag(diag::err(
                diag::E_COMPOSITION_NOT,
                "not is not supported",
                ptr.loc(ctx.file),
            ));
        });
        return None;
    }
    if let Some(c) = map.get("const") {
        return parse_const(ctx, map, c, ptr, hint, nullable);
    }
    let resolved_types = extract_types(map);
    if let Some(J::Array(values)) = map.get("enum") {
        // `enum` + a sole `type: integer` maps to an int enum; everything
        // else is treated as a string enum (non-string members dropped with
        // a warning inside the respective parser).
        let is_integer = matches!(resolved_types.as_deref(), Some([t]) if t == "integer");
        return if is_integer {
            parse_int_enum(ctx, map, values, ptr, hint, nullable)
        } else {
            parse_string_enum(ctx, map, values, ptr, hint, nullable)
        };
    }
    let types = match resolved_types {
        Some(ts) => ts,
        None => return parse_freeform(ctx, map, ptr, hint, nullable),
    };
    // A multi-entry `type: [a, b]` array becomes an implicit oneOf union.
    if types.len() > 1 {
        return parse_type_array_union(ctx, map, ptr, hint, nullable, &types);
    }
    let ty = types[0].as_str();
    match ty {
        "string" | "integer" | "number" | "boolean" => {
            parse_primitive(ctx, map, ptr, ty, hint, nullable)
        }
        "array" => parse_array(ctx, map, ptr, hint, nullable),
        "object" => parse_object(ctx, map, ptr, hint, nullable),
        other => {
            let msg = format!("unsupported schema type `{other}`");
            ptr.with_token("type", |ptr| {
                ctx.push_diag(diag::err(diag::E_INVALID_TYPE, msg, ptr.loc(ctx.file)));
            });
            None
        }
    }
}
/// Handle a schema with no usable `type` keyword.
///
/// A nullable "bare null" schema (only metadata keys plus a null `type`)
/// maps to `TypeDef::Null`: the shared null singleton when inline, or a
/// named null type when it carries a component name. Everything else
/// becomes an open object (`AdditionalProperties::Any`, no properties),
/// wrapped nullable when required.
fn parse_freeform(
    ctx: &mut Ctx,
    map: &serde_json::Map<String, J>,
    ptr: &mut Ptr,
    hint: NameHint,
    nullable: bool,
) -> Option<TypeRef> {
    if nullable && is_bare_null_schema(map) {
        match &hint {
            // Inline null schemas all share one singleton type.
            NameHint::Inline { .. } => return Some(ensure_null_singleton(ctx)),
            // Named null schemas keep their own identity and metadata.
            NameHint::Named(_) => {
                let extensions = crate::operations::collect_extensions(ctx, map, ptr);
                let nt = NamedType {
                    id: alloc_id(ctx, &hint),
                    original_name: original_name(&hint),
                    documentation: description(map),
                    title: title(map),
                    read_only: read_write_only(map).0,
                    write_only: read_write_only(map).1,
                    external_docs: crate::parse_external_docs(ctx, map.get("externalDocs"), ptr),
                    default: crate::parse_default(ctx, map, ptr, "schema"),
                    examples: crate::parse_examples(ctx, map, ptr),
                    xml: crate::parse_xml(ctx, map, ptr),
                    definition: TypeDef::Null,
                    extensions,
                    location: Some(ptr.loc(ctx.file)),
                };
                let id = nt.id.clone();
                ctx.push_type(nt);
                return Some(id);
            }
        }
    }
    // Fallback: an untyped schema is modelled as a fully-open object.
    let obj = ObjectType {
        properties: vec![],
        additional_properties: AdditionalProperties::Any,
        constraints: ObjectConstraints::default(),
    };
    let extensions = crate::operations::collect_extensions(ctx, map, ptr);
    let nt = NamedType {
        id: alloc_id(ctx, &hint),
        original_name: original_name(&hint),
        documentation: description(map),
        title: title(map),
        read_only: read_write_only(map).0,
        write_only: read_write_only(map).1,
        external_docs: crate::parse_external_docs(ctx, map.get("externalDocs"), ptr),
        default: crate::parse_default(ctx, map, ptr, "schema"),
        examples: crate::parse_examples(ctx, map, ptr),
        xml: crate::parse_xml(ctx, map, ptr),
        definition: TypeDef::Object(obj),
        extensions,
        location: Some(ptr.loc(ctx.file)),
    };
    Some(maybe_wrap_nullable(ctx, nt, nullable))
}
/// True when the schema is effectively `type: null` plus metadata only —
/// i.e. it has no structural keywords and every listed type is `"null"`.
fn is_bare_null_schema(map: &serde_json::Map<String, J>) -> bool {
    // Keys that carry annotations rather than structure; anything outside
    // this list means the schema is more than a bare null.
    const METADATA: &[&str] = &[
        "type",
        "description",
        "title",
        "readOnly",
        "writeOnly",
        "default",
        "example",
        "examples",
        "$comment",
        "deprecated",
        "nullable",
    ];
    if !map.keys().all(|k| METADATA.contains(&k.as_str())) {
        return false;
    }
    match map.get("type") {
        Some(J::String(s)) => s == "null",
        // Every listed type must be "null". An empty `type: []` matches no
        // value at all, so it is NOT a null schema — previously `all` on an
        // empty array was vacuously true and misclassified it.
        Some(J::Array(items)) => {
            !items.is_empty() && items.iter().all(|v| v.as_str() == Some("null"))
        }
        // Missing or non-string/array `type` is not a null schema.
        _ => false,
    }
}
/// Resolve a `$ref` value to a `TypeRef`, pushing diagnostics for
/// non-string, unsupported-local, or dangling targets.
fn resolve_ref(ctx: &mut Ctx, r: &J, ptr: &mut Ptr, hint: &NameHint) -> Option<TypeRef> {
    let raw = match r {
        J::String(s) => s.as_str(),
        _ => {
            ctx.push_diag(diag::err(
                diag::E_INVALID_TYPE,
                "$ref must be a string",
                ptr.loc(ctx.file),
            ));
            return None;
        }
    };
    match refs::resolve(ctx.refs(), raw) {
        RefOutcome::Component(id) => {
            // A `#/components/schemas/...` ref. When we are currently inside
            // an external document (detected via a non-empty doc prefix),
            // "local" means local to THAT document, so walk the target there
            // instead of returning the id verbatim.
            let in_external = ctx
                .doc_prefix
                .get(&ctx.current_doc)
                .map(|p| !p.is_empty())
                .unwrap_or(false);
            if in_external {
                let canonical = ctx.current_doc.clone();
                let (_, fragment) = crate::external::split_ref(raw);
                walk_resolved_schema_at_pointer(ctx, &canonical, fragment, raw, ptr, hint)
            } else {
                Some(id)
            }
        }
        RefOutcome::External => walk_external_ref(ctx, raw, ptr, hint),
        RefOutcome::UnsupportedLocal => {
            // Local pointer outside components/schemas (e.g. into paths).
            ctx.push_diag(diag::err(
                diag::E_EXTERNAL_REF,
                format!("$ref `{raw}` must point into #/components/schemas/"),
                ptr.loc(ctx.file),
            ));
            None
        }
        RefOutcome::Dangling(target) => {
            ctx.push_diag(diag::err(
                diag::E_DANGLING_REF,
                format!(
                    "$ref `{raw}` does not resolve to a declared schema (looked for `{target}`)"
                ),
                ptr.loc(ctx.file),
            ));
            None
        }
    }
}
/// Load the document behind an external `$ref` (file-relative) via the
/// configured resolver, register it, and walk the fragment's schema there.
fn walk_external_ref(ctx: &mut Ctx, raw: &str, ptr: &mut Ptr, hint: &NameHint) -> Option<TypeRef> {
    let (file_part, fragment) = crate::external::split_ref(raw);
    if file_part.is_empty() {
        // RefOutcome::External with no file part should not happen; guard
        // with a diagnostic rather than panicking.
        ctx.push_diag(diag::err(
            diag::E_EXTERNAL_REF,
            format!("external $ref `{raw}` could not be resolved"),
            ptr.loc(ctx.file),
        ));
        return None;
    }
    // Resolve relative to the document currently being walked.
    let current_doc = ctx.current_doc.clone();
    let loaded = match ctx.resolver.load(raw, &current_doc) {
        Ok(d) => d,
        Err(e) => {
            ctx.push_diag(diag::err(
                diag::E_EXTERNAL_REF,
                resolver_error_message(raw, &e),
                ptr.loc(ctx.file),
            ));
            return None;
        }
    };
    let canonical = loaded.canonical_path.clone();
    ensure_doc_registered(ctx, &canonical, &loaded.root);
    walk_resolved_schema_at_pointer(ctx, &canonical, fragment, raw, ptr, hint)
}
/// Walk the schema at `fragment` inside the (already registered) document
/// `canonical`, producing a named type whose id is stable across repeated
/// refs to the same (document, fragment) pair.
///
/// Deduplication: `external_ref_to_id` caches finished walks; `walking`
/// breaks reference cycles by returning the precomputed id while the same
/// target is still mid-walk.
pub(crate) fn walk_resolved_schema_at_pointer(
    ctx: &mut Ctx,
    canonical: &std::path::Path,
    fragment: &str,
    raw_for_diag: &str,
    ptr: &mut Ptr,
    hint: &NameHint,
) -> Option<TypeRef> {
    let Some(schema_name) = crate::external::fragment_last_token(fragment) else {
        ctx.push_diag(diag::err(
            diag::E_EXTERNAL_REF,
            format!("$ref `{raw_for_diag}` has an empty fragment"),
            ptr.loc(ctx.file),
        ));
        return None;
    };
    let fragment_string = fragment.to_string();
    let canonical = canonical.to_path_buf();
    let dedup_key = (canonical.clone(), fragment_string.clone());
    // Pick the target id: cached from a previous walk, the explicit name,
    // or the doc prefix + sanitised last fragment token for inline hints.
    let target_id = match ctx.external_ref_to_id.get(&dedup_key).cloned() {
        Some(existing) => existing,
        None => match hint {
            NameHint::Named(s) => crate::sanitize::ident(s),
            NameHint::Inline { .. } => {
                let prefix = ctx.doc_prefix.get(&canonical).cloned().unwrap_or_default();
                format!("{prefix}{}", crate::sanitize::ident(&schema_name))
            }
        },
    };
    let walking_key = (canonical.clone(), fragment_string.clone());
    // Already materialised, or currently being walked (cycle): stop here.
    if ctx.types.contains_key(&target_id) || ctx.walking.contains(&walking_key) {
        return Some(target_id);
    }
    let Some(root) = ctx.doc_roots.get(&canonical) else {
        // ensure_doc_registered should have cached the root; internal error.
        ctx.push_diag(diag::err(
            diag::E_EXTERNAL_REF,
            format!("internal: doc `{}` not in cache", canonical.display()),
            ptr.loc(ctx.file),
        ));
        return None;
    };
    let Some(schema_value) = crate::external::resolve_pointer(root, fragment).cloned() else {
        ctx.push_diag(diag::err(
            diag::E_DANGLING_REF,
            format!(
                "$ref `{raw_for_diag}` could not be resolved against `{}`",
                canonical.display()
            ),
            ptr.loc(ctx.file),
        ));
        return None;
    };
    // Walk the target in the context of its own document; restore afterwards.
    let prev_doc = std::mem::replace(&mut ctx.current_doc, canonical.clone());
    ctx.walking.insert(walking_key.clone());
    // Fresh pointer rooted at the external document, extended with the
    // fragment's tokens so diagnostics point into that file.
    let mut child_ptr = Ptr::new();
    let walked = walk_with_pointer_tokens(&mut child_ptr, fragment, |p| {
        parse_schema(ctx, &schema_value, p, NameHint::Named(target_id.clone()))
    });
    ctx.walking.remove(&walking_key);
    ctx.current_doc = prev_doc;
    if let Some(ref id) = walked {
        // Record the pre-sanitisation name and cache the mapping for reuse.
        if let Some(nt) = ctx.types.get_mut(id) {
            nt.original_name = Some(schema_name);
        }
        ctx.external_ref_to_id.insert(dedup_key, id.clone());
    }
    // NOTE(review): on a failed walk this still hands back `target_id` —
    // presumably so one bad external schema doesn't cascade into dangling
    // refs everywhere; confirm that downstream tolerates the missing type.
    walked.or(Some(target_id))
}
/// Render a user-facing message for a failed external `$ref` load.
pub(crate) fn resolver_error_message(raw: &str, e: &crate::external::ResolverError) -> String {
    use crate::external::ResolverError;
    match e {
        ResolverError::NotConfigured { .. } => {
            format!(
                "external $ref `{raw}` requires a file-based resolver; \
                 call `parse_path` instead of `parse_str`"
            )
        }
        ResolverError::UrlNotSupported { .. } => {
            format!("URL $ref `{raw}` is not yet supported (file-relative refs only)")
        }
        ResolverError::EscapesRoot { .. } => {
            format!("external $ref `{raw}` resolves outside the input file's directory")
        }
        ResolverError::Io { .. } => {
            format!("external $ref `{raw}` could not be read from disk")
        }
        ResolverError::InvalidJson { .. } => {
            format!("external $ref `{raw}` points at a file that is not valid JSON")
        }
    }
}
/// Register an external document on first sight: cache its root JSON,
/// assign it a unique name prefix, and build a ref index of its schemas.
///
/// Idempotent: a document that already has a prefix is left untouched.
pub(crate) fn ensure_doc_registered(
    ctx: &mut Ctx,
    canonical: &std::path::Path,
    root: &serde_json::Value,
) {
    ctx.doc_roots
        .entry(canonical.to_path_buf())
        .or_insert_with(|| root.clone());
    if ctx.doc_prefix.contains_key(canonical) {
        return;
    }
    let prefix = build_doc_prefix(ctx, canonical);
    ctx.doc_prefix.insert(canonical.to_path_buf(), prefix);
    let mut idx = crate::refs::RefIndex::default();
    let mut registered_anything = false;
    // Prefer a full OpenAPI document layout: components/schemas.
    if let Some(serde_json::Value::Object(schemas)) =
        root.get("components").and_then(|c| c.get("schemas"))
    {
        for name in schemas.keys() {
            idx.register(crate::sanitize::ident(name));
            registered_anything = true;
        }
    }
    // Fallback: treat a bare JSON object as a flat map of named schemas.
    if !registered_anything {
        if let serde_json::Value::Object(map) = root {
            for name in map.keys() {
                idx.register(crate::sanitize::ident(name));
            }
        }
    }
    ctx.doc_refs.insert(canonical.to_path_buf(), idx);
}
/// Run `body` with `ptr` extended by every token of a JSON-Pointer
/// `fragment` (so diagnostics emitted inside point at the full path).
pub(crate) fn walk_with_pointer_tokens<F, R>(ptr: &mut Ptr, fragment: &str, body: F) -> R
where
    F: FnOnce(&mut Ptr) -> R,
{
    // Recursively nest one `with_token` scope per pointer segment, then run
    // `body` at the innermost level.
    fn descend<F, R>(ptr: &mut Ptr, tokens: &[String], body: F) -> R
    where
        F: FnOnce(&mut Ptr) -> R,
    {
        if let Some((first, rest)) = tokens.split_first() {
            ptr.with_token(first, |ptr| descend(ptr, rest, body))
        } else {
            body(ptr)
        }
    }

    let path = fragment.strip_prefix('/').unwrap_or(fragment);
    let mut tokens: Vec<String> = Vec::new();
    if !path.is_empty() {
        for raw in path.split('/') {
            // RFC 6901 unescaping: `~1` -> `/` first, then `~0` -> `~`.
            tokens.push(raw.replace("~1", "/").replace("~0", "~"));
        }
    }
    descend(ptr, &tokens, body)
}
/// Derive a unique identifier prefix for an external document from its file
/// stem, e.g. `common.json` -> `common__` (then `common2__`, `common3__`, …
/// on collision with prefixes already handed out).
fn build_doc_prefix(ctx: &Ctx, canonical: &std::path::Path) -> String {
    let stem = canonical
        .file_stem()
        .and_then(|s| s.to_str())
        .unwrap_or("ext");
    let base = crate::sanitize::ident(stem);
    let mut prefix = format!("{base}__");
    let mut suffix: u32 = 2;
    while ctx.doc_prefix.values().any(|existing| existing == &prefix) {
        prefix = format!("{base}{suffix}__");
        suffix += 1;
    }
    prefix
}
/// Parse a primitive-typed schema (`string`/`integer`/`number`/`boolean`)
/// into a named `PrimitiveType`, including facets and an optional
/// `contentSchema` sub-walk.
fn parse_primitive(
    ctx: &mut Ctx,
    map: &serde_json::Map<String, J>,
    ptr: &mut Ptr,
    ty: &str,
    hint: NameHint,
    nullable: bool,
) -> Option<TypeRef> {
    let format = map.get("format").and_then(J::as_str);
    let (kind, format_extension) = primitive_kind(ctx, ptr, ty, format)?;
    let mut constraints = primitive_constraints(ctx, map);
    constraints.format_extension = format_extension;
    let id = alloc_id(ctx, &hint);
    // `contentSchema` (2020-12) is itself a schema; walk it as an inline
    // child of this primitive.
    if let Some(cs) = map.get("contentSchema") {
        let cs_ref = ptr.with_token("contentSchema", |ptr| {
            parse_schema(ctx, cs, ptr, NameHint::inline(&id, "content_schema"))
        });
        // On a failed sub-walk, leave `content_schema` as None.
        if cs_ref.is_some() {
            constraints.content_schema = cs_ref;
        }
    }
    let prim = PrimitiveType { kind, constraints };
    let extensions = crate::operations::collect_extensions(ctx, map, ptr);
    let nt = NamedType {
        id,
        original_name: original_name(&hint),
        documentation: description(map),
        title: title(map),
        read_only: read_write_only(map).0,
        write_only: read_write_only(map).1,
        external_docs: crate::parse_external_docs(ctx, map.get("externalDocs"), ptr),
        default: crate::parse_default(ctx, map, ptr, "schema"),
        examples: crate::parse_examples(ctx, map, ptr),
        xml: crate::parse_xml(ctx, map, ptr),
        definition: TypeDef::Primitive(prim),
        extensions,
        location: Some(ptr.loc(ctx.file)),
    };
    Some(maybe_wrap_nullable(ctx, nt, nullable))
}
/// Map a primitive `type` keyword to its `PrimitiveKind`, carrying any
/// `format` string along verbatim as a format extension.
///
/// Returns `None` (with a diagnostic) for unknown type names. The `ctx`
/// parameter was previously named `_ctx` despite being used — the
/// underscore falsely signalled "unused".
fn primitive_kind(
    ctx: &mut Ctx,
    ptr: &mut Ptr,
    ty: &str,
    format: Option<&str>,
) -> Option<(PrimitiveKind, Option<String>)> {
    use PrimitiveKind as P;
    let kind = match ty {
        "string" => P::String,
        "integer" => P::Integer,
        "number" => P::Number,
        "boolean" => P::Bool,
        _ => {
            ctx.push_diag(diag::err(
                diag::E_INVALID_TYPE,
                format!("unsupported `type` value: `{ty}`"),
                ptr.loc(ctx.file),
            ));
            return None;
        }
    };
    // The `format` passthrough is identical for every kind; do it once.
    Some((kind, format.map(String::from)))
}
/// Collect the numeric/string/content facets off a primitive schema map.
/// `format_extension` and `content_schema` are left empty here and filled
/// in by `parse_primitive`.
fn primitive_constraints(ctx: &mut Ctx, map: &serde_json::Map<String, J>) -> PrimitiveConstraints {
    // Reconcile the OAS 3.0 (boolean) and 3.1 (numeric) exclusive-bound
    // spellings before storing.
    let (minimum, exclusive_minimum) =
        normalise_exclusive_bound(ctx, map.get("minimum"), map.get("exclusiveMinimum"));
    let (maximum, exclusive_maximum) =
        normalise_exclusive_bound(ctx, map.get("maximum"), map.get("exclusiveMaximum"));
    PrimitiveConstraints {
        minimum,
        maximum,
        exclusive_minimum,
        exclusive_maximum,
        multiple_of: map.get("multipleOf").map(|v| ctx.values.intern_json(v)),
        min_length: map.get("minLength").and_then(J::as_u64),
        max_length: map.get("maxLength").and_then(J::as_u64),
        pattern: map.get("pattern").and_then(J::as_str).map(String::from),
        format_extension: None,
        content_encoding: map
            .get("contentEncoding")
            .and_then(J::as_str)
            .map(String::from),
        content_media_type: map
            .get("contentMediaType")
            .and_then(J::as_str)
            .map(String::from),
        content_schema: None,
    }
}
/// Normalise the two historical spellings of exclusive bounds into a
/// `(bound, exclusive_flag)` pair of interned values.
///
/// OAS 3.0: `minimum: n` + `exclusiveMinimum: true` (boolean flag).
/// JSON Schema 2020-12 / OAS 3.1: `exclusiveMinimum: n` (the number itself).
fn normalise_exclusive_bound(
    ctx: &mut Ctx,
    inclusive: Option<&J>,
    exclusive: Option<&J>,
) -> (Option<forge_ir::ValueRef>, Option<forge_ir::ValueRef>) {
    let inclusive_ref = inclusive.map(|v| ctx.values.intern_json(v));
    match exclusive {
        // 3.0 boolean form: keep the inclusive number, set the flag.
        Some(J::Bool(true)) => {
            let flag = ctx.values.intern(forge_ir::Value::Bool { value: true });
            (inclusive_ref, Some(flag))
        }
        Some(J::Bool(false)) | None => (inclusive_ref, None),
        // 3.1 numeric form: promote the number to the bound itself.
        // NOTE(review): when BOTH `minimum` and a numeric `exclusiveMinimum`
        // are present, the inclusive one wins and only the flag is set —
        // looks deliberate, but confirm against the intended semantics.
        Some(num @ J::Number(_)) => {
            let promoted = ctx.values.intern_json(num);
            let inclusive = inclusive_ref.or(Some(promoted));
            let flag = ctx.values.intern(forge_ir::Value::Bool { value: true });
            (inclusive, Some(flag))
        }
        // Any other JSON type is silently ignored.
        Some(_) => (inclusive_ref, None),
    }
}
/// Parse a `type: array` schema. `items` is mandatory here — a missing
/// `items` is an error (no implicit "array of anything").
fn parse_array(
    ctx: &mut Ctx,
    map: &serde_json::Map<String, J>,
    ptr: &mut Ptr,
    hint: NameHint,
    nullable: bool,
) -> Option<TypeRef> {
    let id = alloc_id(ctx, &hint);
    let items_ref = match map.get("items") {
        // Walk the element schema as an inline child named `<id>_items`.
        Some(v) => ptr.with_token("items", |ptr| {
            parse_schema(ctx, v, ptr, NameHint::inline(&id, "items"))
        })?,
        None => {
            ctx.push_diag(diag::err(
                diag::E_MISSING_FIELD,
                "array schema missing `items`",
                ptr.loc(ctx.file),
            ));
            return None;
        }
    };
    let constraints = ArrayConstraints {
        min_items: map.get("minItems").and_then(J::as_u64),
        max_items: map.get("maxItems").and_then(J::as_u64),
        unique_items: map.get("uniqueItems").and_then(J::as_bool).unwrap_or(false),
    };
    let arr = ArrayType {
        items: items_ref,
        constraints,
    };
    let extensions = crate::operations::collect_extensions(ctx, map, ptr);
    let nt = NamedType {
        id,
        original_name: original_name(&hint),
        documentation: description(map),
        title: title(map),
        read_only: read_write_only(map).0,
        write_only: read_write_only(map).1,
        external_docs: crate::parse_external_docs(ctx, map.get("externalDocs"), ptr),
        default: crate::parse_default(ctx, map, ptr, "schema"),
        examples: crate::parse_examples(ctx, map, ptr),
        xml: crate::parse_xml(ctx, map, ptr),
        definition: TypeDef::Array(arr),
        extensions,
        location: Some(ptr.loc(ctx.file)),
    };
    Some(maybe_wrap_nullable(ctx, nt, nullable))
}
fn parse_object(
ctx: &mut Ctx,
map: &serde_json::Map<String, J>,
ptr: &mut Ptr,
hint: NameHint,
nullable: bool,
) -> Option<TypeRef> {
let id = alloc_id(ctx, &hint);
let mut properties: Vec<Property> = Vec::new();
if let Some(J::Object(props)) = map.get("properties") {
ptr.with_token("properties", |ptr| {
for (name, schema) in props {
ptr.with_token(name, |ptr| {
let role = format!("property_{}", crate::sanitize::ident(name));
if let Some(t) = parse_schema(ctx, schema, ptr, NameHint::inline(&id, &role)) {
let (doc, deprecated, read_only, write_only, default) = match schema {
J::Object(m) => (
description(m),
m.get("deprecated").and_then(J::as_bool).unwrap_or(false),
m.get("readOnly").and_then(J::as_bool).unwrap_or(false),
m.get("writeOnly").and_then(J::as_bool).unwrap_or(false),
crate::parse_default(ctx, m, ptr, "property"),
),
_ => (None, false, false, false, None),
};
let extensions = match schema {
J::Object(m) => crate::operations::collect_extensions(ctx, m, ptr),
_ => Vec::new(),
};
properties.push(Property {
name: name.clone(),
r#type: t,
required: false, documentation: doc,
deprecated,
read_only,
write_only,
default,
extensions,
});
}
});
}
});
}
let required_names: std::collections::HashSet<String> = match map.get("required") {
Some(J::Array(items)) => items
.iter()
.filter_map(|v| v.as_str().map(str::to_string))
.collect(),
_ => std::collections::HashSet::new(),
};
for p in properties.iter_mut() {
if required_names.contains(&p.name) {
p.required = true;
}
}
let additional = match map.get("additionalProperties") {
Some(J::Bool(false)) => AdditionalProperties::Forbidden,
Some(J::Bool(true)) | None => AdditionalProperties::Any,
Some(J::Object(_)) => ptr
.with_token("additionalProperties", |ptr| {
parse_schema(
ctx,
map.get("additionalProperties").unwrap(),
ptr,
NameHint::inline(&id, "additional_properties"),
)
})
.map(|t| AdditionalProperties::Typed { r#type: t })
.unwrap_or(AdditionalProperties::Any),
Some(_) => {
ctx.push_diag(diag::err(
diag::E_INVALID_TYPE,
"`additionalProperties` must be a boolean or schema object",
ptr.loc(ctx.file),
));
AdditionalProperties::Any
}
};
let constraints = ObjectConstraints {
min_properties: map.get("minProperties").and_then(J::as_u64),
max_properties: map.get("maxProperties").and_then(J::as_u64),
};
let obj = ObjectType {
properties,
additional_properties: additional,
constraints,
};
let extensions = crate::operations::collect_extensions(ctx, map, ptr);
let nt = NamedType {
id,
original_name: original_name(&hint),
documentation: description(map),
title: title(map),
read_only: read_write_only(map).0,
write_only: read_write_only(map).1,
external_docs: crate::parse_external_docs(ctx, map.get("externalDocs"), ptr),
default: crate::parse_default(ctx, map, ptr, "schema"),
examples: crate::parse_examples(ctx, map, ptr),
xml: crate::parse_xml(ctx, map, ptr),
definition: TypeDef::Object(obj),
extensions,
location: Some(ptr.loc(ctx.file)),
};
Some(maybe_wrap_nullable(ctx, nt, nullable))
}
/// Parse a string-typed `enum`. Non-string members are warned about and
/// dropped; a literal `null` member makes the enum nullable instead of
/// contributing a value. An enum left with no usable values is an error.
fn parse_string_enum(
    ctx: &mut Ctx,
    map: &serde_json::Map<String, J>,
    raw_values: &[J],
    ptr: &mut Ptr,
    hint: NameHint,
    nullable: bool,
) -> Option<TypeRef> {
    let mut values: Vec<EnumStringValue> = Vec::new();
    // Shadow: a `null` member can upgrade the incoming nullability.
    let mut nullable = nullable;
    ptr.with_token("enum", |ptr| {
        for (i, v) in raw_values.iter().enumerate() {
            ptr.with_index(i, |ptr| match v {
                J::String(s) => values.push(EnumStringValue {
                    value: s.clone(),
                    documentation: None,
                }),
                J::Null => {
                    nullable = true;
                }
                other => {
                    ctx.push_diag(diag::warn(
                        diag::W_ENUM_VALUE_DROPPED,
                        format!(
                            "string enum value `{}` is not a string; dropped",
                            short_json(other)
                        ),
                        ptr.loc(ctx.file),
                    ));
                }
            });
        }
    });
    if values.is_empty() {
        ctx.push_diag(diag::err(
            diag::E_INVALID_TYPE,
            "string enum has no usable values",
            ptr.loc(ctx.file),
        ));
        return None;
    }
    let extensions = crate::operations::collect_extensions(ctx, map, ptr);
    let nt = NamedType {
        id: alloc_id(ctx, &hint),
        original_name: original_name(&hint),
        documentation: description(map),
        title: title(map),
        read_only: read_write_only(map).0,
        write_only: read_write_only(map).1,
        external_docs: crate::parse_external_docs(ctx, map.get("externalDocs"), ptr),
        default: crate::parse_default(ctx, map, ptr, "schema"),
        examples: crate::parse_examples(ctx, map, ptr),
        xml: crate::parse_xml(ctx, map, ptr),
        definition: TypeDef::EnumString(EnumStringType { values }),
        extensions,
        location: Some(ptr.loc(ctx.file)),
    };
    Some(maybe_wrap_nullable(ctx, nt, nullable))
}
/// Parse an integer-typed `enum`. `format: int64` selects `IntKind::Int64`;
/// everything else defaults to `Int32`. Members that are not i64-range
/// numbers are warned about and dropped; a literal `null` member makes the
/// enum nullable. An enum left with no usable values is an error.
fn parse_int_enum(
    ctx: &mut Ctx,
    map: &serde_json::Map<String, J>,
    raw_values: &[J],
    ptr: &mut Ptr,
    hint: NameHint,
    nullable: bool,
) -> Option<TypeRef> {
    let kind = match map.get("format").and_then(J::as_str) {
        Some("int64") => IntKind::Int64,
        _ => IntKind::Int32,
    };
    let mut values: Vec<EnumIntValue> = Vec::new();
    // Shadow: a `null` member can upgrade the incoming nullability.
    let mut nullable = nullable;
    ptr.with_token("enum", |ptr| {
        for (i, v) in raw_values.iter().enumerate() {
            ptr.with_index(i, |ptr| match v {
                J::Number(n) => match n.as_i64() {
                    Some(value) => values.push(EnumIntValue {
                        value,
                        documentation: None,
                    }),
                    // Floats and out-of-range u64s land here.
                    None => ctx.push_diag(diag::warn(
                        diag::W_ENUM_VALUE_DROPPED,
                        format!("integer enum value `{n}` is not an i64; dropped"),
                        ptr.loc(ctx.file),
                    )),
                },
                J::Null => nullable = true,
                other => {
                    ctx.push_diag(diag::warn(
                        diag::W_ENUM_VALUE_DROPPED,
                        format!(
                            "integer enum value `{}` is not a number; dropped",
                            short_json(other)
                        ),
                        ptr.loc(ctx.file),
                    ));
                }
            });
        }
    });
    if values.is_empty() {
        ctx.push_diag(diag::err(
            diag::E_INVALID_TYPE,
            "integer enum has no usable values",
            ptr.loc(ctx.file),
        ));
        return None;
    }
    let extensions = crate::operations::collect_extensions(ctx, map, ptr);
    let nt = NamedType {
        id: alloc_id(ctx, &hint),
        original_name: original_name(&hint),
        documentation: description(map),
        title: title(map),
        read_only: read_write_only(map).0,
        write_only: read_write_only(map).1,
        external_docs: crate::parse_external_docs(ctx, map.get("externalDocs"), ptr),
        default: crate::parse_default(ctx, map, ptr, "schema"),
        examples: crate::parse_examples(ctx, map, ptr),
        xml: crate::parse_xml(ctx, map, ptr),
        definition: TypeDef::EnumInt(EnumIntType { values, kind }),
        extensions,
        location: Some(ptr.loc(ctx.file)),
    };
    Some(maybe_wrap_nullable(ctx, nt, nullable))
}
/// Walk each sub-schema of a composition keyword (`oneOf`/`anyOf`/synthetic
/// `type`) into an untagged `UnionVariant`, returning the variants together
/// with their `TypeRef`s in walk order. Sub-schemas that fail to parse are
/// skipped (their diagnostics were already pushed).
fn lift_union_variants(
    ctx: &mut Ctx,
    parts: &[J],
    ptr: &mut Ptr,
    owner_id: &str,
    composition_key: &str,
) -> (Vec<UnionVariant>, Vec<TypeRef>) {
    let mut variants: Vec<UnionVariant> = Vec::with_capacity(parts.len());
    let mut refs: Vec<TypeRef> = Vec::with_capacity(parts.len());
    ptr.with_token(composition_key, |ptr| {
        for (idx, sub_schema) in parts.iter().enumerate() {
            ptr.with_index(idx, |ptr| {
                let hint = NameHint::inline(owner_id, format!("variant_{idx}"));
                let Some(type_ref) = parse_schema(ctx, sub_schema, ptr, hint) else {
                    return; // skip unparseable variant
                };
                refs.push(type_ref.clone());
                variants.push(UnionVariant {
                    r#type: type_ref,
                    tag: None,
                });
            });
        }
    });
    (variants, refs)
}
/// Build an untagged union from `oneOf` or `anyOf` (`composition_key`
/// selects which, `kind` records it in the IR). Nullability appends the
/// shared null singleton as an extra variant. Returns `None` when the
/// keyword is malformed or every variant failed to parse (the per-variant
/// walks already pushed their own diagnostics in the latter case).
fn parse_untagged_union(
    ctx: &mut Ctx,
    map: &serde_json::Map<String, J>,
    ptr: &mut Ptr,
    hint: NameHint,
    nullable: bool,
    composition_key: &str,
    kind: UnionKind,
) -> Option<TypeRef> {
    let id = alloc_id(ctx, &hint);
    let parts = match map.get(composition_key) {
        Some(J::Array(items)) if !items.is_empty() => items.clone(),
        _ => {
            ptr.with_token(composition_key, |ptr| {
                ctx.push_diag(diag::err(
                    diag::E_INVALID_TYPE,
                    format!("`{composition_key}` must be a non-empty array"),
                    ptr.loc(ctx.file),
                ));
            });
            return None;
        }
    };
    let (mut variants, _refs) = lift_union_variants(ctx, &parts, ptr, &id, composition_key);
    if variants.is_empty() {
        return None;
    }
    if nullable {
        let null_id = ensure_null_singleton(ctx);
        variants.push(UnionVariant {
            r#type: null_id,
            tag: None,
        });
    }
    let union = UnionType {
        variants,
        discriminator: None,
        kind,
    };
    let extensions = crate::operations::collect_extensions(ctx, map, ptr);
    let nt = NamedType {
        id: id.clone(),
        original_name: original_name(&hint),
        documentation: description(map),
        title: title(map),
        read_only: read_write_only(map).0,
        write_only: read_write_only(map).1,
        external_docs: crate::parse_external_docs(ctx, map.get("externalDocs"), ptr),
        default: crate::parse_default(ctx, map, ptr, "schema"),
        examples: crate::parse_examples(ctx, map, ptr),
        xml: crate::parse_xml(ctx, map, ptr),
        definition: TypeDef::Union(union),
        extensions,
        location: Some(ptr.loc(ctx.file)),
    };
    ctx.push_type(nt);
    Some(id)
}
/// Lower a multi-entry `type: [a, b, ...]` array by synthesising one
/// single-keyword `{"type": <t>}` sub-schema per entry and unioning them as
/// an implicit `oneOf`. Note the synthetic schemas carry only `type`, so
/// sibling constraints on the parent map (minLength, etc.) are not
/// propagated onto the variants.
fn parse_type_array_union(
    ctx: &mut Ctx,
    map: &serde_json::Map<String, J>,
    ptr: &mut Ptr,
    hint: NameHint,
    nullable: bool,
    types: &[String],
) -> Option<TypeRef> {
    let id = alloc_id(ctx, &hint);
    let parts: Vec<J> = types
        .iter()
        .map(|t| {
            let mut m = serde_json::Map::new();
            m.insert("type".to_string(), J::String(t.clone()));
            J::Object(m)
        })
        .collect();
    let (mut variants, _refs) = lift_union_variants(ctx, &parts, ptr, &id, "type");
    if variants.is_empty() {
        return None;
    }
    if nullable {
        let null_id = ensure_null_singleton(ctx);
        variants.push(UnionVariant {
            r#type: null_id,
            tag: None,
        });
    }
    let union = UnionType {
        variants,
        discriminator: None,
        kind: UnionKind::OneOf,
    };
    let extensions = crate::operations::collect_extensions(ctx, map, ptr);
    let nt = NamedType {
        id: id.clone(),
        original_name: original_name(&hint),
        documentation: description(map),
        title: title(map),
        read_only: read_write_only(map).0,
        write_only: read_write_only(map).1,
        external_docs: crate::parse_external_docs(ctx, map.get("externalDocs"), ptr),
        default: crate::parse_default(ctx, map, ptr, "schema"),
        examples: crate::parse_examples(ctx, map, ptr),
        xml: crate::parse_xml(ctx, map, ptr),
        definition: TypeDef::Union(union),
        extensions,
        location: Some(ptr.loc(ctx.file)),
    };
    ctx.push_type(nt);
    Some(id)
}
fn parse_const(
ctx: &mut Ctx,
map: &serde_json::Map<String, J>,
c: &J,
ptr: &mut Ptr,
hint: NameHint,
nullable: bool,
) -> Option<TypeRef> {
use forge_ir::{EnumIntType, EnumIntValue, EnumStringType, EnumStringValue, IntKind};
let nt_definition = match c {
J::String(s) => TypeDef::EnumString(EnumStringType {
values: vec![EnumStringValue {
value: s.clone(),
documentation: None,
}],
}),
J::Number(n) => {
let Some(int) = n.as_i64() else {
ctx.push_diag(diag::err(
diag::E_INVALID_TYPE,
format!(
"`const: {n}` is a non-integer number; integer-, string-, and \
null-typed `const` values are supported."
),
ptr.loc(ctx.file),
));
return None;
};
let kind = match map.get("format").and_then(J::as_str) {
Some("int64") => IntKind::Int64,
_ => IntKind::Int32,
};
TypeDef::EnumInt(EnumIntType {
values: vec![EnumIntValue {
value: int,
documentation: None,
}],
kind,
})
}
J::Null => {
return Some(match &hint {
NameHint::Inline { .. } => ensure_null_singleton(ctx),
NameHint::Named(_) => {
let extensions = crate::operations::collect_extensions(ctx, map, ptr);
let nt = NamedType {
id: alloc_id(ctx, &hint),
original_name: original_name(&hint),
documentation: description(map),
title: title(map),
read_only: read_write_only(map).0,
write_only: read_write_only(map).1,
external_docs: crate::parse_external_docs(
ctx,
map.get("externalDocs"),
ptr,
),
default: crate::parse_default(ctx, map, ptr, "schema"),
examples: crate::parse_examples(ctx, map, ptr),
xml: crate::parse_xml(ctx, map, ptr),
definition: TypeDef::Null,
extensions,
location: Some(ptr.loc(ctx.file)),
};
let id = nt.id.clone();
ctx.push_type(nt);
id
}
});
}
other => {
ctx.push_diag(diag::err(
diag::E_INVALID_TYPE,
format!(
"`const` value `{}` is not a string, integer, or null",
serde_json::to_string(other).unwrap_or_default()
),
ptr.loc(ctx.file),
));
return None;
}
};
let extensions = crate::operations::collect_extensions(ctx, map, ptr);
let nt = NamedType {
id: alloc_id(ctx, &hint),
original_name: original_name(&hint),
documentation: description(map),
title: title(map),
read_only: read_write_only(map).0,
write_only: read_write_only(map).1,
external_docs: crate::parse_external_docs(ctx, map.get("externalDocs"), ptr),
default: crate::parse_default(ctx, map, ptr, "schema"),
examples: crate::parse_examples(ctx, map, ptr),
xml: crate::parse_xml(ctx, map, ptr),
definition: nt_definition,
extensions,
location: Some(ptr.loc(ctx.file)),
};
Some(maybe_wrap_nullable(ctx, nt, nullable))
}
/// Build a discriminated `oneOf` union.
///
/// Tag resolution: explicit `discriminator.mapping` entries are validated
/// against the walked variant refs (dangling entries warned and dropped),
/// then every variant not covered by an explicit entry gets an implicit
/// mapping from its own type id. Tags are finally written back onto the
/// matching variants.
fn parse_oneof_discriminated(
    ctx: &mut Ctx,
    map: &serde_json::Map<String, J>,
    ptr: &mut Ptr,
    hint: NameHint,
    nullable: bool,
) -> Option<TypeRef> {
    let id = alloc_id(ctx, &hint);
    let one_of = match map.get("oneOf") {
        Some(J::Array(items)) if !items.is_empty() => items.clone(),
        _ => {
            ptr.with_token("oneOf", |ptr| {
                ctx.push_diag(diag::err(
                    diag::E_INVALID_TYPE,
                    "`oneOf` must be a non-empty array",
                    ptr.loc(ctx.file),
                ));
            });
            return None;
        }
    };
    let (mut variants, variant_refs) = lift_union_variants(ctx, &one_of, ptr, &id, "oneOf");
    if variants.is_empty() {
        // Every variant failed; diagnostics already pushed per variant.
        return None;
    }
    let disc_obj = match map.get("discriminator") {
        Some(J::Object(o)) => o,
        _ => {
            ptr.with_token("discriminator", |ptr| {
                ctx.push_diag(diag::err(
                    diag::E_INVALID_TYPE,
                    "discriminator must be an object",
                    ptr.loc(ctx.file),
                ));
            });
            return None;
        }
    };
    let property_name = match disc_obj.get("propertyName").and_then(J::as_str) {
        Some(p) => p.to_string(),
        None => {
            ptr.with_token("discriminator", |ptr| {
                ctx.push_diag(diag::err(
                    diag::E_MISSING_FIELD,
                    "discriminator is missing `propertyName`",
                    ptr.loc(ctx.file),
                ));
            });
            return None;
        }
    };
    // Explicit mapping entries, kept only when they resolve to a variant.
    let mut explicit: Vec<(String, TypeRef)> = Vec::new();
    if let Some(J::Object(m)) = disc_obj.get("mapping") {
        ptr.with_token("discriminator", |ptr| {
            ptr.with_token("mapping", |ptr| {
                for (tag, raw_target) in m {
                    // Non-string targets are silently skipped.
                    let Some(s) = raw_target.as_str() else {
                        continue;
                    };
                    // NOTE: this `id` shadows the union's own `id` for the
                    // rest of this loop body (it is the mapped variant id).
                    let id = ref_target_to_id(s);
                    if variant_refs.iter().any(|v| v == &id) {
                        explicit.push((tag.clone(), id));
                    } else {
                        ptr.with_token(tag, |ptr| {
                            ctx.push_diag(diag::warn(
                                diag::W_DISCRIMINATOR_MAPPING_DANGLING,
                                format!(
                                    "discriminator mapping `{tag}` -> `{s}` does not match any oneOf variant; dropping"
                                ),
                                ptr.loc(ctx.file),
                            ));
                        });
                    }
                }
            });
        });
    }
    // Complete the mapping: unmapped variants map from their own type id.
    let mut mapping: Vec<(String, TypeRef)> = explicit.clone();
    for vref in &variant_refs {
        if !mapping.iter().any(|(_, t)| t == vref) {
            mapping.push((vref.clone(), vref.clone()));
        }
    }
    // Write the resolved tag back onto each variant (first match wins).
    for v in variants.iter_mut() {
        if let Some((tag, _)) = mapping.iter().find(|(_, t)| t == &v.r#type) {
            v.tag = Some(tag.clone());
        }
    }
    let extensions = ptr.with_token("discriminator", |ptr| {
        crate::operations::collect_extensions(ctx, disc_obj, ptr)
    });
    // The null variant is appended untagged — it never participates in
    // discriminator dispatch.
    if nullable {
        let null_id = ensure_null_singleton(ctx);
        variants.push(UnionVariant {
            r#type: null_id,
            tag: None,
        });
    }
    let union = UnionType {
        variants,
        discriminator: Some(Discriminator {
            property_name,
            mapping,
            extensions,
        }),
        kind: UnionKind::OneOf,
    };
    let outer_extensions = crate::operations::collect_extensions(ctx, map, ptr);
    let nt = NamedType {
        id: id.clone(),
        original_name: original_name(&hint),
        documentation: description(map),
        title: title(map),
        read_only: read_write_only(map).0,
        write_only: read_write_only(map).1,
        external_docs: crate::parse_external_docs(ctx, map.get("externalDocs"), ptr),
        default: crate::parse_default(ctx, map, ptr, "schema"),
        examples: crate::parse_examples(ctx, map, ptr),
        xml: crate::parse_xml(ctx, map, ptr),
        definition: TypeDef::Union(union),
        extensions: outer_extensions,
        location: Some(ptr.loc(ctx.file)),
    };
    ctx.push_type(nt);
    Some(id)
}
/// Converts a discriminator-mapping target (typically a `$ref` string like
/// `#/components/schemas/Pet`, or a bare schema name) into the sanitized
/// type id it would resolve to.
fn ref_target_to_id(raw: &str) -> String {
    let tail = match raw.strip_prefix("#/components/schemas/") {
        Some(rest) => rest,
        None => raw,
    };
    crate::sanitize::ident(tail.trim_start_matches('/'))
}
/// Allocates a type id for `hint`.
///
/// Named schemas keep their sanitized name verbatim — except when the name
/// collides with the reserved Null-singleton id, in which case the singleton
/// is registered, a warning is emitted, and a uniquified name is returned.
/// Inline schemas always receive a uniquified id derived from owner/role.
pub(crate) fn alloc_id(ctx: &mut Ctx, hint: &NameHint) -> String {
    let NameHint::Named(raw) = hint else {
        // Inline schema: uniquify the "<owner>_<role>" base.
        return ctx.unique_id(&hint.base());
    };
    let id = crate::sanitize::ident(raw);
    if id != NULL_ID {
        return id;
    }
    // The user's schema shadows the reserved Null id: make sure the singleton
    // exists first, then rename the user's schema out of the way.
    ensure_null_singleton(ctx);
    let bumped = ctx.unique_id(&id);
    ctx.push_diag(diag::warn(
        diag::W_RESERVED_NAME,
        format!("schema id `{id}` is reserved for the Null type singleton; renamed to `{bumped}`"),
        forge_ir::SpecLocation::new(""),
    ));
    bumped
}
/// Registers the shared Null singleton type under `NULL_ID` if it is not
/// already present, and returns a reference to it.
pub(crate) fn ensure_null_singleton(ctx: &mut Ctx) -> TypeRef {
    if ctx.types.contains_key(NULL_ID) {
        return NULL_ID.to_string();
    }
    // Bare TypeDef::Null with no metadata: the singleton carries nothing but
    // its reserved id.
    let singleton = NamedType {
        id: NULL_ID.to_string(),
        original_name: None,
        documentation: None,
        title: None,
        read_only: false,
        write_only: false,
        external_docs: None,
        default: None,
        examples: Vec::new(),
        xml: None,
        definition: TypeDef::Null,
        extensions: Vec::new(),
        location: None,
    };
    ctx.push_type(singleton);
    NULL_ID.to_string()
}
/// Registers `nt` and returns its id. When `nullable` is set, `nt`'s
/// definition is moved onto a fresh `<id>_nonnull` companion type, and the
/// original id is registered instead as a `oneOf` union of that companion
/// and the Null singleton, with all of `nt`'s metadata kept on the outer
/// union type.
pub(crate) fn maybe_wrap_nullable(ctx: &mut Ctx, nt: NamedType, nullable: bool) -> TypeRef {
    let outer_id = nt.id.clone();
    if !nullable {
        ctx.push_type(nt);
        return outer_id;
    }
    // Move the concrete definition onto an anonymous companion type that
    // carries no user-facing metadata of its own.
    let inner_id = ctx.unique_id(&format!("{outer_id}_nonnull"));
    ctx.push_type(NamedType {
        id: inner_id.clone(),
        original_name: None,
        documentation: None,
        title: None,
        read_only: false,
        write_only: false,
        external_docs: None,
        default: None,
        examples: Vec::new(),
        xml: None,
        definition: nt.definition,
        extensions: Vec::new(),
        location: nt.location.clone(),
    });
    // The outer type becomes `inner | null`, undiscriminated.
    let variants = vec![
        UnionVariant {
            r#type: inner_id,
            tag: None,
        },
        UnionVariant {
            r#type: ensure_null_singleton(ctx),
            tag: None,
        },
    ];
    ctx.push_type(NamedType {
        id: outer_id.clone(),
        original_name: nt.original_name,
        documentation: nt.documentation,
        title: nt.title,
        read_only: nt.read_only,
        write_only: nt.write_only,
        external_docs: nt.external_docs,
        default: nt.default,
        examples: nt.examples,
        xml: nt.xml,
        definition: TypeDef::Union(UnionType {
            variants,
            discriminator: None,
            kind: UnionKind::OneOf,
        }),
        extensions: nt.extensions,
        location: nt.location,
    });
    outer_id
}
/// Returns the pre-sanitization spec name for a named schema; inline schemas
/// have no original name.
pub(crate) fn original_name(hint: &NameHint) -> Option<String> {
    if let NameHint::Named(name) = hint {
        Some(name.clone())
    } else {
        None
    }
}
/// Extracts the `description` field from a schema object, if it is a string.
pub(crate) fn description(map: &serde_json::Map<String, J>) -> Option<String> {
    match map.get("description") {
        Some(J::String(text)) => Some(text.clone()),
        _ => None,
    }
}
/// Extracts the `title` field from a schema object, if it is a string.
pub(crate) fn title(map: &serde_json::Map<String, J>) -> Option<String> {
    map.get("title")?.as_str().map(str::to_string)
}
/// Reads the `(readOnly, writeOnly)` flags from a schema object; anything
/// other than a literal boolean `true` counts as `false`.
pub(crate) fn read_write_only(map: &serde_json::Map<String, J>) -> (bool, bool) {
    let flag = |key: &str| matches!(map.get(key), Some(J::Bool(true)));
    (flag("readOnly"), flag("writeOnly"))
}
/// Renders a JSON value compactly for diagnostics, truncating to at most 40
/// bytes (backed off to a UTF-8 character boundary) with a trailing ellipsis.
fn short_json(v: &J) -> String {
    let rendered = serde_json::to_string(v).unwrap_or_default();
    if rendered.len() <= 40 {
        return rendered;
    }
    // Walk back from byte 40 until the cut lands on a char boundary so the
    // slice below cannot panic.
    let mut cut = 40;
    while !rendered.is_char_boundary(cut) {
        cut -= 1;
    }
    let mut out = rendered[..cut].to_string();
    out.push('…');
    out
}