// pathfinder_class_hash/lib.rs
1//! Class hash computation for Cairo and Sierra contracts.
2//!
3//! This crate provides functionality to compute class hashes for both Cairo 0.x
4//! and Sierra (Cairo 1.x+) contracts in the Starknet ecosystem. The class hash
5//! is a unique identifier for a contract's code that is used throughout the
6//! Starknet protocol.
7//!
8//! # Class Hash Types
9//!
10//! There are two main types of class hashes:
11//!
12//! * Cairo 0.x class hashes - Computed for legacy Cairo contracts
13//! * Sierra class hashes - Computed for newer Cairo 1.x+ contracts using the
14//! Sierra intermediate representation
15//!
16//! # Main Components
17//!
18//! * [`compute_class_hash`] - The main entry point for computing class hashes
19//! for both Cairo and Sierra contracts
20//! * [`ComputedClassHash`] - An enum representing the computed hash for either
21//! contract type
22//! * [`PreparedCairoContractDefinition`] - A prepared Cairo contract definition
23//! ready for hashing
24//! * [`RawCairoContractDefinition`] - An unprepared Cairo contract definition
25//!
26//! # Implementation Details
27//!
28//! ## Cairo Class Hash
29//!
30//! The Cairo class hash computation follows these steps:
31//!
32//! 1. The contract definition is prepared by removing debug info and handling
33//! special cases for Cairo 0.8+ attributes
34//! 2. The prepared definition is serialized to JSON with Python-compatible
35//! formatting
36//! 3. A truncated Keccak hash is computed from the serialized JSON
37//! 4. Entry points, builtins, and bytecode are processed through hash chains
38//! 5. The final class hash is computed by combining all components
39//!
40//! ## Sierra Class Hash
41//!
42//! The Sierra class hash computation is simpler:
43//!
44//! 1. The contract version is validated
45//! 2. Entry points are processed in order
46//! 3. The ABI string is hashed
47//! 4. The Sierra program is hashed
48//! 5. All components are combined into the final hash
49//!
50//! # Compatibility
51//!
52//! This crate maintains compatibility with the official Starknet implementation
53//! and includes extensive test vectors to ensure hash computation matches the
54//! network's expectations.
55//!
56//! See the [official Starknet documentation](https://docs.starknet.io/documentation/architecture_and_concepts/Contracts/class-hash/)
57//! for more details on class hash computation.
58
59use anyhow::{Context, Error, Result};
60use pathfinder_common::class_definition::EntryPointType::*;
61use pathfinder_common::{felt_bytes, ClassHash};
62use pathfinder_crypto::hash::{HashChain, PoseidonHasher};
63use pathfinder_crypto::Felt;
64use serde::Serialize;
65use sha3::Digest;
66
/// Computed class hash.
///
/// Wraps the resulting [`ClassHash`] while remembering whether it was derived
/// from a Cairo 0.x or a Sierra (Cairo 1.x+) class definition.
#[derive(Debug, PartialEq)]
pub enum ComputedClassHash {
    Cairo(ClassHash),
    Sierra(ClassHash),
}
73
74impl ComputedClassHash {
75 pub fn hash(&self) -> ClassHash {
76 match self {
77 ComputedClassHash::Cairo(h) => *h,
78 ComputedClassHash::Sierra(h) => *h,
79 }
80 }
81}
82
83/// Computes the starknet class hash for given class definition JSON blob.
84///
85/// This function first parses the JSON blob to decide if it's a Cairo or Sierra
86/// class definition and then calls the appropriate function to compute the
87/// class hash with the parsed definition.
88pub fn compute_class_hash(contract_definition_dump: &[u8]) -> Result<ComputedClassHash> {
89 let contract_definition = parse_contract_definition(contract_definition_dump)
90 .context("Failed to parse contract definition")?;
91
92 match contract_definition {
93 json::ContractDefinition::Sierra(definition) => compute_sierra_class_hash(definition)
94 .map(ComputedClassHash::Sierra)
95 .context("Compute class hash"),
96 json::ContractDefinition::Cairo(definition) => compute_cairo_class_hash(definition.into())
97 .map(ComputedClassHash::Cairo)
98 .context("Compute class hash"),
99 }
100}
101
102/// Compute class hash for a Cairo contract definition
103pub fn compute_cairo_hinted_class_hash(
104 contract_definition: &PreparedCairoContractDefinition<'_>,
105) -> Result<Felt> {
106 use std::io::Write;
107
108 // It's less efficient than tweaking the formatter to emit the encoding but I
109 // don't know how and this is an emergency issue (mainnt nodes stuck).
110 let mut string_buffer = vec![];
111
112 let mut ser =
113 serde_json::Serializer::with_formatter(&mut string_buffer, PythonDefaultFormatter);
114
115 contract_definition
116 .0
117 .serialize(&mut ser)
118 .context("Serializing contract_definition for Keccak256")?;
119
120 let raw_json_output = String::from_utf8(string_buffer).expect("Invalid UTF-8");
121
122 let mut keccak_writer = KeccakWriter::default();
123 keccak_writer
124 .write_all(raw_json_output.as_bytes())
125 .expect("Failed to write to KeccakWriter");
126
127 let KeccakWriter(hash) = keccak_writer;
128 Ok(truncated_keccak(<[u8; 32]>::from(hash.finalize())))
129}
130
131/// Parse either a Sierra or a Cairo contract definition.
132///
133/// Due to an issue in serde_json we can't use an untagged enum and simply
134/// derive a Deserialize implementation: <https://github.com/serde-rs/json/issues/559>
135pub fn parse_contract_definition(
136 contract_definition_dump: &[u8],
137) -> serde_json::Result<json::ContractDefinition<'_>> {
138 serde_json::from_slice::<json::SierraContractDefinition<'_>>(contract_definition_dump)
139 .map(json::ContractDefinition::Sierra)
140 .or_else(|_| {
141 serde_json::from_slice::<json::CairoContractDefinition<'_>>(contract_definition_dump)
142 .map(json::ContractDefinition::Cairo)
143 })
144}
145
146/// Helpers to compute class hashes from the parts that compose a Cairo or
147/// Sierra contract
148pub mod from_parts {
149 use std::collections::HashMap;
150
151 use anyhow::Result;
152 use pathfinder_common::class_definition::{
153 EntryPointType,
154 SelectorAndOffset,
155 SierraEntryPoints,
156 };
157 use pathfinder_common::ClassHash;
158 use pathfinder_crypto::Felt;
159
160 use super::json;
161
162 /// Compute class hash from the parts that compose a Cairo contract
163 pub fn compute_cairo_class_hash(
164 abi: &[u8],
165 program: &[u8],
166 external_entry_points: Vec<SelectorAndOffset>,
167 l1_handler_entry_points: Vec<SelectorAndOffset>,
168 constructor_entry_points: Vec<SelectorAndOffset>,
169 ) -> Result<ClassHash> {
170 let mut entry_points_by_type = HashMap::new();
171 entry_points_by_type.insert(EntryPointType::External, external_entry_points);
172 entry_points_by_type.insert(EntryPointType::L1Handler, l1_handler_entry_points);
173 entry_points_by_type.insert(EntryPointType::Constructor, constructor_entry_points);
174
175 let contract_definition = json::CairoContractDefinition {
176 abi: serde_json::from_slice(abi)?,
177 program: serde_json::from_slice(program)?,
178 entry_points_by_type,
179 };
180
181 super::compute_cairo_class_hash(contract_definition.into())
182 }
183
184 /// Compute class hash from the parts that compose a Sierra contract
185 pub fn compute_sierra_class_hash(
186 abi: &str,
187 sierra_program: Vec<Felt>,
188 contract_class_version: &str,
189 entry_points: SierraEntryPoints,
190 ) -> Result<ClassHash> {
191 let mut entry_points_by_type = HashMap::new();
192 entry_points_by_type.insert(EntryPointType::External, entry_points.external);
193 entry_points_by_type.insert(EntryPointType::L1Handler, entry_points.l1_handler);
194 entry_points_by_type.insert(EntryPointType::Constructor, entry_points.constructor);
195
196 let contract_definition = json::SierraContractDefinition {
197 abi: abi.into(),
198 sierra_program,
199 contract_class_version: contract_class_version.into(),
200 entry_points_by_type,
201 };
202
203 super::compute_sierra_class_hash(contract_definition)
204 }
205}
206
/// An unprepared Cairo contract definition.
///
/// This type represents a raw, unmodified Cairo contract definition before any
/// preprocessing for class hash computation. It serves as a type-safe way to
/// ensure contract definitions go through the proper preparation process.
///
/// # Type Safety
///
/// This type works together with [`PreparedCairoContractDefinition`] to provide
/// compile-time guarantees that contract definitions are properly prepared
/// (via [`prepare_json_contract_definition`]) before being used in class hash
/// computation.
///
/// # Implementation
///
/// Internally wraps a [`json::CairoContractDefinition`] and can be created from
/// one using the [`From`] implementation.
pub struct RawCairoContractDefinition<'a>(json::CairoContractDefinition<'a>);
224
225impl<'a> From<json::CairoContractDefinition<'a>> for RawCairoContractDefinition<'a> {
226 fn from(value: json::CairoContractDefinition<'a>) -> Self {
227 RawCairoContractDefinition(value)
228 }
229}
230
231impl<'a> RawCairoContractDefinition<'a> {
232 /// Get the inner contract definition
233 pub fn inner(&self) -> &json::CairoContractDefinition<'a> {
234 &self.0
235 }
236}
237
/// A prepared Cairo contract definition ready for class hash computation.
///
/// This type represents a Cairo contract definition that has been preprocessed
/// and is ready for class hash computation. The preparation process includes:
/// - Removal of debug information
/// - Handling of Cairo 0.8+ specific attributes
/// - Proper formatting of named tuple types
///
/// # Type Safety
///
/// This type works together with [`RawCairoContractDefinition`] to provide
/// compile-time guarantees that contract definitions are properly prepared
/// before being used in class hash computation.
///
/// # Creation
///
/// This type can be created from a [`json::CairoContractDefinition`] using
/// [`TryFrom`], which internally uses [`prepare_json_contract_definition`] to
/// ensure all necessary preprocessing steps are applied. The conversion may
/// fail if the contract definition is invalid or cannot be properly prepared.
pub struct PreparedCairoContractDefinition<'a>(json::CairoContractDefinition<'a>);
259
260impl<'a> TryFrom<json::CairoContractDefinition<'a>> for PreparedCairoContractDefinition<'a> {
261 type Error = Error;
262
263 fn try_from(value: json::CairoContractDefinition<'a>) -> Result<Self, Self::Error> {
264 prepare_json_contract_definition(RawCairoContractDefinition::from(value))
265 }
266}
267
268impl<'a> PreparedCairoContractDefinition<'a> {
269 /// Get the inner contract definition
270 pub fn inner(&self) -> &json::CairoContractDefinition<'a> {
271 &self.0
272 }
273}
274
/// Computes the class hash for given Cairo class definition.
///
/// The structure of the blob is not strictly defined, so it lives in privacy
/// under `json` module of this module. The class hash has [official
/// documentation][starknet-doc] and [cairo-lang
/// has an implementation][cairo-compute] which is half-python and
/// half-[cairo][cairo-contract].
///
/// Outline of the hashing is:
///
/// 1. class definition is serialized with python's [`sort_keys=True`
///    option][py-sortkeys], then a truncated Keccak256 hash is calculated of
///    the serialized json
/// 2. a [hash chain][`HashChain`] construction is used to process in order the
///    contract entry points, builtins, the truncated keccak hash and bytecodes
/// 3. each of the hashchains is hash chained together to produce a final class
///    hash
///
/// Hash chain construction is explained at the [official
/// documentation][starknet-doc], but its text explanations are much more
/// complex than the actual implementation in `HashChain`.
///
/// [starknet-doc]: https://docs.starknet.io/documentation/architecture_and_concepts/Contracts/class-hash/
/// [cairo-compute]: https://github.com/starkware-libs/cairo-lang/blob/64a7f6aed9757d3d8d6c28bd972df73272b0cb0a/src/starkware/starknet/core/os/contract_hash.py
/// [cairo-contract]: https://github.com/starkware-libs/cairo-lang/blob/64a7f6aed9757d3d8d6c28bd972df73272b0cb0a/src/starkware/starknet/core/os/contracts.cairo#L76-L118
/// [py-sortkeys]: https://github.com/starkware-libs/cairo-lang/blob/64a7f6aed9757d3d8d6c28bd972df73272b0cb0a/src/starkware/starknet/core/os/contract_hash.py#L58-L71
pub fn compute_cairo_class_hash(
    contract_definition: RawCairoContractDefinition<'_>,
) -> Result<ClassHash> {
    // Prepare the contract definition for class hash computation
    let contract_definition = prepare_json_contract_definition(contract_definition)?;

    // Compute the truncated Keccak hash of the prepared contract definition
    let truncated_keccak = compute_cairo_hinted_class_hash(&contract_definition)?;

    const API_VERSION: Felt = Felt::ZERO;

    let mut outer = HashChain::default();

    // This wasn't in the docs, but similarly to contract_state hash, we start with
    // this 0, so this will yield outer == H(0, 0); However, dissimilarly to
    // contract_state hash, we do include the number of items in this
    // class_hash.
    outer.update(API_VERSION);

    // It is important to process the different entrypoint hashchains in correct
    // order (External, L1Handler, Constructor). Each of the entrypoint lists
    // gets updated into the `outer` hashchain.
    //
    // This implementation doesn't preparse the strings, which makes it a bit more
    // noisy. Late parsing is made in an attempt to lean on the one big string
    // allocation we've already got, but these three hash chains could be
    // constructed at deserialization time.
    [External, L1Handler, Constructor]
        .iter()
        .map(|key| {
            contract_definition
                .0
                .entry_points_by_type
                .get(key)
                // a missing entry point type hashes the same as an empty list
                .unwrap_or(&Vec::new())
                .iter()
                // flatten each entry point to get a list of (selector, offset, selector, offset,
                // ...)
                .flat_map(|x| [x.selector.0, x.offset.0].into_iter())
                .fold(HashChain::default(), |mut hc, next| {
                    hc.update(next);
                    hc
                })
        })
        .for_each(|x| outer.update(x.finalize()));

    // Folds a fallible felt into a hash chain, short-circuiting on the first
    // error (used with `try_fold` below).
    fn update_hash_chain(mut hc: HashChain, next: Result<Felt, Error>) -> Result<HashChain, Error> {
        hc.update(next?);
        Result::<_, Error>::Ok(hc)
    }

    // Builtin names are hashed as the big-endian felt encoding of their bytes.
    let builtins = contract_definition
        .0
        .program
        .builtins
        .iter()
        .enumerate()
        .map(|(i, s)| (i, s.as_bytes()))
        .map(|(i, s)| {
            Felt::from_be_slice(s).with_context(|| format!("Invalid builtin at index {i}"))
        })
        .try_fold(HashChain::default(), update_hash_chain)
        .context("Failed to process contract_definition.program.builtins")?;

    outer.update(builtins.finalize());

    outer.update(truncated_keccak);

    // Bytecode words are hex strings; parse each into a felt and chain them.
    let bytecodes = contract_definition
        .0
        .program
        .data
        .iter()
        .enumerate()
        .map(|(i, s)| {
            Felt::from_hex_str(s).with_context(|| format!("Invalid bytecode at index {i}"))
        })
        .try_fold(HashChain::default(), update_hash_chain)
        .context("Failed to process contract_definition.program.data")?;

    outer.update(bytecodes.finalize());

    Ok(ClassHash(outer.finalize()))
}
385
386/// Prepares a Cairo contract definition for class hash computation by applying
387/// necessary transformations.
388///
389/// This function performs several modifications to ensure compatibility with
390/// Starknet's class hash computation:
391///
392/// 1. Removes the `debug_info` field from the program
393/// 2. Handles Cairo 0.8+ specific attribute fields:
394/// - Removes empty `accessible_scopes` arrays
395/// - Removes `null` `flow_tracking_data` values
396/// 3. Ensures proper spacing in named tuple type definitions for older Cairo
397/// versions
398///
399/// # Arguments
400///
401/// * `contract_definition` - A raw Cairo contract definition to be prepared
402///
403/// # Returns
404///
405/// Returns a `Result` containing the prepared contract definition ready for
406/// class hash computation, or an error if the preparation process fails.
407///
408/// # Note
409///
410/// This preparation step is crucial for maintaining compatibility with the
411/// official Starknet implementation and ensuring consistent class hash
412/// computation across different Cairo versions.
413pub fn prepare_json_contract_definition(
414 contract_definition: RawCairoContractDefinition<'_>,
415) -> Result<PreparedCairoContractDefinition<'_>, Error> {
416 let mut contract_definition = contract_definition.0;
417 contract_definition.program.debug_info = None;
418
419 // Cairo 0.8 added "accessible_scopes" and "flow_tracking_data" attribute
420 // fields, which were not present in older contracts. They present as null /
421 // empty for older contracts and should not be included in the hash
422 // calculation in these cases.
423 //
424 // We therefore check and remove them from the definition before calculating the
425 // hash.
426 contract_definition
427 .program
428 .attributes
429 .iter_mut()
430 .try_for_each(|attr| -> anyhow::Result<()> {
431 let vals = attr
432 .as_object_mut()
433 .context("Program attribute was not an object")?;
434
435 match vals.get_mut("accessible_scopes") {
436 Some(serde_json::Value::Array(array)) => {
437 if array.is_empty() {
438 vals.remove("accessible_scopes");
439 }
440 }
441 Some(_other) => {
442 anyhow::bail!(
443 r#"A program's attribute["accessible_scopes"] was not an array type."#
444 );
445 }
446 None => {}
447 }
448 // We don't know what this type is supposed to be, but if its missing it is
449 // null.
450 if let Some(serde_json::Value::Null) = vals.get_mut("flow_tracking_data") {
451 vals.remove("flow_tracking_data");
452 }
453
454 Ok(())
455 })?;
456
457 fn add_extra_space_to_cairo_named_tuples(value: &mut serde_json::Value) {
458 match value {
459 serde_json::Value::Array(v) => walk_array(v),
460 serde_json::Value::Object(m) => walk_map(m),
461 _ => {}
462 }
463 }
464
465 fn walk_array(array: &mut [serde_json::Value]) {
466 for v in array.iter_mut() {
467 add_extra_space_to_cairo_named_tuples(v);
468 }
469 }
470
471 fn walk_map(object: &mut serde_json::Map<String, serde_json::Value>) {
472 for (k, v) in object.iter_mut() {
473 match v {
474 serde_json::Value::String(s) => {
475 let new_value = add_extra_space_to_named_tuple_type_definition(k, s);
476 if new_value.as_ref() != s {
477 *v = serde_json::Value::String(new_value.into());
478 }
479 }
480 _ => add_extra_space_to_cairo_named_tuples(v),
481 }
482 }
483 }
484
485 fn add_extra_space_to_named_tuple_type_definition<'a>(
486 key: &str,
487 value: &'a str,
488 ) -> std::borrow::Cow<'a, str> {
489 use std::borrow::Cow::*;
490 match key {
491 "cairo_type" | "value" => Owned(add_extra_space_before_colon(value)),
492 _ => Borrowed(value),
493 }
494 }
495
496 fn add_extra_space_before_colon(v: &str) -> String {
497 // This is required because if we receive an already correct ` : `, we will
498 // still "repair" it to ` : ` which we then fix at the end.
499 v.replace(": ", " : ").replace(" :", " :")
500 }
501
502 // Handle a backwards compatibility hack which is required if compiler_version
503 // is not present. See `insert_space` for more details.
504 if contract_definition.program.compiler_version.is_none() {
505 add_extra_space_to_cairo_named_tuples(&mut contract_definition.program.identifiers);
506 add_extra_space_to_cairo_named_tuples(&mut contract_definition.program.reference_manager);
507 }
508
509 Ok(PreparedCairoContractDefinition(contract_definition))
510}
511
/// Computes the class hash for a Sierra class definition.
///
/// This matches the (not very precise) [official documentation][starknet-doc]
/// and the [cairo-lang implementation][cairo-compute] written in Cairo.
///
/// Calculation is somewhat simpler than for Cairo classes, since it does _not_
/// involve serializing JSON and calculating hashes for the JSON output.
/// Instead, ABI is handled as a string and all other relevant parts of the
/// class definition are transformed into Felts and hashed using Poseidon.
///
/// [starknet-doc]: https://docs.starknet.io/documentation/architecture_and_concepts/Contracts/class-hash/
/// [cairo-compute]: https://github.com/starkware-libs/cairo-lang/blob/12ca9e91bbdc8a423c63280949c7e34382792067/src/starkware/starknet/core/os/contract_class/contract_class.cairo#L42
pub fn compute_sierra_class_hash(
    contract_definition: json::SierraContractDefinition<'_>,
) -> Result<ClassHash> {
    // Only class version 0.1.0 has a defined hashing scheme here.
    if contract_definition.contract_class_version != "0.1.0" {
        anyhow::bail!("Unsupported Sierra class version");
    }

    let mut hash = PoseidonHasher::default();

    // The hash input is prefixed with a version string acting as a domain
    // separator.
    const SIERRA_VERSION: Felt = felt_bytes!(b"CONTRACT_CLASS_V0.1.0");
    hash.write(SIERRA_VERSION.into());

    // It is important to process the different entrypoint hashes in correct
    // order (External, L1Handler, Constructor). Each per-type Poseidon hash is
    // folded into the top-level `hash`.
    //
    // This implementation doesn't preparse the strings, which makes it a bit more
    // noisy. Late parsing is made in an attempt to lean on the one big string
    // allocation we've already got, but these three hashes could be
    // constructed at deserialization time.
    [External, L1Handler, Constructor]
        .iter()
        .map(|key| {
            contract_definition
                .entry_points_by_type
                .get(key)
                // a missing entry point type hashes the same as an empty list
                .unwrap_or(&Vec::new())
                .iter()
                // flatten each entry point to get a list of (selector, function_idx, selector,
                // function_idx, ...)
                .flat_map(|x| [x.selector.0, x.function_idx.into()].into_iter())
                .fold(PoseidonHasher::default(), |mut hc, next| {
                    hc.write(next.into());
                    hc
                })
        })
        .for_each(|x| hash.write(x.finish()));

    // The ABI participates as the truncated Keccak256 of its raw string bytes.
    let abi_truncated_keccak = {
        let mut keccak = sha3::Keccak256::default();
        keccak.update(contract_definition.abi.as_bytes());
        truncated_keccak(<[u8; 32]>::from(keccak.finalize()))
    };
    hash.write(abi_truncated_keccak.into());

    // The Sierra program is hashed as a flat Poseidon hash over its felts.
    let program_hash = {
        let program_hash = contract_definition.sierra_program.iter().fold(
            PoseidonHasher::default(),
            |mut hc, next| {
                hc.write((*next).into());
                hc
            },
        );
        program_hash.finish()
    };
    hash.write(program_hash);

    Ok(ClassHash(hash.finish().into()))
}
583
584/// Computes a truncated Keccak hash compatible with Starknet's field element
585/// representation.
586///
587/// This function takes a 32-byte Keccak hash and truncates it to ensure it fits
588/// within Starknet's field element size (251 bits) by masking the most
589/// significant bits.
590///
591/// # Arguments
592///
593/// * `plain` - A 32-byte array containing the full Keccak hash
594///
595/// # Returns
596///
597/// Returns a `Felt` containing the truncated hash value.
598///
599/// # Implementation Note
600///
601/// The implementation masks the first byte with 0x03 to ensure the result is
602/// less than the Starknet prime field modulus. This matches the official Cairo
603/// implementation: <https://github.com/starkware-libs/cairo-lang/blob/64a7f6aed9757d3d8d6c28bd972df73272b0cb0a/src/starkware/starknet/public/abi.py#L21-L26>
604pub fn truncated_keccak(mut plain: [u8; 32]) -> Felt {
605 // python code masks with (2**250 - 1) which starts 0x03 and is followed by 31
606 // 0xff in be truncation is needed not to overflow the field element.
607 plain[0] &= 0x03;
608 Felt::from_be_bytes(plain).expect("cannot overflow: smaller than modulus")
609}
610
/// `std::io::Write` adapter for Keccak256; we don't need the serialized version
/// in compute_class_hash, but we need the truncated_keccak hash.
///
/// When debugging mismatching hashes, it might be useful to check the length of
/// each before trying to find the wrongly serialized spot. Example length >
/// 500kB.
#[derive(Default)]
struct KeccakWriter(sha3::Keccak256);
619
620impl std::io::Write for KeccakWriter {
621 fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
622 self.0.update(buf);
623 Ok(buf.len())
624 }
625
626 fn flush(&mut self) -> std::io::Result<()> {
627 // noop is fine, we'll finalize after the write phase
628 Ok(())
629 }
630}
631
/// Starkware doesn't use compact formatting for JSON but default python
/// formatting (`", "` and `": "` separators, `\uXXXX`-escaped non-ASCII).
/// This is required to hash to the same value after sorted serialization.
struct PythonDefaultFormatter;
636
637impl serde_json::ser::Formatter for PythonDefaultFormatter {
638 fn begin_array_value<W>(&mut self, writer: &mut W, first: bool) -> std::io::Result<()>
639 where
640 W: ?Sized + std::io::Write,
641 {
642 if first {
643 Ok(())
644 } else {
645 writer.write_all(b", ")
646 }
647 }
648
649 fn begin_object_key<W>(&mut self, writer: &mut W, first: bool) -> std::io::Result<()>
650 where
651 W: ?Sized + std::io::Write,
652 {
653 if first {
654 Ok(())
655 } else {
656 writer.write_all(b", ")
657 }
658 }
659
660 fn begin_object_value<W>(&mut self, writer: &mut W) -> std::io::Result<()>
661 where
662 W: ?Sized + std::io::Write,
663 {
664 writer.write_all(b": ")
665 }
666
667 // Credit: Jonathan Lei from starknet-rs (https://github.com/xJonathanLEI/starknet-rs)`
668 #[inline]
669 fn write_string_fragment<W>(&mut self, writer: &mut W, fragment: &str) -> std::io::Result<()>
670 where
671 W: ?Sized + std::io::Write,
672 {
673 let mut buf = [0, 0];
674
675 for c in fragment.chars() {
676 if c.is_ascii() {
677 writer.write_all(&[c as u8])?;
678 } else {
679 let buf = c.encode_utf16(&mut buf);
680 for i in buf {
681 write!(writer, r"\u{:4x}", i)?;
682 }
683 }
684 }
685
686 Ok(())
687 }
688}
689
690/// Helpers to parse and serialize the parts that compose a Cairo or Sierra
691/// contract
692pub mod json {
693 use std::borrow::Cow;
694 use std::collections::{BTreeMap, HashMap};
695
696 use pathfinder_common::class_definition::{
697 EntryPointType,
698 SelectorAndFunctionIndex,
699 SelectorAndOffset,
700 };
701
    /// A contract definition recognized as either a legacy Cairo 0.x class or
    /// a Sierra (Cairo 1.x+) class.
    pub enum ContractDefinition<'a> {
        Cairo(CairoContractDefinition<'a>),
        Sierra(SierraContractDefinition<'a>),
    }
706
    /// A Sierra contract definition
    #[derive(serde::Deserialize)]
    #[serde(deny_unknown_fields)]
    pub struct SierraContractDefinition<'a> {
        /// Contract ABI.
        #[serde(borrow)]
        pub abi: Cow<'a, str>,

        /// Main program definition.
        pub sierra_program: Vec<pathfinder_crypto::Felt>,

        /// Contract class version; hash computation only supports "0.1.0".
        #[serde(borrow)]
        pub contract_class_version: Cow<'a, str>,

        /// The contract entry points
        pub entry_points_by_type: HashMap<EntryPointType, Vec<SelectorAndFunctionIndex>>,
    }
725
    /// Our version of the cairo contract definition used to deserialize and
    /// re-serialize a modified version for a hash of the contract
    /// definition.
    ///
    /// The implementation uses `serde_json::Value` extensively for the
    /// unknown/undefined structure, and the correctness of this
    /// implementation depends on the following features of serde_json:
    ///
    /// - feature `raw_value` has to be enabled for the thrown away
    ///   `program.debug_info`
    /// - feature `preserve_order` has to be disabled, as we want everything
    ///   sorted
    /// - feature `arbitrary_precision` has to be enabled, as there are big
    ///   integers in the input
    ///
    /// It would be much more efficient to have a serde_json::Value which would
    /// only hold borrowed types.
    #[derive(serde::Deserialize, serde::Serialize)]
    #[serde(deny_unknown_fields)]
    pub struct CairoContractDefinition<'a> {
        /// Contract ABI, which has no schema definition.
        pub abi: serde_json::Value,

        /// Main program definition.
        #[serde(borrow)]
        pub program: CairoProgram<'a>,

        /// The contract entry points.
        ///
        /// These are left out of the re-serialized version with the ordering
        /// requirement to a Keccak256 hash.
        #[serde(skip_serializing)]
        pub entry_points_by_type: HashMap<EntryPointType, Vec<SelectorAndOffset>>,
    }
760
    /// A Cairo program definition
    // It's important that this is ordered alphabetically because the fields need to
    // be in sorted order for the keccak hashed representation.
    #[derive(serde::Deserialize, serde::Serialize)]
    #[serde(deny_unknown_fields)]
    pub struct CairoProgram<'a> {
        // Empty attribute lists are omitted so they don't affect the hash.
        #[serde(skip_serializing_if = "Vec::is_empty", default)]
        pub attributes: Vec<serde_json::Value>,

        #[serde(borrow)]
        pub builtins: Vec<Cow<'a, str>>,

        // Added in Starknet 0.10, so we have to handle this not being present.
        #[serde(borrow, skip_serializing_if = "Option::is_none")]
        pub compiler_version: Option<Cow<'a, str>>,

        #[serde(borrow)]
        pub data: Vec<Cow<'a, str>>,

        // Set to None before hashing; never part of the hash input.
        #[serde(borrow)]
        pub debug_info: Option<&'a serde_json::value::RawValue>,

        // Important that this is ordered by the numeric keys, not lexicographically
        pub hints: BTreeMap<u64, Vec<serde_json::Value>>,

        pub identifiers: serde_json::Value,

        #[serde(borrow)]
        pub main_scope: Cow<'a, str>,

        // Unlike most other integers, this one is hex string. We don't need to interpret it,
        // it just needs to be part of the hashed output.
        #[serde(borrow)]
        pub prime: Cow<'a, str>,

        pub reference_manager: serde_json::Value,
    }
798
799 #[cfg(test)]
800 mod test_vectors {
801 use pathfinder_common::macro_prelude::*;
802 use starknet_gateway_test_fixtures::class_definitions::*;
803
804 use super::super::{compute_class_hash, ComputedClassHash};
805
806 #[tokio::test]
807 async fn first() {
808 let hash = compute_class_hash(INTEGRATION_TEST).unwrap();
809
810 assert_eq!(
811 hash,
812 ComputedClassHash::Cairo(class_hash!(
813 "0x031da92cf5f54bcb81b447e219e2b791b23f3052d12b6c9abd04ff2e5626576"
814 ))
815 );
816 }
817
818 #[test]
819 fn second() {
820 let hash = super::super::compute_class_hash(CONTRACT_DEFINITION).unwrap();
821
822 assert_eq!(
823 hash,
824 ComputedClassHash::Cairo(class_hash!(
825 "0x50b2148c0d782914e0b12a1a32abe5e398930b7e914f82c65cb7afce0a0ab9b"
826 ))
827 );
828 }
829
830 #[tokio::test]
831 async fn genesis_contract() {
832 let hash = compute_class_hash(GOERLI_GENESIS).unwrap();
833
834 assert_eq!(
835 hash,
836 ComputedClassHash::Cairo(class_hash!(
837 "0x10455c752b86932ce552f2b0fe81a880746649b9aee7e0d842bf3f52378f9f8"
838 ))
839 );
840 }
841
842 #[tokio::test]
843 async fn cairo_0_8() {
844 // Cairo 0.8 update broke our class hash calculation by adding new attribute
845 // fields (which we now need to ignore if empty).
846
847 let expected = ComputedClassHash::Cairo(class_hash!(
848 "056b96c1d1bbfa01af44b465763d1b71150fa00c6c9d54c3947f57e979ff68c3"
849 ));
850
851 // Known contract which triggered a hash mismatch failure.
852 let extract = tokio::task::spawn_blocking(move || -> anyhow::Result<_> {
853 let hash = compute_class_hash(CAIRO_0_8_NEW_ATTRIBUTES)?;
854 Ok(hash)
855 });
856 let calculated_hash = extract.await.unwrap().unwrap();
857
858 assert_eq!(calculated_hash, expected);
859 }
860
861 #[tokio::test]
862 async fn cairo_0_10() {
863 // Contract whose class triggered a deserialization issue because of the new
864 // `compiler_version` property.
865 let hash = compute_class_hash(CAIRO_0_10_COMPILER_VERSION).unwrap();
866
867 assert_eq!(
868 hash,
869 ComputedClassHash::Cairo(class_hash!(
870 "0xa69700a89b1fa3648adff91c438b79c75f7dcb0f4798938a144cce221639d6"
871 ))
872 );
873 }
874
875 #[tokio::test]
876 async fn cairo_0_10_part_2() {
877 // Contract who's class contains `compiler_version` property as well as
878 // `cairo_type` with tuple values. These tuple values require a
879 // space to be injected in order to achieve the correct hash.
880 let hash = compute_class_hash(CAIRO_0_10_TUPLES_INTEGRATION).unwrap();
881
882 assert_eq!(
883 hash,
884 ComputedClassHash::Cairo(class_hash!(
885 "0x542460935cea188d21e752d8459d82d60497866aaad21f873cbb61621d34f7f"
886 ))
887 );
888 }
889
890 #[tokio::test]
891 async fn cairo_0_10_part_3() {
892 // Contract who's class contains `compiler_version` property as well as
893 // `cairo_type` with tuple values. These tuple values require a
894 // space to be injected in order to achieve the correct hash.
895 let hash = compute_class_hash(CAIRO_0_10_TUPLES_GOERLI).unwrap();
896
897 assert_eq!(
898 hash,
899 ComputedClassHash::Cairo(class_hash!(
900 "0x66af14b94491ba4e2aea1117acf0a3155c53d92fdfd9c1f1dcac90dc2d30157"
901 ))
902 );
903 }
904
905 #[tokio::test]
906 async fn cairo_0_11_sierra() {
907 let hash = compute_class_hash(CAIRO_0_11_SIERRA).unwrap();
908
909 assert_eq!(
910 hash,
911 ComputedClassHash::Sierra(class_hash!(
912 "0x4e70b19333ae94bd958625f7b61ce9eec631653597e68645e13780061b2136c"
913 ))
914 )
915 }
916
917 #[tokio::test]
918 async fn cairo_0_11_with_decimal_entry_point_offset() {
919 let hash = compute_class_hash(CAIRO_0_11_WITH_DECIMAL_ENTRY_POINT_OFFSET).unwrap();
920
921 assert_eq!(
922 hash,
923 ComputedClassHash::Cairo(class_hash!(
924 "0x0484c163658bcce5f9916f486171ac60143a92897533aa7ff7ac800b16c63311"
925 ))
926 )
927 }
928 }
929
930 #[cfg(test)]
931 mod test_serde_features {
932 #[test]
933 fn serde_json_value_sorts_maps() {
934 // this property is leaned on and the default implementation of serde_json works
935 // like this. serde_json has a feature called "preserve_order" which
936 // could get enabled by accident, and it would destroy the ability
937 // to compute_class_hash.
938
939 let input = r#"{"foo": 1, "bar": 2}"#;
940 let parsed = serde_json::from_str::<serde_json::Value>(input).unwrap();
941 let output = serde_json::to_string(&parsed).unwrap();
942
943 assert_eq!(output, r#"{"bar":2,"foo":1}"#);
944 }
945
946 #[test]
947 fn serde_json_has_arbitrary_precision() {
948 // the json has 251-bit ints, python handles them out of box, serde_json
949 // requires feature "arbitrary_precision".
950
951 // this is 2**256 - 1
952 let input = r#"{"foo":115792089237316195423570985008687907853269984665640564039457584007913129639935}"#;
953
954 let output =
955 serde_json::to_string(&serde_json::from_str::<serde_json::Value>(input).unwrap())
956 .unwrap();
957
958 assert_eq!(input, output);
959 }
960
961 #[test]
962 fn serde_json_has_raw_value() {
963 // raw value is needed for others but here for completeness; this shouldn't
964 // compile if you the feature wasn't enabled.
965
966 #[derive(serde::Deserialize, serde::Serialize)]
967 struct Program<'a> {
968 #[serde(borrow)]
969 debug_info: Option<&'a serde_json::value::RawValue>,
970 }
971
972 let mut input = serde_json::from_str::<Program<'_>>(
973 r#"{"debug_info": {"long": {"tree": { "which": ["we dont", "care", "about", 0] }}}}"#,
974 ).unwrap();
975
976 input.debug_info = None;
977
978 let output = serde_json::to_string(&input).unwrap();
979
980 assert_eq!(output, r#"{"debug_info":null}"#);
981 }
982 }
983}
984
#[cfg(test)]
mod tests {
    /// Our truncation of a Keccak256 digest must agree with the value produced
    /// by the cairo-lang Python implementation for the same input.
    #[test]
    fn truncated_keccak_matches_pythonic() {
        use pathfinder_common::felt;
        use sha3::{Digest, Keccak256};

        use super::truncated_keccak;

        let digest = Keccak256::digest([0xffu8; 32]);
        // Ensure the top byte actually exceeds the mask, so truncation is
        // exercised rather than being a no-op.
        assert!(digest[0] > 0xf);
        assert_eq!(
            truncated_keccak(digest.into()),
            felt!("0x1c584056064687e149968cbab758a3376d22aedc6a55823d1b3ecbee81b8fb9")
        );
    }
}
1001}