use anyhow::{Context, Error, Result};
use pathfinder_common::class_definition::EntryPointType::*;
use pathfinder_common::{felt_bytes, ClassHash};
use pathfinder_crypto::hash::{HashChain, PoseidonHasher};
use pathfinder_crypto::Felt;
use serde::Serialize;
use sha3::Digest;
/// A class hash together with the kind of class definition it was computed
/// from.
///
/// `Clone`/`Copy` are derived because both variants wrap a `ClassHash`, which
/// is itself copied by value in [`ComputedClassHash::hash`].
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ComputedClassHash {
    /// Hash of a legacy Cairo 0.x class definition.
    Cairo(ClassHash),
    /// Hash of a Sierra (Cairo 1.x) class definition.
    Sierra(ClassHash),
}
impl ComputedClassHash {
    /// Returns the wrapped [`ClassHash`] regardless of which variant holds it.
    pub fn hash(&self) -> ClassHash {
        match self {
            ComputedClassHash::Cairo(hash) | ComputedClassHash::Sierra(hash) => *hash,
        }
    }
}
/// Parses a class definition dump and computes its class hash, dispatching on
/// whether the definition is in the Sierra or the legacy Cairo format.
pub fn compute_class_hash(contract_definition_dump: &[u8]) -> Result<ComputedClassHash> {
    let parsed = parse_contract_definition(contract_definition_dump)
        .context("Failed to parse contract definition")?;

    let computed = match parsed {
        json::ContractDefinition::Sierra(sierra) => {
            compute_sierra_class_hash(sierra).map(ComputedClassHash::Sierra)
        }
        json::ContractDefinition::Cairo(cairo) => {
            compute_cairo_class_hash(cairo.into()).map(ComputedClassHash::Cairo)
        }
    };

    computed.context("Compute class hash")
}
/// Computes the "hinted class hash" of a prepared Cairo 0.x definition: the
/// truncated Keccak-256 of the definition serialized with Python-compatible
/// JSON formatting.
pub fn compute_cairo_hinted_class_hash(
    contract_definition: &PreparedCairoContractDefinition<'_>,
) -> Result<Felt> {
    use std::io::Write;

    // Serialize using the Python-default formatter so the bytes match what
    // cairo-lang feeds into Keccak.
    let mut serialized = Vec::new();
    let mut serializer =
        serde_json::Serializer::with_formatter(&mut serialized, PythonDefaultFormatter);
    contract_definition
        .0
        .serialize(&mut serializer)
        .context("Serializing contract_definition for Keccak256")?;

    // Sanity check: the serializer output must be valid UTF-8.
    let json_text = String::from_utf8(serialized).expect("Invalid UTF-8");

    let mut hasher = KeccakWriter::default();
    hasher
        .write_all(json_text.as_bytes())
        .expect("Failed to write to KeccakWriter");

    let KeccakWriter(keccak) = hasher;
    Ok(truncated_keccak(<[u8; 32]>::from(keccak.finalize())))
}
/// Attempts to parse a class definition dump, trying the Sierra layout first
/// and falling back to the legacy Cairo layout.
pub fn parse_contract_definition(
    contract_definition_dump: &[u8],
) -> serde_json::Result<json::ContractDefinition<'_>> {
    match serde_json::from_slice::<json::SierraContractDefinition<'_>>(contract_definition_dump) {
        Ok(sierra) => Ok(json::ContractDefinition::Sierra(sierra)),
        Err(_) => {
            serde_json::from_slice::<json::CairoContractDefinition<'_>>(contract_definition_dump)
                .map(json::ContractDefinition::Cairo)
        }
    }
}
/// Helpers for computing class hashes from a class definition's pre-split
/// components instead of a single JSON dump.
pub mod from_parts {
    use std::collections::HashMap;

    use anyhow::Result;
    use pathfinder_common::class_definition::{
        EntryPointType,
        SelectorAndOffset,
        SierraEntryPoints,
    };
    use pathfinder_common::ClassHash;
    use pathfinder_crypto::Felt;

    use super::json;

    /// Computes a Cairo 0.x class hash from its raw ABI, program and entry
    /// point components.
    pub fn compute_cairo_class_hash(
        abi: &[u8],
        program: &[u8],
        external_entry_points: Vec<SelectorAndOffset>,
        l1_handler_entry_points: Vec<SelectorAndOffset>,
        constructor_entry_points: Vec<SelectorAndOffset>,
    ) -> Result<ClassHash> {
        let entry_points_by_type = HashMap::from([
            (EntryPointType::External, external_entry_points),
            (EntryPointType::L1Handler, l1_handler_entry_points),
            (EntryPointType::Constructor, constructor_entry_points),
        ]);

        let contract_definition = json::CairoContractDefinition {
            abi: serde_json::from_slice(abi)?,
            program: serde_json::from_slice(program)?,
            entry_points_by_type,
        };

        super::compute_cairo_class_hash(contract_definition.into())
    }

    /// Computes a Sierra class hash from its raw ABI, program, version and
    /// entry point components.
    pub fn compute_sierra_class_hash(
        abi: &str,
        sierra_program: Vec<Felt>,
        contract_class_version: &str,
        entry_points: SierraEntryPoints,
    ) -> Result<ClassHash> {
        let entry_points_by_type = HashMap::from([
            (EntryPointType::External, entry_points.external),
            (EntryPointType::L1Handler, entry_points.l1_handler),
            (EntryPointType::Constructor, entry_points.constructor),
        ]);

        let contract_definition = json::SierraContractDefinition {
            abi: abi.into(),
            sierra_program,
            contract_class_version: contract_class_version.into(),
            entry_points_by_type,
        };

        super::compute_sierra_class_hash(contract_definition)
    }
}
/// A Cairo 0.x contract definition as deserialized from JSON, before the
/// normalization performed by [`prepare_json_contract_definition`].
pub struct RawCairoContractDefinition<'a>(json::CairoContractDefinition<'a>);

impl<'a> From<json::CairoContractDefinition<'a>> for RawCairoContractDefinition<'a> {
    fn from(value: json::CairoContractDefinition<'a>) -> Self {
        RawCairoContractDefinition(value)
    }
}

impl<'a> RawCairoContractDefinition<'a> {
    /// Borrows the wrapped JSON definition.
    pub fn inner(&self) -> &json::CairoContractDefinition<'a> {
        &self.0
    }
}
/// A Cairo 0.x contract definition that has been normalized by
/// [`prepare_json_contract_definition`] and is ready for hashing.
pub struct PreparedCairoContractDefinition<'a>(json::CairoContractDefinition<'a>);

impl<'a> TryFrom<json::CairoContractDefinition<'a>> for PreparedCairoContractDefinition<'a> {
    type Error = Error;

    // Fallible because preparation can reject malformed program attributes.
    fn try_from(value: json::CairoContractDefinition<'a>) -> Result<Self, Self::Error> {
        prepare_json_contract_definition(RawCairoContractDefinition::from(value))
    }
}

impl<'a> PreparedCairoContractDefinition<'a> {
    /// Borrows the wrapped, normalized JSON definition.
    pub fn inner(&self) -> &json::CairoContractDefinition<'a> {
        &self.0
    }
}
/// Computes the class hash of a Cairo 0.x class definition.
///
/// The hash is a [`HashChain`] over, in order: an API version constant, one
/// sub-chain per entry point type (External, L1Handler, Constructor — fixed
/// order), the program builtins, the hinted class hash (truncated Keccak of
/// the Python-formatted JSON), and the program bytecode. This update order is
/// consensus-critical and must not change.
pub fn compute_cairo_class_hash(
    contract_definition: RawCairoContractDefinition<'_>,
) -> Result<ClassHash> {
    let contract_definition = prepare_json_contract_definition(contract_definition)?;
    let truncated_keccak = compute_cairo_hinted_class_hash(&contract_definition)?;

    const API_VERSION: Felt = Felt::ZERO;

    let mut outer = HashChain::default();
    outer.update(API_VERSION);

    // A missing entry point type contributes an empty hash chain.
    [External, L1Handler, Constructor]
        .iter()
        .map(|key| {
            contract_definition
                .0
                .entry_points_by_type
                .get(key)
                // Fall back to an empty slice rather than allocating a fresh
                // Vec on every iteration (the previous `unwrap_or(&Vec::new())`
                // allocated even when the key was present).
                .map(Vec::as_slice)
                .unwrap_or(&[])
                .iter()
                .flat_map(|x| [x.selector.0, x.offset.0].into_iter())
                .fold(HashChain::default(), |mut hc, next| {
                    hc.update(next);
                    hc
                })
        })
        .for_each(|x| outer.update(x.finalize()));

    // Folds fallible Felt conversions into a hash chain, short-circuiting on
    // the first error.
    fn update_hash_chain(mut hc: HashChain, next: Result<Felt, Error>) -> Result<HashChain, Error> {
        hc.update(next?);
        Result::<_, Error>::Ok(hc)
    }

    let builtins = contract_definition
        .0
        .program
        .builtins
        .iter()
        .enumerate()
        .map(|(i, s)| (i, s.as_bytes()))
        .map(|(i, s)| {
            Felt::from_be_slice(s).with_context(|| format!("Invalid builtin at index {i}"))
        })
        .try_fold(HashChain::default(), update_hash_chain)
        .context("Failed to process contract_definition.program.builtins")?;

    outer.update(builtins.finalize());
    outer.update(truncated_keccak);

    let bytecodes = contract_definition
        .0
        .program
        .data
        .iter()
        .enumerate()
        .map(|(i, s)| {
            Felt::from_hex_str(s).with_context(|| format!("Invalid bytecode at index {i}"))
        })
        .try_fold(HashChain::default(), update_hash_chain)
        .context("Failed to process contract_definition.program.data")?;

    outer.update(bytecodes.finalize());

    Ok(ClassHash(outer.finalize()))
}
/// Normalizes a raw Cairo 0.x contract definition so that its serialized JSON
/// matches the exact bytes cairo-lang hashes.
///
/// Steps performed:
/// - drops `program.debug_info` (never part of the hash),
/// - removes empty `accessible_scopes` arrays and null `flow_tracking_data`
///   from program attributes,
/// - for classes without a `compiler_version` field, restores the extra space
///   around colons in named-tuple type strings.
pub fn prepare_json_contract_definition(
    contract_definition: RawCairoContractDefinition<'_>,
) -> Result<PreparedCairoContractDefinition<'_>, Error> {
    let mut contract_definition = contract_definition.0;
    // debug_info is never part of the hashed JSON.
    contract_definition.program.debug_info = None;

    // Strip empty/null attribute fields so the serialization matches output
    // that never contained them.
    contract_definition
        .program
        .attributes
        .iter_mut()
        .try_for_each(|attr| -> anyhow::Result<()> {
            let vals = attr
                .as_object_mut()
                .context("Program attribute was not an object")?;

            match vals.get_mut("accessible_scopes") {
                Some(serde_json::Value::Array(array)) => {
                    if array.is_empty() {
                        vals.remove("accessible_scopes");
                    }
                }
                Some(_other) => {
                    anyhow::bail!(
                        r#"A program's attribute["accessible_scopes"] was not an array type."#
                    );
                }
                None => {}
            }

            if let Some(serde_json::Value::Null) = vals.get_mut("flow_tracking_data") {
                vals.remove("flow_tracking_data");
            }

            Ok(())
        })?;

    // Recursively walks a JSON value, rewriting named-tuple type strings.
    fn add_extra_space_to_cairo_named_tuples(value: &mut serde_json::Value) {
        match value {
            serde_json::Value::Array(v) => walk_array(v),
            serde_json::Value::Object(m) => walk_map(m),
            _ => {}
        }
    }

    fn walk_array(array: &mut [serde_json::Value]) {
        for v in array.iter_mut() {
            add_extra_space_to_cairo_named_tuples(v);
        }
    }

    fn walk_map(object: &mut serde_json::Map<String, serde_json::Value>) {
        for (k, v) in object.iter_mut() {
            match v {
                serde_json::Value::String(s) => {
                    let new_value = add_extra_space_to_named_tuple_type_definition(k, s);
                    // Only allocate a replacement when the string actually changed.
                    if new_value.as_ref() != s {
                        *v = serde_json::Value::String(new_value.into());
                    }
                }
                _ => add_extra_space_to_cairo_named_tuples(v),
            }
        }
    }

    // Only `cairo_type` and `value` strings carry type expressions that need
    // the extra spacing; all other keys are left untouched.
    fn add_extra_space_to_named_tuple_type_definition<'a>(
        key: &str,
        value: &'a str,
    ) -> std::borrow::Cow<'a, str> {
        use std::borrow::Cow::*;
        match key {
            "cairo_type" | "value" => Owned(add_extra_space_before_colon(value)),
            _ => Borrowed(value),
        }
    }

    fn add_extra_space_before_colon(v: &str) -> String {
        // BUG FIX: the second replace previously used a single-space pattern
        // (`" :" -> " :"`), which is a byte-for-byte no-op. It must collapse
        // the double space ("  :" -> " :") produced when the first replace
        // hits an input that already contains " : ".
        v.replace(": ", " : ").replace("  :", " :")
    }

    // Classes predating the compiler_version field used the spaced colon
    // spelling in named-tuple types; restore it so the hash matches.
    if contract_definition.program.compiler_version.is_none() {
        add_extra_space_to_cairo_named_tuples(&mut contract_definition.program.identifiers);
        add_extra_space_to_cairo_named_tuples(&mut contract_definition.program.reference_manager);
    }

    Ok(PreparedCairoContractDefinition(contract_definition))
}
/// Computes the class hash of a Sierra (Cairo 1.x) class definition.
///
/// The hash is a Poseidon hash over, in order: a class version constant, one
/// sub-hash per entry point type (External, L1Handler, Constructor — fixed
/// order), the truncated Keccak of the ABI string, and the Poseidon hash of
/// the Sierra program. This update order is consensus-critical.
pub fn compute_sierra_class_hash(
    contract_definition: json::SierraContractDefinition<'_>,
) -> Result<ClassHash> {
    // Only class version 0.1.0 is supported by this hashing scheme.
    if contract_definition.contract_class_version != "0.1.0" {
        anyhow::bail!("Unsupported Sierra class version");
    }

    let mut hash = PoseidonHasher::default();

    const SIERRA_VERSION: Felt = felt_bytes!(b"CONTRACT_CLASS_V0.1.0");
    hash.write(SIERRA_VERSION.into());

    // A missing entry point type contributes an empty Poseidon hash.
    [External, L1Handler, Constructor]
        .iter()
        .map(|key| {
            contract_definition
                .entry_points_by_type
                .get(key)
                // Fall back to an empty slice rather than allocating a fresh
                // Vec on every iteration (the previous `unwrap_or(&Vec::new())`
                // allocated even when the key was present).
                .map(Vec::as_slice)
                .unwrap_or(&[])
                .iter()
                .flat_map(|x| [x.selector.0, x.function_idx.into()].into_iter())
                .fold(PoseidonHasher::default(), |mut hc, next| {
                    hc.write(next.into());
                    hc
                })
        })
        .for_each(|x| hash.write(x.finish()));

    // The ABI contributes via the truncated Keccak of its raw string bytes.
    let abi_truncated_keccak = {
        let mut keccak = sha3::Keccak256::default();
        keccak.update(contract_definition.abi.as_bytes());
        truncated_keccak(<[u8; 32]>::from(keccak.finalize()))
    };
    hash.write(abi_truncated_keccak.into());

    let program_hash = {
        let program_hash = contract_definition.sierra_program.iter().fold(
            PoseidonHasher::default(),
            |mut hc, next| {
                hc.write((*next).into());
                hc
            },
        );
        program_hash.finish()
    };
    hash.write(program_hash);

    Ok(ClassHash(hash.finish().into()))
}
/// Truncates a 32-byte Keccak-256 digest so it fits into a field element.
///
/// Masking the most significant byte with 0x03 keeps only its lowest 2 bits,
/// leaving a 250-bit big-endian value — guaranteed smaller than the field
/// modulus, hence the infallible conversion below.
pub fn truncated_keccak(mut plain: [u8; 32]) -> Felt {
    plain[0] &= 0x03;
    Felt::from_be_bytes(plain).expect("cannot overflow: smaller than modulus")
}
/// `std::io::Write` adapter that feeds every written byte into a running
/// Keccak-256 state, letting serializers stream directly into the hash.
#[derive(Default)]
struct KeccakWriter(sha3::Keccak256);

impl std::io::Write for KeccakWriter {
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
        self.0.update(buf);
        // The entire buffer is always consumed.
        Ok(buf.len())
    }

    // Nothing is buffered, so flushing is a no-op.
    fn flush(&mut self) -> std::io::Result<()> {
        Ok(())
    }
}
/// A [`serde_json::ser::Formatter`] that mimics Python's default `json.dumps`
/// output: `", "` between items, `": "` between keys and values, and `\uXXXX`
/// escapes for every non-ASCII character. The hinted class hash keccaks this
/// serialization, so it must match cairo-lang's output byte for byte.
struct PythonDefaultFormatter;

impl serde_json::ser::Formatter for PythonDefaultFormatter {
    fn begin_array_value<W>(&mut self, writer: &mut W, first: bool) -> std::io::Result<()>
    where
        W: ?Sized + std::io::Write,
    {
        if first {
            Ok(())
        } else {
            writer.write_all(b", ")
        }
    }

    fn begin_object_key<W>(&mut self, writer: &mut W, first: bool) -> std::io::Result<()>
    where
        W: ?Sized + std::io::Write,
    {
        if first {
            Ok(())
        } else {
            writer.write_all(b", ")
        }
    }

    fn begin_object_value<W>(&mut self, writer: &mut W) -> std::io::Result<()>
    where
        W: ?Sized + std::io::Write,
    {
        writer.write_all(b": ")
    }

    #[inline]
    fn write_string_fragment<W>(&mut self, writer: &mut W, fragment: &str) -> std::io::Result<()>
    where
        W: ?Sized + std::io::Write,
    {
        let mut buf = [0, 0];
        for c in fragment.chars() {
            if c.is_ascii() {
                writer.write_all(&[c as u8])?;
            } else {
                // Non-ASCII chars are emitted as UTF-16 code units, one
                // `\uXXXX` escape each (matching Python's json.dumps).
                let buf = c.encode_utf16(&mut buf);
                for i in buf {
                    // BUG FIX: `{:4x}` space-pads to width 4, producing
                    // invalid escapes like `\u  e9`. Python zero-pads, so the
                    // format must be `{:04x}`.
                    write!(writer, r"\u{:04x}", i)?;
                }
            }
        }
        Ok(())
    }
}
/// Serde models for the class definition JSON formats.
///
/// NOTE(review): the struct field declarations and their serde attributes are
/// hash-relevant — `CairoContractDefinition`'s serialization is keccaked for
/// the hinted class hash — so do not reorder fields or alter attributes.
pub mod json {
    use std::borrow::Cow;
    use std::collections::{BTreeMap, HashMap};

    use pathfinder_common::class_definition::{
        EntryPointType,
        SelectorAndFunctionIndex,
        SelectorAndOffset,
    };

    /// A class definition in either of the two supported formats.
    pub enum ContractDefinition<'a> {
        Cairo(CairoContractDefinition<'a>),
        Sierra(SierraContractDefinition<'a>),
    }

    /// A Sierra (Cairo 1.x) class definition. Deserialize-only: it is hashed
    /// field by field, never re-serialized.
    #[derive(serde::Deserialize)]
    #[serde(deny_unknown_fields)]
    pub struct SierraContractDefinition<'a> {
        // Raw ABI string; hashed verbatim via truncated keccak.
        #[serde(borrow)]
        pub abi: Cow<'a, str>,
        pub sierra_program: Vec<pathfinder_crypto::Felt>,
        #[serde(borrow)]
        pub contract_class_version: Cow<'a, str>,
        pub entry_points_by_type: HashMap<EntryPointType, Vec<SelectorAndFunctionIndex>>,
    }

    /// A Cairo 0.x class definition. Its serialization (minus the skipped
    /// field) is the input to the hinted class hash.
    #[derive(serde::Deserialize, serde::Serialize)]
    #[serde(deny_unknown_fields)]
    pub struct CairoContractDefinition<'a> {
        // Arbitrary JSON; no schema is enforced on the ABI.
        pub abi: serde_json::Value,
        #[serde(borrow)]
        pub program: CairoProgram<'a>,
        // Not part of the hinted-class-hash JSON, hence skipped on serialize.
        #[serde(skip_serializing)]
        pub entry_points_by_type: HashMap<EntryPointType, Vec<SelectorAndOffset>>,
    }

    /// The `program` object of a Cairo 0.x class definition.
    ///
    /// NOTE(review): fields are declared in alphabetical order — presumably so
    /// serde's declaration-order output matches Python's sorted-keys JSON for
    /// hashing. Keep them sorted; confirm before changing.
    #[derive(serde::Deserialize, serde::Serialize)]
    #[serde(deny_unknown_fields)]
    pub struct CairoProgram<'a> {
        // Omitted from output when empty, mirroring older producers that did
        // not emit the field at all.
        #[serde(skip_serializing_if = "Vec::is_empty", default)]
        pub attributes: Vec<serde_json::Value>,
        #[serde(borrow)]
        pub builtins: Vec<Cow<'a, str>>,
        // Absent on older classes; its absence also gates the named-tuple
        // respacing in `prepare_json_contract_definition`.
        #[serde(borrow, skip_serializing_if = "Option::is_none")]
        pub compiler_version: Option<Cow<'a, str>>,
        #[serde(borrow)]
        pub data: Vec<Cow<'a, str>>,
        // RawValue keeps the original bytes untouched; set to None before
        // hashing (serializes as `null`).
        #[serde(borrow)]
        pub debug_info: Option<&'a serde_json::value::RawValue>,
        // BTreeMap gives deterministic (sorted) key order in the output.
        pub hints: BTreeMap<u64, Vec<serde_json::Value>>,
        pub identifiers: serde_json::Value,
        #[serde(borrow)]
        pub main_scope: Cow<'a, str>,
        #[serde(borrow)]
        pub prime: Cow<'a, str>,
        pub reference_manager: serde_json::Value,
    }

    // Known class definitions with their expected hashes, covering the
    // different cairo-lang output variations the preparation step handles.
    #[cfg(test)]
    mod test_vectors {
        use pathfinder_common::macro_prelude::*;
        use starknet_gateway_test_fixtures::class_definitions::*;

        use super::super::{compute_class_hash, ComputedClassHash};

        #[tokio::test]
        async fn first() {
            let hash = compute_class_hash(INTEGRATION_TEST).unwrap();
            assert_eq!(
                hash,
                ComputedClassHash::Cairo(class_hash!(
                    "0x031da92cf5f54bcb81b447e219e2b791b23f3052d12b6c9abd04ff2e5626576"
                ))
            );
        }

        #[test]
        fn second() {
            let hash = super::super::compute_class_hash(CONTRACT_DEFINITION).unwrap();
            assert_eq!(
                hash,
                ComputedClassHash::Cairo(class_hash!(
                    "0x50b2148c0d782914e0b12a1a32abe5e398930b7e914f82c65cb7afce0a0ab9b"
                ))
            );
        }

        #[tokio::test]
        async fn genesis_contract() {
            let hash = compute_class_hash(GOERLI_GENESIS).unwrap();
            assert_eq!(
                hash,
                ComputedClassHash::Cairo(class_hash!(
                    "0x10455c752b86932ce552f2b0fe81a880746649b9aee7e0d842bf3f52378f9f8"
                ))
            );
        }

        #[tokio::test]
        async fn cairo_0_8() {
            let expected = ComputedClassHash::Cairo(class_hash!(
                "056b96c1d1bbfa01af44b465763d1b71150fa00c6c9d54c3947f57e979ff68c3"
            ));
            let extract = tokio::task::spawn_blocking(move || -> anyhow::Result<_> {
                let hash = compute_class_hash(CAIRO_0_8_NEW_ATTRIBUTES)?;
                Ok(hash)
            });
            let calculated_hash = extract.await.unwrap().unwrap();
            assert_eq!(calculated_hash, expected);
        }

        #[tokio::test]
        async fn cairo_0_10() {
            let hash = compute_class_hash(CAIRO_0_10_COMPILER_VERSION).unwrap();
            assert_eq!(
                hash,
                ComputedClassHash::Cairo(class_hash!(
                    "0xa69700a89b1fa3648adff91c438b79c75f7dcb0f4798938a144cce221639d6"
                ))
            );
        }

        #[tokio::test]
        async fn cairo_0_10_part_2() {
            let hash = compute_class_hash(CAIRO_0_10_TUPLES_INTEGRATION).unwrap();
            assert_eq!(
                hash,
                ComputedClassHash::Cairo(class_hash!(
                    "0x542460935cea188d21e752d8459d82d60497866aaad21f873cbb61621d34f7f"
                ))
            );
        }

        #[tokio::test]
        async fn cairo_0_10_part_3() {
            let hash = compute_class_hash(CAIRO_0_10_TUPLES_GOERLI).unwrap();
            assert_eq!(
                hash,
                ComputedClassHash::Cairo(class_hash!(
                    "0x66af14b94491ba4e2aea1117acf0a3155c53d92fdfd9c1f1dcac90dc2d30157"
                ))
            );
        }

        #[tokio::test]
        async fn cairo_0_11_sierra() {
            let hash = compute_class_hash(CAIRO_0_11_SIERRA).unwrap();
            assert_eq!(
                hash,
                ComputedClassHash::Sierra(class_hash!(
                    "0x4e70b19333ae94bd958625f7b61ce9eec631653597e68645e13780061b2136c"
                ))
            )
        }

        #[tokio::test]
        async fn cairo_0_11_with_decimal_entry_point_offset() {
            let hash = compute_class_hash(CAIRO_0_11_WITH_DECIMAL_ENTRY_POINT_OFFSET).unwrap();
            assert_eq!(
                hash,
                ComputedClassHash::Cairo(class_hash!(
                    "0x0484c163658bcce5f9916f486171ac60143a92897533aa7ff7ac800b16c63311"
                ))
            )
        }
    }

    // Pins the serde_json behaviors this module's hashing depends on: sorted
    // map keys, arbitrary-precision numbers, and raw-value passthrough.
    #[cfg(test)]
    mod test_serde_features {
        #[test]
        fn serde_json_value_sorts_maps() {
            let input = r#"{"foo": 1, "bar": 2}"#;
            let parsed = serde_json::from_str::<serde_json::Value>(input).unwrap();
            let output = serde_json::to_string(&parsed).unwrap();
            assert_eq!(output, r#"{"bar":2,"foo":1}"#);
        }

        #[test]
        fn serde_json_has_arbitrary_precision() {
            // A value exceeding u64/f64 must round-trip without loss.
            let input = r#"{"foo":115792089237316195423570985008687907853269984665640564039457584007913129639935}"#;
            let output =
                serde_json::to_string(&serde_json::from_str::<serde_json::Value>(input).unwrap())
                    .unwrap();
            assert_eq!(input, output);
        }

        #[test]
        fn serde_json_has_raw_value() {
            #[derive(serde::Deserialize, serde::Serialize)]
            struct Program<'a> {
                #[serde(borrow)]
                debug_info: Option<&'a serde_json::value::RawValue>,
            }

            let mut input = serde_json::from_str::<Program<'_>>(
                r#"{"debug_info": {"long": {"tree": { "which": ["we dont", "care", "about", 0] }}}}"#,
            ).unwrap();
            input.debug_info = None;
            let output = serde_json::to_string(&input).unwrap();
            assert_eq!(output, r#"{"debug_info":null}"#);
        }
    }
}
#[cfg(test)]
mod tests {
    #[test]
    fn truncated_keccak_matches_pythonic() {
        use pathfinder_common::felt;
        use sha3::{Digest, Keccak256};

        use super::truncated_keccak;
        // Use a digest whose top byte has high bits set, so the test actually
        // exercises the masking (not just the pass-through path).
        let all_set = Keccak256::digest([0xffu8; 32]);
        assert!(all_set[0] > 0xf);
        let truncated = truncated_keccak(all_set.into());
        assert_eq!(
            truncated,
            felt!("0x1c584056064687e149968cbab758a3376d22aedc6a55823d1b3ecbee81b8fb9")
        );
    }
}