jam_types/types.rs

use core::{
	borrow::Borrow,
	ops::{Add, AddAssign},
};

use crate::simple::{Hash, MaxDependencies, MaxExtrinsics, MaxImports, MaxWorkItems};

use super::*;
use simple::OpaqueBlsPublic;

/// A `Wrap` implementation provides a type constructor which "wraps" a type.
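///
/// A minimal sketch of a caller-defined wrapper, assuming an `alloc` environment; `ArcWrap` is
/// purely illustrative, while `NoWrap` below is the identity case used throughout this crate.
///
/// ```ignore
/// struct ArcWrap;
/// impl Wrap for ArcWrap {
/// 	// `Arc<T>: Borrow<T>`, so this satisfies the trait bound.
/// 	type Wrap<T> = alloc::sync::Arc<T>;
/// }
/// ```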
pub trait Wrap {
	/// Wrap `T`.
	type Wrap<T>: Borrow<T>;
}

/// No-op `Wrap`.
// These traits are derived to avoid running into issues with Rust's poor handling of derive with
// generic parameters; see https://github.com/rust-lang/rust/issues/26925
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct NoWrap;

impl Wrap for NoWrap {
	type Wrap<T> = T;
}

/// Plain-old-data struct with the same length and layout as the `ValKeyset` struct. This does
/// not bring in any cryptography.
#[derive(Copy, Clone, Encode, Decode, Debug, Eq, PartialEq)]
pub struct OpaqueValKeyset {
	/// The opaque Bandersnatch public key.
	pub bandersnatch: OpaqueBandersnatchPublic,
	/// The opaque Ed25519 public key.
	pub ed25519: OpaqueEd25519Public,
	/// The opaque BLS public key.
	pub bls: OpaqueBlsPublic,
	/// The opaque metadata.
	pub metadata: OpaqueValidatorMetadata,
}

impl Default for OpaqueValKeyset {
	fn default() -> Self {
		Self {
			bandersnatch: OpaqueBandersnatchPublic::zero(),
			ed25519: OpaqueEd25519Public::zero(),
			bls: OpaqueBlsPublic::zero(),
			metadata: OpaqueValidatorMetadata::zero(),
		}
	}
}

/// The opaque keys for each validator.
pub type OpaqueValKeysets = FixedVec<OpaqueValKeyset, ValCount>;

/// Reference to a sequence of import segments which, when combined with an index, forms a
/// commitment to a specific segment of data.
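///
/// A brief sketch of the two construction paths provided by the `From` impls below (the hashes
/// here are placeholders, built via the `Default`/`From<[u8; 32]>` conversions used elsewhere
/// in this file):
///
/// ```ignore
/// let direct: RootIdentifier = SegmentTreeRoot::default().into();
/// let indirect: RootIdentifier = WorkPackageHash::from([0u8; 32]).into();
/// assert!(SegmentTreeRoot::try_from(direct).is_ok());
/// assert!(SegmentTreeRoot::try_from(indirect).is_err());
/// ```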
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub enum RootIdentifier {
	/// Direct cryptographic commitment to the export-segments tree root.
	Direct(SegmentTreeRoot),
	/// Indirect reference to the export-segments tree root via a hash of the work-package which
	/// resulted in it.
	Indirect(WorkPackageHash),
}

impl From<SegmentTreeRoot> for RootIdentifier {
	fn from(root: SegmentTreeRoot) -> Self {
		Self::Direct(root)
	}
}
impl From<WorkPackageHash> for RootIdentifier {
	fn from(hash: WorkPackageHash) -> Self {
		Self::Indirect(hash)
	}
}
impl TryFrom<RootIdentifier> for SegmentTreeRoot {
	type Error = WorkPackageHash;
	fn try_from(root: RootIdentifier) -> Result<Self, Self::Error> {
		match root {
			RootIdentifier::Direct(root) => Ok(root),
			RootIdentifier::Indirect(hash) => Err(hash),
		}
	}
}

/// Import-segment specification, which identifies a single exported segment.
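///
/// The codec below packs the root discriminant into the top bit of `index`: a direct
/// segment-tree root leaves the bit clear, while an indirect work-package hash sets it. A
/// hedged round-trip sketch (placeholder hash, `index` below `1 << 15`):
///
/// ```ignore
/// let spec = ImportSpec { root: RootIdentifier::Indirect([0u8; 32].into()), index: 7 };
/// let bytes = spec.encode();
/// // A 32-byte hash followed by a little-endian u16 with bit 15 set.
/// assert_eq!(u16::from_le_bytes([bytes[32], bytes[33]]), 7 | (1 << 15));
/// assert_eq!(ImportSpec::decode(&mut &bytes[..]).unwrap(), spec);
/// ```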
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub struct ImportSpec {
	/// The identifier of a series of exported segments.
	pub root: RootIdentifier,
	/// The index into the identified series of exported segments.
	pub index: u16,
}

impl Encode for ImportSpec {
	fn encode_to<T: codec::Output + ?Sized>(&self, dest: &mut T) {
		let off = match &self.root {
			RootIdentifier::Direct(root) => {
				root.encode_to(dest);
				0
			},
			RootIdentifier::Indirect(hash) => {
				hash.encode_to(dest);
				1 << 15
			},
		};
		(self.index + off).encode_to(dest);
	}
}

impl Decode for ImportSpec {
	fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
		let h = Hash::decode(input)?;
		let i = u16::decode(input)?;
		let root = if i & (1 << 15) == 0 {
			SegmentTreeRoot::from(h).into()
		} else {
			WorkPackageHash::from(h).into()
		};
		Ok(Self { root, index: i & !(1 << 15) })
	}

	fn encoded_fixed_size() -> Option<usize> {
		Some(core::mem::size_of::<Hash>() + core::mem::size_of::<u16>())
	}
}

impl MaxEncodedLen for ImportSpec {
	fn max_encoded_len() -> usize {
		Hash::max_encoded_len() + u16::max_encoded_len()
	}
}

/// Specification of a single piece of extrinsic data.
#[derive(Clone, Encode, Decode, MaxEncodedLen, Debug)]
pub struct ExtrinsicSpec {
	/// The hash of the extrinsic data.
	pub hash: ExtrinsicHash,
	/// The length of the extrinsic data.
	pub len: u32,
}

/// Sequence of [WorkItem]s, each wrapped by `W`, within a [WrappedWorkPackage] and thus limited in
/// length to [max_work_items()].
pub type WrappedWorkItems<W> = BoundedVec<<W as Wrap>::Wrap<WorkItem>, MaxWorkItems>;

/// Sequence of [WorkItem]s within a [WorkPackage] and thus limited in length to
/// [max_work_items()].
pub type WorkItems = WrappedWorkItems<NoWrap>;

/// A definition of work to be done by the Refinement logic of a service and transformed into a
/// [WorkOutput] for its Accumulation logic.
#[derive(Clone, Encode, Decode, MaxEncodedLen, Debug)]
pub struct WorkItem {
	/// Service identifier to which this work item relates.
	pub service: ServiceId,
	/// The service's code hash at the time of reporting. This must be available in-core at the
	/// time of the lookup-anchor block.
	pub code_hash: CodeHash,
	/// Gas limit with which to execute this work item's Refine logic.
	pub refine_gas_limit: UnsignedGas,
	/// Gas limit with which to execute this work item's Accumulate logic.
	pub accumulate_gas_limit: UnsignedGas,
	/// Number of segments exported by this work item.
	pub export_count: u16,
	/// Opaque data passed in to the service's Refinement logic to describe its workload.
	pub payload: WorkPayload,
	/// Sequence of imported data segments.
	pub import_segments: WorkItemImportsVec,
	/// Additional data available to the service's Refinement logic while doing its workload.
	pub extrinsics: BoundedVec<ExtrinsicSpec, MaxExtrinsics>,
}

impl WorkItem {
	/// Returns the sum of the lengths of the item's extrinsics.
	///
	/// Returns `u32::MAX` on overflow.
	pub fn extrinsic_size(&self) -> u32 {
		self.extrinsics
			.iter()
			.map(|xt| xt.len)
			.fold(0u32, |sum, len| sum.saturating_add(len))
	}
}

/// A sequence of import specifications.
pub type WorkItemImportsVec = BoundedVec<ImportSpec, MaxImports>;

/// Various pieces of information helpful to contextualize the Refinement process.
#[derive(Clone, Encode, Decode, Debug, Eq, PartialEq)]
pub struct RefineContext {
	/// The most recent header hash of the chain when building. This must be no more than
	/// `RECENT_BLOCKS` blocks old when reported.
	pub anchor: HeaderHash,
	/// Must be the state root of block `anchor`. This is checked on-chain when reported.
	pub state_root: StateRootHash,
	/// Must be the Beefy root of block `anchor`. This is checked on-chain when reported.
	pub beefy_root: MmrPeakHash,
	/// The hash of a header of a block which is final. Availability will not succeed unless a
	/// super-majority of validators have attested to this.
	/// Preimage `lookup`s will be judged according to this block.
	pub lookup_anchor: HeaderHash,
	/// The slot of `lookup_anchor` on the chain. This is checked in availability and the
	/// report's package will not be made available without it being correct.
	/// This value must be at least `anchor_slot - 14400`.
	pub lookup_anchor_slot: Slot,
	/// Hashes of Work Packages, the reports of which must be reported prior to this one.
	/// This is checked on-chain when reported.
	pub prerequisites: VecSet<WorkPackageHash>,
}

impl MaxEncodedLen for RefineContext {
	fn max_encoded_len() -> usize {
		HeaderHash::max_encoded_len() + // anchor
			StateRootHash::max_encoded_len() + // state_root
			MmrPeakHash::max_encoded_len() + // beefy_root
			HeaderHash::max_encoded_len() + // lookup_anchor
			Slot::max_encoded_len() + // lookup_anchor_slot
			BoundedVec::<WorkPackageHash, MaxDependencies>::max_encoded_len() // prerequisites
	}
}

impl RefineContext {
	#[doc(hidden)]
	pub fn largest() -> Self {
		Self {
			anchor: Default::default(),
			state_root: Default::default(),
			beefy_root: Default::default(),
			lookup_anchor: Default::default(),
			lookup_anchor_slot: Slot::MAX,
			prerequisites: (0..max_dependencies()).map(|i| [i as u8; 32].into()).collect(),
		}
	}
}

/// A work-package, a collection of work-items together with authorization and contextual
/// information. This is processed _in-core_ with Is-Authorized and Refine logic to produce a
/// work-report.
///
/// The `context` and `items` fields are wrapped by `W`. Use [`WorkPackage`] if you want a plain
/// work-package struct.
#[derive(Clone, Debug)]
pub struct WrappedWorkPackage<W: Wrap> {
	/// Authorization token.
	pub authorization: Authorization,
	/// Service identifier.
	pub auth_code_host: ServiceId,
	/// Authorizer.
	pub authorizer: Authorizer,
	/// Refinement context.
	pub context: W::Wrap<RefineContext>,
	/// Sequence of work items.
	pub items: W::Wrap<WrappedWorkItems<W>>,
}

impl<W: Wrap> WrappedWorkPackage<W> {
	/// Returns the total number of extrinsics.
	pub fn extrinsic_count(&self) -> u32 {
		self.items
			.borrow()
			.iter()
			.map(|item| item.borrow().extrinsics.len() as u32)
			.sum()
	}

	/// Returns the sum of the lengths of the package's extrinsics.
	///
	/// Returns `u32::MAX` on overflow.
	pub fn saturated_extrinsic_size(&self) -> u32 {
		self.items
			.borrow()
			.iter()
			.map(|item| item.borrow().extrinsic_size())
			.fold(0u32, |sum, size| sum.saturating_add(size))
	}

	/// Returns an iterator over all of the import specs.
	pub fn import_specs(&self) -> impl Iterator<Item = &ImportSpec> {
		self.items.borrow().iter().flat_map(|item| item.borrow().import_segments.iter())
	}

	/// Returns the total number of imports.
	pub fn import_count(&self) -> u32 {
		self.items
			.borrow()
			.iter()
			.map(|item| item.borrow().import_segments.len() as u32)
			.sum()
	}

	/// Returns the total number of exports.
	pub fn export_count(&self) -> u32 {
		self.items.borrow().iter().map(|item| item.borrow().export_count as u32).sum()
	}

	/// Returns the number of dependencies, including dependencies implied by indirect imports.
	pub fn dependency_count(&self) -> u32 {
		let mut indirect = VecSet::new();
		for spec in self.import_specs() {
			if let RootIdentifier::Indirect(wph) = &spec.root {
				indirect.insert(wph);
			}
		}
		(self.context.borrow().prerequisites.len() + indirect.len()) as u32
	}
}

impl<W: Wrap> Encode for WrappedWorkPackage<W>
where
	W::Wrap<RefineContext>: Encode,
	W::Wrap<WrappedWorkItems<W>>: Encode,
{
	fn encode_to<T: codec::Output + ?Sized>(&self, dest: &mut T) {
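		// Note: the wire order below (auth-code host, authorizer code hash, context,
		// authorization, authorizer config, items) differs from the struct's field order;
		// `decode` reassembles the `Authorizer` from its split fields.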
		self.auth_code_host.encode_to(dest);
		self.authorizer.code_hash.encode_to(dest);
		self.context.encode_to(dest);
		self.authorization.encode_to(dest);
		self.authorizer.config.encode_to(dest);
		self.items.encode_to(dest);
	}
}

impl<W: Wrap> Decode for WrappedWorkPackage<W>
where
	W::Wrap<RefineContext>: Decode,
	W::Wrap<WrappedWorkItems<W>>: Decode,
{
	fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
		let auth_code_host = ServiceId::decode(input)?;
		let auth_code_hash = CodeHash::decode(input)?;
		let context = W::Wrap::<RefineContext>::decode(input)?;
		let authorization = Authorization::decode(input)?;
		let auth_config = AuthConfig::decode(input)?;
		let items = W::Wrap::<WrappedWorkItems<W>>::decode(input)?;
		Ok(Self {
			authorization,
			auth_code_host,
			authorizer: Authorizer { code_hash: auth_code_hash, config: auth_config },
			context,
			items,
		})
	}
}

impl<W: Wrap> MaxEncodedLen for WrappedWorkPackage<W>
where
	Self: Encode,
{
	fn max_encoded_len() -> usize {
		// This implementation is somewhat naive. It doesn't account for eg the fact that if there
		// are many import specs, some of the max_input() allowance will be used up by the imported
		// segments. The implementation of max_bundle_size in jam-std-common relies on this
		// naivety. If you change this function, ensure you don't break that!

		let mut max = Authorization::max_encoded_len() +
			ServiceId::max_encoded_len() + // auth_code_host
			Authorizer::max_encoded_len() +
			RefineContext::max_encoded_len() +
			WorkItems::max_encoded_len();

		// In the max expression above, the max_input() bound is effectively applied separately to
		// authorization, authorizer.config, and item payloads. It actually applies to the combined
		// size of these (plus imports and extrinsics) -- they cannot _all_ have maximum length.
		// Account for this.
		let max_inputs = 2 + max_work_items();
		max -= (max_inputs - 1) * (max_input() as usize);

		// Similarly, the max_imports() bound has effectively been applied separately to each item,
		// but it applies to the total number of imports
		max -= (max_work_items() - 1) * (max_imports() as usize) * ImportSpec::max_encoded_len();

		// And the same for max_extrinsics()
		max -=
			(max_work_items() - 1) * (max_extrinsics() as usize) * ExtrinsicSpec::max_encoded_len();

		max
	}
}

/// A work-package, a collection of work-items together with authorization and contextual
/// information. This is processed _in-core_ with Is-Authorized and Refine logic to produce a
/// work-report.
pub type WorkPackage = WrappedWorkPackage<NoWrap>;

/// The authorizer tuple which together identifies a means of determining whether a Work Package is
/// acceptable to execute on a core.
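///
/// A hedged usage sketch; `my_hasher` here is a stand-in for whichever 32-byte hash function
/// the host supplies:
///
/// ```ignore
/// let authorizer = Authorizer::any();
/// let id: AuthorizerHash = authorizer.hash(|data| my_hasher(data));
/// ```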
#[derive(Clone, Encode, Decode, MaxEncodedLen, Debug)]
pub struct Authorizer {
	/// Authorization code hash.
	pub code_hash: CodeHash,
	/// Configuration blob for the auth logic.
	pub config: AuthConfig,
}

impl Authorizer {
	/// An authorizer with a zero code hash and default configuration.
	pub fn any() -> Self {
		Self { code_hash: CodeHash::zero(), config: Default::default() }
	}

	/// Call `f` with the concatenation of the code hash and the configuration blob.
	pub fn with_concat<R>(&self, f: impl Fn(&[u8]) -> R) -> R {
		f(&[&self.code_hash.0[..], &self.config[..]].concat()[..])
	}

	/// The authorizer hash: `hasher` applied to the concatenation of the code hash and the
	/// configuration blob.
	pub fn hash(&self, hasher: impl Fn(&[u8]) -> Hash) -> AuthorizerHash {
		self.with_concat(hasher).into()
	}
}

/// Potential errors encountered during the refinement of a [`WorkItem`].
///
/// Although additional errors may be generated internally by the PVM engine,
/// these are the specific errors designated by the GP for the [`WorkResult`]
/// and that are eligible to be forwarded to the accumulate process as part
/// of the [`AccumulateItem`].
#[derive(Clone, Encode, Decode, MaxEncodedLen, Debug, Eq, PartialEq)]
#[doc(hidden)]
pub enum WorkError {
	/// Gas exhausted (∞).
	OutOfGas = 1,
	/// Unexpected termination (☇).
	Panic = 2,
	/// Invalid number of segments exported (⊚).
	BadExports = 3,
	/// Work output is too big (⊖).
	OutputOversize = 4,
	/// Bad code for the service (`BAD`).
	///
	/// This may occur due to an unknown service identifier or unavailable code preimage.
	BadCode = 5,
	/// Out of bounds code size (`BIG`).
	CodeOversize = 6,
}

/// Fields describing the level of activity imposed on the core to construct the `WorkResult`
/// output.
#[derive(Copy, Clone, Encode, Decode, MaxEncodedLen, Debug, Eq, PartialEq, Default)]
#[doc(hidden)]
pub struct RefineLoad {
	/// The amount of gas actually used for this refinement.
	#[codec(compact)]
	pub gas_used: UnsignedGas,
	/// The number of imports made.
	#[codec(compact)]
	pub imports: u16,
	/// The number of extrinsics referenced.
	#[codec(compact)]
	pub extrinsic_count: u16,
	/// The amount of data used in extrinsics.
	#[codec(compact)]
	pub extrinsic_size: u32,
	/// The number of exports made.
	#[codec(compact)]
	pub exports: u16,
}

impl Add for RefineLoad {
	type Output = Self;
	fn add(self, rhs: Self) -> Self {
		Self {
			gas_used: self.gas_used + rhs.gas_used,
			imports: self.imports + rhs.imports,
			extrinsic_count: self.extrinsic_count + rhs.extrinsic_count,
			extrinsic_size: self.extrinsic_size + rhs.extrinsic_size,
			exports: self.exports + rhs.exports,
		}
	}
}

impl AddAssign for RefineLoad {
	fn add_assign(&mut self, rhs: Self) {
		self.gas_used += rhs.gas_used;
		self.imports += rhs.imports;
		self.extrinsic_count += rhs.extrinsic_count;
		self.extrinsic_size += rhs.extrinsic_size;
		self.exports += rhs.exports;
	}
}

/// The result and surrounding context of a single Refinement operation passed as part of a Work
/// Report.
#[derive(Clone, Encode, Decode, MaxEncodedLen, Debug, Eq, PartialEq)]
#[doc(hidden)]
pub struct WorkDigest {
	/// The service whose Refinement gave this result.
	pub service: ServiceId,
	/// The service's code hash at the time of reporting. This must be available in-core at the
	/// time of the lookup-anchor block.
	pub code_hash: CodeHash,
	/// The hash of the payload data passed into Refinement which gave this result.
	pub payload_hash: PayloadHash,
	/// The amount of gas to be used for the accumulation of this result.
	pub accumulate_gas: UnsignedGas,
	/// The result of the Refinement operation itself.
	#[codec(encoded_as = "CompactRefineResult")]
	pub result: Result<WorkOutput, WorkError>,
	/// Information about the resources the refinement consumed.
	pub refine_load: RefineLoad,
}

/// The result and surrounding context of a single Refinement operation passed in to the
/// Accumulation logic.
#[derive(Clone, Debug, Encode, Decode)]
pub struct WorkItemRecord {
	/// The hash of the work-package in which the work-item which gave this result was placed.
	pub package: WorkPackageHash,
	/// The root of the segment tree which was generated by the work-package in which the work-item
	/// which gave this result was placed.
	pub exports_root: SegmentTreeRoot,
	/// The hash of the authorizer which authorized the execution of the work-package
	/// which generated this result.
	pub authorizer_hash: AuthorizerHash,
	/// The hash of the payload data passed into Refinement which gave this result.
	pub payload: PayloadHash,
	/// The amount of gas provided to Accumulate by the work-item behind this result.
	#[codec(compact)]
	pub gas_limit: UnsignedGas,
	/// The result of the Refinement operation itself.
	#[codec(encoded_as = "CompactRefineResult")]
	pub result: Result<WorkOutput, WorkError>,
	/// The output of the Is-Authorized logic which authorized the execution of the work-package
	/// which generated this result.
	pub auth_output: AuthTrace,
}

/// A single deferred transfer of balance and/or data, passed in to the Accumulation logic.
#[derive(Debug, Clone, Encode, Decode, Default)]
pub struct TransferRecord {
	/// The index of the service from which the transfer was made.
	pub source: ServiceId,
	/// The index of the service which is the target of the transfer.
	pub destination: ServiceId,
	/// The balance passed from the `source` service to the `destination`.
	pub amount: Balance,
	/// The information passed from the `source` service to the `destination`.
	pub memo: Memo,
	/// The gas limit with which the `destination` On Transfer logic may execute in order to
	/// process this transfer.
	pub gas_limit: UnsignedGas,
}

/// Accumulate item.
#[derive(Debug, Encode, Decode)]
pub enum AccumulateItem {
	/// Work item record.
	WorkItem(WorkItemRecord),
	/// Incoming transfer record.
	Transfer(TransferRecord),
}

impl From<WorkItemRecord> for AccumulateItem {
	fn from(w: WorkItemRecord) -> Self {
		AccumulateItem::WorkItem(w)
	}
}

impl From<TransferRecord> for AccumulateItem {
	fn from(t: TransferRecord) -> Self {
		AccumulateItem::Transfer(t)
	}
}

/// Parameters for the invocation of Accumulate.
#[derive(Debug, Encode, Decode)]
#[doc(hidden)]
pub struct AccumulateParams {
	/// The current time slot.
	#[codec(compact)]
	pub slot: Slot,
	/// The index of the service being accumulated.
	#[codec(compact)]
	pub service_id: ServiceId,
	/// Number of work-results or transfers to accumulate.
	#[codec(compact)]
	pub item_count: u32,
}

/// Parameters for the invocation of Is-Authorized.
#[derive(Debug, Encode, Decode)]
#[doc(hidden)]
pub struct IsAuthorizedParams {
	/// Core index.
	#[codec(compact)]
	pub core: u16,
}

/// Parameters for the invocation of Refine.
#[derive(Debug, Encode, Decode)]
#[doc(hidden)]
pub struct RefineParams {
	/// Core index.
	#[codec(compact)]
	pub core_index: CoreIndex,
	/// Work-item index.
	#[codec(compact)]
	pub item_index: u32, // u32?
	/// The index of the service being refined.
	#[codec(compact)]
	pub service_id: ServiceId,
	/// The payload data to process.
	pub payload: WorkPayload,
	/// The hash of the Work Package.
	pub package_hash: WorkPackageHash,
}

// TODO: @gav Consider moving to jam-node.
/// Parameters for the invocation of Refine, reference variant.
#[derive(Debug, Encode)]
#[doc(hidden)]
pub struct RefineParamsRef<'a> {
	/// Core index.
	#[codec(compact)]
	pub core_index: CoreIndex,
	/// Work-item index.
	#[codec(compact)]
	pub item_index: u32,
	/// The index of the service being refined.
	#[codec(compact)]
	pub service_id: ServiceId,
	/// The payload data to process.
	pub payload: &'a WorkPayload,
	/// The hash of the Work Package.
	pub package_hash: &'a WorkPackageHash,
}

/// Information concerning a particular service's state.
///
/// This is used in the `service_info` host-call.
#[derive(Debug, Clone, Encode, Decode, MaxEncodedLen)]
pub struct ServiceInfo {
	/// The hash of the code of the service.
	pub code_hash: CodeHash,
	/// The existing balance of the service.
	pub balance: Balance,
	/// The minimum balance which the service must satisfy.
	pub threshold: Balance,
	/// The minimum amount of gas which must be provided to this service's `accumulate` for each
	/// work item it must process.
	pub min_item_gas: UnsignedGas,
	/// The minimum amount of gas which must be provided to this service's `accumulate` for each
	/// incoming transfer it must process.
	pub min_memo_gas: UnsignedGas,
	/// The total number of bytes used for data electively held for this service on-chain.
	pub bytes: u64,
	/// The total number of items of data electively held for this service on-chain.
	pub items: u32,
	/// Offset of the storage footprint; a minimum deposit is only needed for the portion of the
	/// footprint above this offset.
	pub deposit_offset: Balance,
	/// Creation time slot.
	pub creation_slot: Slot,
	/// Most recent accumulation time slot.
	pub last_accumulation_slot: Slot,
	/// Parent service identifier.
	pub parent_service: ServiceId,
}

impl ServiceInfo {
	/// Field offsets (in bytes) into the encoded representation.
	pub const CODE_HASH_OFFSET: usize = 0;
	pub const BALANCE_OFFSET: usize = 32;
	pub const THRESHOLD_OFFSET: usize = 40;
	pub const MIN_ITEM_GAS_OFFSET: usize = 48;
	pub const MIN_MEMO_GAS_OFFSET: usize = 56;
	pub const BYTES_OFFSET: usize = 64;
	pub const ITEMS_OFFSET: usize = 72;
	pub const DEPOSIT_OFFSET_OFFSET: usize = 76;
	pub const CREATION_SLOT_OFFSET: usize = 84;
	pub const LAST_ACCUMULATION_SLOT_OFFSET: usize = 88;
	pub const PARENT_SERVICE_OFFSET: usize = 92;
	/// Encoded length in bytes.
	pub const ENCODED_LEN: usize = 96;
}

/// Refine result used for compact encoding of work result as prescribed by GP.
struct CompactRefineResult(Result<WorkOutput, WorkError>);
struct CompactRefineResultRef<'a>(&'a Result<WorkOutput, WorkError>);

impl From<CompactRefineResult> for Result<WorkOutput, WorkError> {
	fn from(value: CompactRefineResult) -> Self {
		value.0
	}
}

impl<'a> From<&'a Result<WorkOutput, WorkError>> for CompactRefineResultRef<'a> {
	fn from(value: &'a Result<WorkOutput, WorkError>) -> Self {
		CompactRefineResultRef(value)
	}
}

impl<'a> codec::EncodeAsRef<'a, Result<WorkOutput, WorkError>> for CompactRefineResult {
	type RefType = CompactRefineResultRef<'a>;
}

impl Encode for CompactRefineResult {
	fn encode_to<T: codec::Output + ?Sized>(&self, dest: &mut T) {
		CompactRefineResultRef(&self.0).encode_to(dest)
	}
}

impl MaxEncodedLen for CompactRefineResult {
	fn max_encoded_len() -> usize {
		(1 + WorkOutput::max_encoded_len()).max(WorkError::max_encoded_len())
	}
}

impl Encode for CompactRefineResultRef<'_> {
	fn encode_to<T: codec::Output + ?Sized>(&self, dest: &mut T) {
		match &self.0 {
			Ok(o) => {
				dest.push_byte(0);
				o.encode_to(dest)
			},
			Err(e) => e.encode_to(dest),
		}
	}
}

impl Decode for CompactRefineResult {
	fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
		match input.read_byte()? {
			0 => Ok(Self(Ok(WorkOutput::decode(input)?))),
			e => Ok(Self(Err(WorkError::decode(&mut &[e][..])?))),
		}
	}
}

macro_rules! fetch_kind {
	($($(#[$attr:meta])* $variant:ident = $value:expr),* $(,)?) => {
		/// Fetch host-call variant identifiers.
		#[derive(Copy, Clone, Debug)]
		pub enum FetchKind {
			$(
				$(#[$attr])*
				$variant = $value,
			)*
		}

		impl TryFrom<u64> for FetchKind {
			type Error = ();
			fn try_from(value: u64) -> Result<Self, Self::Error> {
				match value {
					$(
						$value => Ok(FetchKind::$variant),
					)*
					_ => Err(()),
				}
			}
		}
	};
}

fetch_kind! {
	/// Protocol stateless parameters.
	ProtocolParameters = 0,
	/// Entropy.
	Entropy = 1,
	/// Output from the parameterized authorizer code.
	AuthTrace = 2,
	/// A particular extrinsic of a given work-item.
	AnyExtrinsic = 3,
	/// A particular extrinsic of the executing work-item.
	OurExtrinsic = 4,
	/// A particular import-segment of a given work-item.
	AnyImport = 5,
	/// A particular import-segment of the executing work-item.
	OurImport = 6,
	/// Current work-package.
	WorkPackage = 7,
	/// Work package authorization code hash and config blob.
	Authorizer = 8,
	/// Input provided to the parameterized authorizer code.
	AuthToken = 9,
	/// Refine context.
	RefineContext = 10,
	/// All work items information summary.
	ItemsSummary = 11,
	/// A particular work item information summary.
	AnyItemSummary = 12,
	/// A particular work item payload.
	AnyPayload = 13,
	/// All accumulate items.
	AccumulateItems = 14,
	/// A particular accumulate item.
	AnyAccumulateItem = 15,
}

/// Work item summary information used by the fetch host call.
#[derive(Clone, Encode, Decode, Debug)]
pub struct WorkItemSummary {
	/// Service identifier to which the work item relates.
	pub service: ServiceId,
	/// The service's code hash at the time of reporting.
	pub code_hash: CodeHash,
	/// Gas limit for the item's Refine logic.
	pub refine_gas_limit: UnsignedGas,
	/// Gas limit for the item's Accumulate logic.
	pub accumulate_gas_limit: UnsignedGas,
	/// Number of segments exported by the item.
	pub export_count: u16,
	/// Number of segments imported by the item.
	pub import_count: u16,
	/// Number of extrinsics referenced by the item.
	pub extrinsics_count: u16,
	/// Length in bytes of the item's payload.
	pub payload_len: u32,
}

impl From<&WorkItem> for WorkItemSummary {
	fn from(w: &WorkItem) -> Self {
		WorkItemSummary {
			service: w.service,
			code_hash: w.code_hash,
			refine_gas_limit: w.refine_gas_limit,
			accumulate_gas_limit: w.accumulate_gas_limit,
			export_count: w.export_count,
			import_count: w.import_segments.len() as u16,
			extrinsics_count: w.extrinsics.len() as u16,
			payload_len: w.payload.len() as u32,
		}
	}
}

/// Page access mode.
#[derive(Copy, Clone, Debug)]
pub enum PageMode {
	/// The page may only be read.
	ReadOnly = 0,
	/// The page may be read and written.
	ReadWrite = 1,
}

/// Page operation.
#[derive(Copy, Clone, Debug)]
pub enum PageOperation {
	/// Free the page.
	Free,
	/// Allocate the page with the given access mode.
	Alloc(PageMode),
	/// Set the access mode of the page.
	SetMode(PageMode),
}

impl From<PageOperation> for u64 {
	fn from(value: PageOperation) -> Self {
		match value {
			PageOperation::Free => 0,
			PageOperation::Alloc(mode) => 1 + mode as u64,
			PageOperation::SetMode(mode) => 3 + mode as u64,
		}
	}
}

impl TryFrom<u64> for PageOperation {
	type Error = ();
	fn try_from(operation: u64) -> Result<Self, Self::Error> {
		match operation {
			0 => Ok(Self::Free),
			1 => Ok(Self::Alloc(PageMode::ReadOnly)),
			2 => Ok(Self::Alloc(PageMode::ReadWrite)),
			3 => Ok(Self::SetMode(PageMode::ReadOnly)),
			4 => Ok(Self::SetMode(PageMode::ReadWrite)),
			_ => Err(()),
		}
	}
}

#[cfg(test)]
mod tests {
	use super::*;
	use bounded_collections::{bounded_vec, TryCollect};
	use codec::DecodeAll;

	#[test]
	fn compact_refine_result_codec() {
		let enc_dec = |exp_res, exp_buf: &[u8]| {
			let buf = CompactRefineResultRef(&exp_res).encode();
			assert_eq!(buf, exp_buf);
			let res = CompactRefineResult::decode(&mut &buf[..]).unwrap();
			assert_eq!(res.0, exp_res);
		};

		enc_dec(Ok(vec![1, 2, 3].into()), &[0, 3, 1, 2, 3]);
		enc_dec(Err(WorkError::OutOfGas), &[1]);
		enc_dec(Err(WorkError::Panic), &[2]);
		enc_dec(Err(WorkError::BadExports), &[3]);
		enc_dec(Err(WorkError::OutputOversize), &[4]);
		enc_dec(Err(WorkError::BadCode), &[5]);
		enc_dec(Err(WorkError::CodeOversize), &[6]);
	}
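
	#[test]
	fn fetch_kind_and_page_operation_mappings() {
		// Sanity sketch of the numeric mappings declared above; it relies only on the variants
		// defined in this file.
		assert!(matches!(FetchKind::try_from(1u64), Ok(FetchKind::Entropy)));
		assert!(matches!(FetchKind::try_from(15u64), Ok(FetchKind::AnyAccumulateItem)));
		assert!(FetchKind::try_from(16u64).is_err());

		let raw = u64::from(PageOperation::Alloc(PageMode::ReadWrite));
		assert_eq!(raw, 2);
		assert!(matches!(
			PageOperation::try_from(raw),
			Ok(PageOperation::Alloc(PageMode::ReadWrite))
		));
		assert!(PageOperation::try_from(5u64).is_err());
	}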

	#[test]
	fn service_info_encoded_len_is_correct() {
		assert_eq!(ServiceInfo::max_encoded_len(), ServiceInfo::ENCODED_LEN);
	}

	#[test]
	fn service_info_item_offset_works() {
		let default = ServiceInfo {
			code_hash: Default::default(),
			balance: Default::default(),
			threshold: Default::default(),
			min_item_gas: Default::default(),
			min_memo_gas: Default::default(),
			bytes: Default::default(),
			items: Default::default(),
			deposit_offset: Default::default(),
			creation_slot: Default::default(),
			last_accumulation_slot: Default::default(),
			parent_service: Default::default(),
		};

		macro_rules! sanity_check {
			($field:ident, $type:ident, $offset:expr, $value:expr) => {{
				// Verify the type has a fixed encoded size
				let type_size = core::mem::size_of::<$type>();
				assert_eq!(type_size, $type::max_encoded_len());
				let expected = $type::from($value);
				let test_struct = ServiceInfo { $field: expected, ..default };
				let buffer = test_struct.encode();
				// Decode the field from the known offset
				let actual: $type =
					DecodeAll::decode_all(&mut &buffer[$offset..$offset + type_size]).unwrap();
				assert_eq!(expected, actual);
			}};
			($field:ident, $type:ident, $offset:expr) => {{
				sanity_check!($field, $type, $offset, <$type>::MAX);
			}};
		}

		sanity_check!(code_hash, CodeHash, ServiceInfo::CODE_HASH_OFFSET, [0xff_u8; 32]);
		sanity_check!(balance, Balance, ServiceInfo::BALANCE_OFFSET);
		sanity_check!(threshold, Balance, ServiceInfo::THRESHOLD_OFFSET);
		sanity_check!(min_item_gas, UnsignedGas, ServiceInfo::MIN_ITEM_GAS_OFFSET);
		sanity_check!(min_memo_gas, UnsignedGas, ServiceInfo::MIN_MEMO_GAS_OFFSET);
		sanity_check!(bytes, u64, ServiceInfo::BYTES_OFFSET);
		sanity_check!(items, u32, ServiceInfo::ITEMS_OFFSET);
		sanity_check!(deposit_offset, Balance, ServiceInfo::DEPOSIT_OFFSET_OFFSET);
		sanity_check!(creation_slot, Slot, ServiceInfo::CREATION_SLOT_OFFSET);
		sanity_check!(last_accumulation_slot, Slot, ServiceInfo::LAST_ACCUMULATION_SLOT_OFFSET);
		sanity_check!(parent_service, ServiceId, ServiceInfo::PARENT_SERVICE_OFFSET);
	}

	#[test]
	fn context_max_encoded_len() {
		assert_eq!(RefineContext::largest().encoded_size(), RefineContext::max_encoded_len());
	}

	#[test]
	fn package_max_encoded_len() {
		let portion = |total, i, num| (total / num) + if i == 0 { total % num } else { 0 };

		let max_inputs = 2 + max_work_items();
		let input = |i| vec![0; portion(max_input() as usize, i, max_inputs)];

		let imports = |i| {
			bounded_vec![
				ImportSpec { root: RootIdentifier::Direct(Default::default()), index: u16::MAX };
				portion(max_imports() as usize, i, max_work_items())
			]
		};

		let extrinsics = |i| {
			bounded_vec![
				ExtrinsicSpec { hash: Default::default(), len: u32::MAX };
				portion(max_extrinsics() as usize, i, max_work_items())
			]
		};

		// The largest possible work-package, or at least very close -- it might be possible to get
		// a few bytes more by eg rebalancing the input Vec lengths, as Vec lengths use compact
		// encoding.
		//
		// "Largest possible work-package" is actually a bit of a lie -- it isn't really possible
		// for a work-package to be this large as the total input size would exceed max_input()
		// after accounting for the import segments. WorkPackage::max_encoded_len() does _not_
		// account for these though, and we currently assume this accounting is lacking in the
		// max_bundle_size() implementation in jam-std-common. Hence the construction of an
		// invalidly large work-package here.
		let largest_package = WorkPackage {
			authorization: input(0).into(),
			auth_code_host: ServiceId::MAX,
			authorizer: Authorizer { code_hash: Default::default(), config: input(1).into() },
			context: RefineContext::largest(),
			items: (0..max_work_items())
				.map(|i| WorkItem {
					service: ServiceId::MAX,
					code_hash: Default::default(),
					payload: input(2 + i).into(),
					refine_gas_limit: UnsignedGas::MAX,
					accumulate_gas_limit: UnsignedGas::MAX,
					import_segments: imports(i),
					extrinsics: extrinsics(i),
					export_count: u16::MAX,
				})
				.try_collect()
				.unwrap(),
		};
		assert_eq!(largest_package.import_count(), max_imports());
		assert_eq!(largest_package.extrinsic_count(), max_extrinsics());
		let largest_package_size = largest_package.encoded_size();

		// WorkPackage::max_encoded_len() assumes each input/import-spec/extrinsic-spec Vec can be
		// the maximum length when calculating how many bytes are needed for encoding Vec lengths
		// in the worst case. The Vecs cannot all be the maximum length though due to the shared
		// nature of max_input()/max_imports()/etc. To account for this discrepancy, we allow
		// WorkPackage::max_encoded_len() to overshoot largest_package_size by 1 for each such Vec.
		let max = WorkPackage::max_encoded_len();
		assert!(largest_package_size <= max);
		assert!((largest_package_size + max_inputs + (2 * max_work_items())) >= max);
	}
}