use anyhow::Result;
use move_binary_format::file_format::CompiledModule;
use move_bytecode_utils::Modules;
use move_core_types::{
    identifier::{IdentStr, Identifier},
    language_storage::ModuleId,
};
use move_model::model::{FunctionEnv, GlobalEnv};
use move_read_write_set_types::ReadWriteSet;
use prover_bytecode::{
    function_target_pipeline::{FunctionTargetPipeline, FunctionTargetsHolder, FunctionVariant},
    read_write_set_analysis::{ReadWriteSetProcessor, ReadWriteSetState},
};
use read_write_set_dynamic::NormalizedReadWriteSetAnalysis;
use std::collections::BTreeMap;
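
/// Results of running the read/write set analysis: the analyzed function
/// targets together with the `GlobalEnv` they were built from.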
pub struct ReadWriteSetAnalysis {
    targets: FunctionTargetsHolder,
    env: GlobalEnv,
}
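
/// Infer read/write set summaries for `modules`. The modules are topologically
/// sorted by their dependency relation before analysis.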
pub fn analyze<'a>(
    modules: impl IntoIterator<Item = &'a CompiledModule>,
) -> Result<ReadWriteSetAnalysis> {
    let module_map = Modules::new(modules);
    let dep_graph = module_map.compute_dependency_graph();
    let topo_order = dep_graph.compute_topological_order()?;
    analyze_sorted(topo_order)
}
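
/// Like `analyze`, but assumes `modules` are already topologically sorted by
/// the dependency relation.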
pub fn analyze_sorted<'a>(
    modules: impl IntoIterator<Item = &'a CompiledModule>,
) -> Result<ReadWriteSetAnalysis> {
    // Build a `GlobalEnv` model directly from the compiled bytecode.
    let env = move_model::run_bytecode_model_builder(modules)?;
    let mut pipeline = FunctionTargetPipeline::default();
    pipeline.add_processor(ReadWriteSetProcessor::new());
    // Register every function in every module as an analysis target.
    let mut targets = FunctionTargetsHolder::default();
    for module_env in env.get_modules() {
        for func_env in module_env.get_functions() {
            targets.add_target(&func_env);
        }
    }
    // Run the read/write set processor over all registered targets.
    pipeline.run(&env, &mut targets);
    Ok(ReadWriteSetAnalysis { targets, env })
}

impl ReadWriteSetAnalysis {
    /// Return the inferred read/write set state for `fun` in `module`, if the
    /// function exists and was analyzed.
    pub fn get_summary(&self, module: &ModuleId, fun: &IdentStr) -> Option<&ReadWriteSetState> {
        self.get_function_env(module, fun).and_then(|fenv| {
            self.targets
                .get_data(&fenv.get_qualified_id(), &FunctionVariant::Baseline)
                .and_then(|data| data.annotations.get::<ReadWriteSetState>())
        })
    }

    /// Look up the `FunctionEnv` for `fun` in `module`, if it exists.
    pub fn get_function_env(&self, module: &ModuleId, fun: &IdentStr) -> Option<FunctionEnv> {
        self.env
            .find_function_by_language_storage_id_name(module, fun)
    }

    /// Return normalized read/write sets for every script function, plus any
    /// extra `(module, function)` pairs given in `add_ons`.
    pub fn normalize_all_scripts(
        &self,
        add_ons: Vec<(ModuleId, Identifier)>,
    ) -> NormalizedReadWriteSetAnalysis {
        let mut result: BTreeMap<ModuleId, BTreeMap<Identifier, ReadWriteSet>> = BTreeMap::new();
        for module in self.env.get_modules() {
            let module_id = module.get_verified_module().self_id();
            let module_entry = result.entry(module_id.clone()).or_default();
            for func in module.get_functions() {
                let func_name = func.get_identifier();
                if func.is_script() || add_ons.contains(&(module_id.clone(), func_name.clone())) {
                    module_entry.insert(
                        func_name,
                        self.targets
                            .get_data(&func.get_qualified_id(), &FunctionVariant::Baseline)
                            .and_then(|data| data.annotations.get::<ReadWriteSetState>())
                            .expect("read/write set results should exist for every analyzed function")
                            .normalize(&self.env),
                    );
                }
            }
        }
        NormalizedReadWriteSetAnalysis::new(result)
    }
}
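
// Example usage (a sketch, assuming `modules: Vec<CompiledModule>` has already
// been deserialized and that `module_id` / `fun_name` refer to a function in
// one of those modules):
//
//     let analysis = analyze(modules.iter())?;
//     if let Some(state) = analysis.get_summary(&module_id, fun_name) {
//         // `state` is the inferred ReadWriteSetState for that function.
//     }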