1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
/*!
* Webvh utilizes Log Entries for each version change of the DID Document.
*/
use std::{fs::OpenOptions, io::Write};
use crate::{DIDWebVHError, SCID_HOLDER, parameters::Parameters};
use affinidi_data_integrity::DataIntegrityProof;
use affinidi_secrets_resolver::secrets::Secret;
use chrono::Utc;
use multibase::Base;
use multihash::Multihash;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use serde_json_canonicalizer::to_string;
use sha2::{Digest, Sha256};
use tracing::debug;
pub mod read;
/// Each version of the DID gets a new log entry
/// [Log Entries](https://identity.foundation/didwebvh/v1.0/#the-did-log-file)
///
/// NOTE: the didwebvh spec requires camelCase JSON keys (`versionId`,
/// `versionTime`) in the serialized log entry. Serialization of this struct is
/// also the input to the JCS entry hash / SCID generation, so the wire names
/// must match the spec exactly.
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct LogEntry {
    /// format integer-prev_hash
    pub version_id: String,
    /// ISO 8601 date format
    pub version_time: String,
    /// configuration options from the controller
    pub parameters: Parameters,
    /// DID document
    pub state: Value,
    /// Data Integrity Proof (absent on preliminary/unsigned entries)
    #[serde(skip_serializing_if = "Option::is_none")]
    pub proof: Option<DataIntegrityProof>,
}
impl LogEntry {
    /// Creates and returns the first webvh log Entry.
    /// Generates the SCID and Data Integrity proof
    ///
    /// Inputs:
    /// - version_time: Optional ISO 8601 date string, If not given, defaults to now.
    /// - document: The DID Document as a JSON Value.
    /// - parameters: The Parameters for the Log Entry.
    /// - secret: The Secret used to sign the Log Entry.
    ///
    /// Returns:
    /// - A valid Log Entry
    pub async fn create_first_entry(
        version_time: Option<String>,
        document: &Value,
        parameters: &Parameters,
        secret: &Secret,
    ) -> Result<LogEntry, DIDWebVHError> {
        let now = Utc::now();

        // Ensure SCID field is set correctly: the preliminary entry carries the
        // {SCID} placeholder which is replaced once the SCID is generated.
        let mut parameters = parameters.clone();
        parameters.scid = Some(SCID_HOLDER.to_string());

        // Create a VerificationMethod ID from the first updatekey
        // (fixes mojibake corruption: `¶meters` was a mis-encoded `&parameters`)
        let vm_id = if let Some(Some(value)) = &parameters.update_keys {
            if let Some(key) = value.iter().next() {
                // Create a VerificationMethod ID from the first update key
                // did:key form: did:key:<key>#<key>
                ["did:key:", key, "#", key].concat()
            } else {
                return Err(DIDWebVHError::SCIDError(
                    "No update keys provided in parameters".to_string(),
                ));
            }
        } else {
            return Err(DIDWebVHError::SCIDError(
                "No update keys provided in parameters".to_string(),
            ));
        };

        // Check that the vm_id matches the secret key id, otherwise the proof
        // would be created with a key that doesn't match the declared VM.
        if secret.id != vm_id {
            return Err(DIDWebVHError::SCIDError(format!(
                "Secret key ID {} does not match VerificationMethod ID {}",
                secret.id, vm_id
            )));
        }

        // Preliminary (unsigned) entry: versionId is the {SCID} placeholder.
        let log_entry = LogEntry {
            version_id: SCID_HOLDER.to_string(),
            version_time: version_time
                .unwrap_or_else(|| now.to_rfc3339_opts(chrono::SecondsFormat::Secs, true)),
            parameters,
            state: document.clone(),
            proof: None,
        };

        // Create the SCID from the first log entry
        let scid = log_entry.generate_scid()?;

        // Replace all instances of {SCID} with the actual SCID. Done textually
        // on the serialized form so every occurrence (parameters, document,
        // versionId) is replaced in one pass, then deserialized back.
        let le_str = serde_json::to_string(&log_entry).map_err(|e| {
            DIDWebVHError::SCIDError(format!(
                "Couldn't serialize LogEntry to JSON. Reason: {}",
                e
            ))
        })?;
        let mut log_entry: LogEntry = serde_json::from_str(&le_str.replace(SCID_HOLDER, &scid))
            .map_err(|e| {
                DIDWebVHError::SCIDError(format!(
                    "Couldn't deserialize LogEntry from SCID conversion. Reason: {}",
                    e
                ))
            })?;

        // Create the entry hash for this Log Entry
        let entry_hash = log_entry.generate_log_entry_hash().map_err(|e| {
            DIDWebVHError::SCIDError(format!(
                "Couldn't generate entryHash for first LogEntry. Reason: {}",
                e
            ))
        })?;
        // First entry is always version number 1: "1-<entryHash>"
        log_entry.version_id = ["1-", &entry_hash].concat();

        // Generate the proof for the log entry (signed over the JCS form of the
        // fully-resolved entry, including the final versionId).
        let log_entry_values = serde_json::to_value(&log_entry).map_err(|e| {
            DIDWebVHError::SCIDError(format!(
                "Couldn't convert LogEntry to JSON Values for Signing. Reason: {}",
                e
            ))
        })?;
        let log_entry = serde_json::from_value(
            DataIntegrityProof::sign_data_jcs(
                &serde_json::from_value(log_entry_values).map_err(|e| {
                    DIDWebVHError::SCIDError(format!(
                        "Couldn't convert LogEntry to JSON Values for Signing. Reason: {}",
                        e
                    ))
                })?,
                &vm_id,
                secret,
            )
            .map_err(|e| {
                DIDWebVHError::SCIDError(format!(
                    "Couldn't generate Data Integrity Proof for LogEntry. Reason: {}",
                    e
                ))
            })?,
        )
        .map_err(|e| {
            DIDWebVHError::SCIDError(format!(
                "Couldn't deserialize signed LogEntry. Reason: {}",
                e
            ))
        })?;

        Ok(log_entry)
    }

    /// Takes an existing LogEntry and creates a new LogEntry from it
    ///
    /// Per the webvh spec, the entry hash is calculated while versionId still
    /// holds the PREVIOUS entry's versionId; the new versionId then becomes
    /// "<previous-version-number + 1>-<entryHash>".
    pub async fn create_new_log_entry(
        previous_log_entry: &LogEntry,
        version_time: Option<String>,
        document: &Value,
        parameters: &Parameters,
        secret: &Secret,
    ) -> Result<LogEntry, DIDWebVHError> {
        let mut new_entry = LogEntry {
            version_id: previous_log_entry.version_id.clone(),
            version_time: version_time
                .unwrap_or_else(|| Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true)),
            parameters: parameters.clone(),
            state: document.clone(),
            proof: None,
        };

        // Create the entry hash for this Log Entry (versionId still the
        // previous entry's, as the spec requires)
        let entry_hash = new_entry.generate_log_entry_hash().map_err(|e| {
            DIDWebVHError::SCIDError(format!(
                "Couldn't generate entryHash for LogEntry. Reason: {}",
                e
            ))
        })?;

        // Increment the version-id
        let (current_id, _) = LogEntry::get_version_id_fields(&new_entry.version_id)?;
        new_entry.version_id = [&(current_id + 1).to_string(), "-", &entry_hash].concat();

        // Generate the proof for the log entry
        let log_entry_values = serde_json::to_value(&new_entry).map_err(|e| {
            DIDWebVHError::SCIDError(format!(
                "Couldn't convert LogEntry to JSON Values for Signing. Reason: {}",
                e
            ))
        })?;
        let log_entry = serde_json::from_value(
            DataIntegrityProof::sign_data_jcs(
                &serde_json::from_value(log_entry_values).map_err(|e| {
                    DIDWebVHError::SCIDError(format!(
                        "Couldn't convert LogEntry to JSON Values for Signing. Reason: {}",
                        e
                    ))
                })?,
                &secret.id,
                secret,
            )
            .map_err(|e| {
                DIDWebVHError::SCIDError(format!(
                    "Couldn't generate Data Integrity Proof for LogEntry. Reason: {}",
                    e
                ))
            })?,
        )
        .map_err(|e| {
            DIDWebVHError::SCIDError(format!(
                "Couldn't deserialize signed LogEntry. Reason: {}",
                e
            ))
        })?;

        Ok(log_entry)
    }

    /// Append a valid LogEntry to a file
    ///
    /// Each entry is written as a single JSON line followed by '\n' (JSON Lines
    /// format, as the DID log file requires). The file is created if missing.
    pub fn save_to_file(&self, file_path: &str) -> Result<(), DIDWebVHError> {
        let mut file = OpenOptions::new()
            .create(true)
            .append(true)
            .open(file_path)
            .map_err(|e| {
                DIDWebVHError::LogEntryError(format!("Couldn't open file {}: {}", file_path, e))
            })?;
        file.write_all(
            serde_json::to_string(self)
                .map_err(|e| {
                    DIDWebVHError::LogEntryError(format!(
                        "Couldn't serialize LogEntry to JSON. Reason: {}",
                        e
                    ))
                })?
                .as_bytes(),
        )
        .map_err(|e| {
            DIDWebVHError::LogEntryError(format!(
                "Couldn't append LogEntry to file({}). Reason: {}",
                file_path, e
            ))
        })?;
        // Trailing newline terminates the JSON Lines record
        file.write_all("\n".as_bytes()).map_err(|e| {
            DIDWebVHError::LogEntryError(format!(
                "Couldn't append LogEntry to file({}). Reason: {}",
                file_path, e
            ))
        })?;
        Ok(())
    }

    /// Generates a SCID from a preliminary LogEntry
    /// This only needs to be called once when the DID is first created.
    /// The SCID is simply the entry hash of the preliminary (placeholder) entry.
    fn generate_scid(&self) -> Result<String, DIDWebVHError> {
        self.generate_log_entry_hash().map_err(|e| {
            DIDWebVHError::SCIDError(format!(
                "Couldn't generate SCID from preliminary LogEntry. Reason: {}",
                e
            ))
        })
    }

    /// Calculates a Log Entry hash
    ///
    /// Hash = base58btc multibase encoding of the sha2-256 multihash of the
    /// JCS (RFC 8785) canonical form of this entry.
    pub fn generate_log_entry_hash(&self) -> Result<String, DIDWebVHError> {
        let jcs = to_string(self).map_err(|e| {
            DIDWebVHError::SCIDError(format!(
                "Couldn't generate JCS from LogEntry. Reason: {}",
                e
            ))
        })?;
        debug!("JCS for LogEntry hash: {}", jcs);

        // SHA_256 code = 0x12, length of SHA256 is 32 bytes
        let hash_encoded = Multihash::<32>::wrap(0x12, Sha256::digest(jcs.as_bytes()).as_slice())
            .map_err(|e| {
                DIDWebVHError::SCIDError(format!(
                    "Couldn't create multihash encoding for LogEntry. Reason: {}",
                    e
                ))
            })?;

        Ok(multibase::encode(Base::Base58Btc, hash_encoded.to_bytes()))
    }

    /*
    /// Creates a new LogEntry based on the previous one, with updated parameters and document.
    /// If previous is None, it creates the first log entry.
    /// If previous is None, sets to the current time.
    pub fn create_log_entry(
        previous: &Option<LogEntry>,
        parameters: &Parameters,
        document: &Value,
        version_time: Option<String>,
        secret: &Secret,
    ) -> Result<LogEntry, DIDWebVHError> {
        // create a new parameters based on previous or first log entry
        let parameters = if let Some(previous) = previous {
            // create diff of the previous parameters and new parameters
            previous.parameters.diff(parameters)
        } else {
            // Setup first entry parameters
        };
        Err(DIDWebVHError::SCIDError(
            "create_log_entry is not implemented".to_string(),
        ))
    }*/

    /// Takes a LogEntry and creates a new set of LogEntries to revoke the webvh DID
    /// Returns one or more Log Entries
    /// NOTE: May return more than a single log entry if updateKeys need to be revoked first.
    ///
    /// FIXME(review): this looks unfinished — `revoked_entry` is constructed
    /// (proof cleared, deactivated=true, updateKeys nulled) but then discarded,
    /// and an EMPTY Vec is returned, so callers currently receive no
    /// deactivation entry at all. Before returning, the entry also still needs
    /// a recalculated versionId (entry hash) and a fresh Data Integrity proof
    /// — which likely requires a `Secret` parameter. Confirm intended design
    /// before relying on this method.
    pub fn revoke(&self) -> Result<Vec<LogEntry>, DIDWebVHError> {
        let mut revoked_entry: LogEntry = self.clone();
        revoked_entry.proof = None;
        revoked_entry.parameters.deactivated = true;
        revoked_entry.parameters.update_keys = Some(None);
        Ok(Vec::new())
    }
}