use std::{fmt, str::FromStr};

use darkfi_sdk::{
    blockchain::block_version,
    crypto::{MerkleNode, MerkleTree},
    monotree::{Hash as StateHash, EMPTY_HASH},
};
#[cfg(feature = "async-serial")]
use darkfi_serial::{async_trait, FutAsyncWriteExt};
use darkfi_serial::{deserialize, serialize, Encodable, SerialDecodable, SerialEncodable};
use sled_overlay::{
    serial::{parse_record, parse_u32_key_record},
    sled,
};

use crate::{util::time::Timestamp, Error, Result};

use super::{monero::MoneroPowData, SledDbOverlayPtr};

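/// Proof of Work data attached to a block [`Header`].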
#[derive(Clone, Debug, SerialEncodable, SerialDecodable)]
#[allow(clippy::large_enum_variant)]
pub enum PowData {
    /// Native DarkFi PoW
    Darkfi,
    /// Monero PoW data (see [`MoneroPowData`])
    Monero(MoneroPowData),
}

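/// A block header's BLAKE3 hash, stored as its raw 32 bytes.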
#[derive(Clone, Copy, Debug, Eq, PartialEq, SerialEncodable, SerialDecodable)]
pub struct HeaderHash(pub [u8; 32]);

impl HeaderHash {
    pub fn new(data: [u8; 32]) -> Self {
        Self(data)
    }

    #[inline]
    pub fn inner(&self) -> &[u8; 32] {
        &self.0
    }

    pub fn as_string(&self) -> String {
        blake3::Hash::from_bytes(self.0).to_string()
    }
}

impl FromStr for HeaderHash {
    type Err = Error;

    fn from_str(header_hash_str: &str) -> Result<Self> {
        Ok(Self(*blake3::Hash::from_str(header_hash_str)?.as_bytes()))
    }
}

impl fmt::Display for HeaderHash {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.as_string())
    }
}

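/// Data structure representing a block header.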
#[derive(Clone, Debug, SerialEncodable, SerialDecodable)]
pub struct Header {
    /// Block version
    pub version: u8,
    /// Hash of the previous block header
    pub previous: HeaderHash,
    /// Block height
    pub height: u32,
    /// Block creation timestamp
    pub timestamp: Timestamp,
    /// Block nonce
    pub nonce: u64,
    /// Merkle root of the block's transactions
    pub transactions_root: MerkleNode,
    /// Monotree state root
    pub state_root: StateHash,
    /// Proof of Work data for this block
    pub pow_data: PowData,
}

impl Header {
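    /// Generate a new `Header` from the given previous hash, height, timestamp and
    /// nonce, with the version derived from the height, an empty transactions root,
    /// an empty state root and native DarkFi PoW data.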
    pub fn new(previous: HeaderHash, height: u32, timestamp: Timestamp, nonce: u64) -> Self {
        let version = block_version(height);
        let transactions_root = MerkleTree::new(1).root(0).unwrap();
        let state_root = *EMPTY_HASH;
        let pow_data = PowData::Darkfi;
        Self {
            version,
            previous,
            height,
            timestamp,
            nonce,
            transactions_root,
            state_root,
            pow_data,
        }
    }

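    /// Compute the header's hash by hashing its full serialization with BLAKE3.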
    pub fn hash(&self) -> HeaderHash {
        let mut hasher = blake3::Hasher::new();

        self.encode(&mut hasher).expect("blake3 hasher");

        HeaderHash(hasher.finalize().into())
    }

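    /// Compute the header's template hash: a BLAKE3 hash over all header fields
    /// except `pow_data`.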
    pub fn template_hash(&self) -> HeaderHash {
        let mut hasher = blake3::Hasher::new();

        self.version.encode(&mut hasher).expect("blake3 hasher");
        self.previous.encode(&mut hasher).expect("blake3 hasher");
        self.height.encode(&mut hasher).expect("blake3 hasher");
        self.timestamp.encode(&mut hasher).expect("blake3 hasher");
        self.nonce.encode(&mut hasher).expect("blake3 hasher");
        self.transactions_root.encode(&mut hasher).expect("blake3 hasher");
        self.state_root.encode(&mut hasher).expect("blake3 hasher");

        HeaderHash(hasher.finalize().into())
    }
}

impl Default for Header {
    fn default() -> Self {
        Header::new(
            HeaderHash::new(blake3::hash(b"Let there be dark!").into()),
            0u32,
            Timestamp::current_time(),
            0u64,
        )
    }
}

impl fmt::Display for Header {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let s = format!(
            "{} {{\n\t{}: {}\n\t{}: {}\n\t{}: {}\n\t{}: {}\n\t{}: {}\n\t{}: {}\n\t{}: {}\n\t{}: {}\n\t{}: {:?}\n}}",
            "Header",
            "Hash",
            self.hash(),
            "Version",
            self.version,
            "Previous",
            self.previous,
            "Height",
            self.height,
            "Timestamp",
            self.timestamp,
            "Nonce",
            self.nonce,
            "Transactions Root",
            self.transactions_root,
            "State Root",
            blake3::Hash::from_bytes(self.state_root),
            "Proof of Work data",
            self.pow_data,
        );

        write!(f, "{s}")
    }
}

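/// Name of the sled tree storing block headers, keyed by header hash.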
pub const SLED_HEADER_TREE: &[u8] = b"_headers";
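/// Name of the sled tree storing headers retrieved during sync, keyed by height.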
pub const SLED_SYNC_HEADER_TREE: &[u8] = b"_sync_headers";

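/// Wrapper over the sled trees that store block headers.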
#[derive(Clone)]
pub struct HeaderStore {
    /// Main tree, storing block headers keyed by their hash
    pub main: sled::Tree,
    /// Sync tree, storing headers retrieved during sync, keyed by height
    pub sync: sled::Tree,
}

impl HeaderStore {
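    /// Instantiate a new `HeaderStore`, opening its sled trees from the provided database.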
    pub fn new(db: &sled::Db) -> Result<Self> {
        let main = db.open_tree(SLED_HEADER_TREE)?;
        let sync = db.open_tree(SLED_SYNC_HEADER_TREE)?;
        Ok(Self { main, sync })
    }

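    /// Insert a slice of [`Header`] into the main tree and return their hashes.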
    pub fn insert(&self, headers: &[Header]) -> Result<Vec<HeaderHash>> {
        let (batch, ret) = self.insert_batch(headers);
        self.main.apply_batch(batch)?;
        Ok(ret)
    }

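    /// Insert a slice of [`Header`] into the sync tree, keyed by height.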
    pub fn insert_sync(&self, headers: &[Header]) -> Result<()> {
        let batch = self.insert_batch_sync(headers);
        self.sync.apply_batch(batch)?;
        Ok(())
    }

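    /// Generate the sled batch corresponding to an insert into the main tree,
    /// along with the hashes of the given headers. The batch is not applied here.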
    pub fn insert_batch(&self, headers: &[Header]) -> (sled::Batch, Vec<HeaderHash>) {
        let mut ret = Vec::with_capacity(headers.len());
        let mut batch = sled::Batch::default();

        for header in headers {
            let headerhash = header.hash();
            batch.insert(headerhash.inner(), serialize(header));
            ret.push(headerhash);
        }

        (batch, ret)
    }

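    /// Generate the sled batch corresponding to an insert into the sync tree,
    /// keyed by the headers' heights. The batch is not applied here.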
    pub fn insert_batch_sync(&self, headers: &[Header]) -> sled::Batch {
        let mut batch = sled::Batch::default();

        for header in headers {
            batch.insert(&header.height.to_be_bytes(), serialize(header));
        }

        batch
    }

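    /// Check if the main tree contains the given header hash.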
    pub fn contains(&self, headerhash: &HeaderHash) -> Result<bool> {
        Ok(self.main.contains_key(headerhash.inner())?)
    }

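    /// Fetch the headers for the given hashes from the main tree. Each position in
    /// the result corresponds to the queried hash; in strict mode an error is returned
    /// if any header is not found, otherwise that position holds `None`.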
    pub fn get(&self, headerhashes: &[HeaderHash], strict: bool) -> Result<Vec<Option<Header>>> {
        let mut ret = Vec::with_capacity(headerhashes.len());

        for hash in headerhashes {
            if let Some(found) = self.main.get(hash.inner())? {
                let header = deserialize(&found)?;
                ret.push(Some(header));
                continue
            }
            if strict {
                return Err(Error::HeaderNotFound(hash.as_string()))
            }
            ret.push(None);
        }

        Ok(ret)
    }

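    /// Retrieve all headers from the main tree as `(hash, header)` pairs.
    /// Note this loads the whole tree into memory.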
    pub fn get_all(&self) -> Result<Vec<(HeaderHash, Header)>> {
        let mut headers = vec![];

        for header in self.main.iter() {
            headers.push(parse_record(header.unwrap())?);
        }

        Ok(headers)
    }

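    /// Retrieve all headers from the sync tree as `(height, header)` pairs.
    /// Note this loads the whole tree into memory.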
    pub fn get_all_sync(&self) -> Result<Vec<(u32, Header)>> {
        let mut headers = vec![];

        for record in self.sync.iter() {
            headers.push(parse_u32_key_record(record.unwrap())?);
        }

        Ok(headers)
    }

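    /// Fetch the header with the lowest height from the sync tree, if the tree is not empty.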
    pub fn get_first_sync(&self) -> Result<Option<Header>> {
        let Some(found) = self.sync.first()? else { return Ok(None) };
        let (_, header) = parse_u32_key_record(found)?;

        Ok(Some(header))
    }

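    /// Fetch the header with the highest height from the sync tree, if the tree is not empty.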
    pub fn get_last_sync(&self) -> Result<Option<Header>> {
        let Some(found) = self.sync.last()? else { return Ok(None) };
        let (_, header) = parse_u32_key_record(found)?;

        Ok(Some(header))
    }

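    /// Fetch up to `n` headers after the given height from the sync tree,
    /// in ascending height order.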
    pub fn get_after_sync(&self, height: u32, n: usize) -> Result<Vec<Header>> {
        let mut ret = vec![];

        let mut key = height;
        let mut counter = 0;
        while counter < n {
            if let Some(found) = self.sync.get_gt(key.to_be_bytes())? {
                let (height, header) = parse_u32_key_record(found)?;
                key = height;
                ret.push(header);
                counter += 1;
                continue
            }
            break
        }

        Ok(ret)
    }

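    /// Retrieve the number of headers stored in the sync tree.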
    pub fn len_sync(&self) -> usize {
        self.sync.len()
    }

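    /// Check if the sync tree is empty.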
    pub fn is_empty_sync(&self) -> bool {
        self.sync.is_empty()
    }

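    /// Remove the headers at the given heights from the sync tree.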
    pub fn remove_sync(&self, heights: &[u32]) -> Result<()> {
        let batch = self.remove_batch_sync(heights);
        self.sync.apply_batch(batch)?;
        Ok(())
    }

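    /// Remove all headers from the sync tree.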
    pub fn remove_all_sync(&self) -> Result<()> {
        let headers = self.get_all_sync()?;
        let heights = headers.iter().map(|h| h.0).collect::<Vec<u32>>();
        let batch = self.remove_batch_sync(&heights);
        self.sync.apply_batch(batch)?;
        Ok(())
    }

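    /// Generate the sled batch corresponding to removing the given heights from
    /// the sync tree. The batch is not applied here.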
    pub fn remove_batch_sync(&self, heights: &[u32]) -> sled::Batch {
        let mut batch = sled::Batch::default();

        for height in heights {
            batch.remove(&height.to_be_bytes());
        }

        batch
    }
}

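/// Overlay structure over a [`HeaderStore`], operating on a shared [`SledDbOverlayPtr`]
/// so that writes are staged in the overlay rather than applied directly to the database.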
pub struct HeaderStoreOverlay(SledDbOverlayPtr);

impl HeaderStoreOverlay {
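    /// Instantiate a new `HeaderStoreOverlay` over the provided overlay pointer,
    /// opening the headers tree within it.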
    pub fn new(overlay: &SledDbOverlayPtr) -> Result<Self> {
        overlay.lock().unwrap().open_tree(SLED_HEADER_TREE, true)?;
        Ok(Self(overlay.clone()))
    }

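    /// Insert a slice of [`Header`] into the overlay's headers tree and return their hashes.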
    pub fn insert(&self, headers: &[Header]) -> Result<Vec<HeaderHash>> {
        let mut ret = Vec::with_capacity(headers.len());
        let mut lock = self.0.lock().unwrap();

        for header in headers {
            let headerhash = header.hash();
            lock.insert(SLED_HEADER_TREE, headerhash.inner(), &serialize(header))?;
            ret.push(headerhash);
        }

        Ok(ret)
    }

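    /// Fetch the headers for the given hashes from the overlay. Each position in
    /// the result corresponds to the queried hash; in strict mode an error is returned
    /// if any header is not found, otherwise that position holds `None`.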
    pub fn get(&self, headerhashes: &[HeaderHash], strict: bool) -> Result<Vec<Option<Header>>> {
        let mut ret = Vec::with_capacity(headerhashes.len());
        let lock = self.0.lock().unwrap();

        for hash in headerhashes {
            if let Some(found) = lock.get(SLED_HEADER_TREE, hash.inner())? {
                let header = deserialize(&found)?;
                ret.push(Some(header));
                continue
            }
            if strict {
                return Err(Error::HeaderNotFound(hash.as_string()))
            }
            ret.push(None);
        }

        Ok(ret)
    }
}