
Commit d1ef94b

Ability to specify custom checkpoints via a csv file
1 parent 213f2e1 commit d1ef94b

8 files changed: +407, -19 lines


Cargo.lock

Lines changed: 4 additions & 0 deletions
Some generated files are not rendered by default.

Cargo.toml

Lines changed: 1 addition & 0 deletions
@@ -149,6 +149,7 @@ cfg-if = "1.0"
 chacha20poly1305 = "0.10"
 chrono = "0.4"
 clap = "4.5"
+csv = "1.3"
 ctor = "0.2"
 criterion = "0.5"
 crossterm = "0.28"

common/src/chain/config/mod.rs

Lines changed: 2 additions & 4 deletions
@@ -63,12 +63,10 @@ use super::{
     TokensFeeVersion,
 };
 
-use self::{
-    checkpoints::Checkpoints,
-    emission_schedule::{CoinUnit, DEFAULT_INITIAL_MINT},
-};
+use self::emission_schedule::{CoinUnit, DEFAULT_INITIAL_MINT};
 
 pub use builder::Builder;
+pub use checkpoints::Checkpoints;
 pub use emission_schedule::{EmissionSchedule, EmissionScheduleFn, EmissionScheduleTabular};
 
 const DEFAULT_MAX_FUTURE_BLOCK_TIME_OFFSET_V1: Duration = Duration::from_secs(120);
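
Since `Checkpoints` is now re-exported from the config module rather than imported only for internal use, downstream code can name it directly. A minimal sketch of what that enables, assuming the `common::chain::config` module path implied by the file's location; the `apply_custom_checkpoints` helper is hypothetical and not part of this commit:

// Assumed import path (based on common/src/chain/config/mod.rs); illustrative only.
use common::chain::config::Checkpoints;

// Hypothetical consumer that can now refer to the re-exported type directly.
fn apply_custom_checkpoints(_checkpoints: Checkpoints) {
    // ...
}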

node-lib/Cargo.toml

Lines changed: 8 additions & 4 deletions
@@ -24,17 +24,21 @@ utils-networking = { path = "../utils/networking" }
 
 anyhow.workspace = true
 clap = { workspace = true, features = ["derive"] }
+csv.workspace = true
+directories.workspace = true
 file-rotate.workspace = true
+fs4.workspace = true
 jsonrpsee = { workspace = true, features = ["macros"] }
-tokio = { workspace = true, default-features = false }
+paste.workspace = true
 serde = { workspace = true, features = ["derive"] }
+thiserror.workspace = true
+tokio = { workspace = true, default-features = false }
 toml.workspace = true
-directories.workspace = true
-paste.workspace = true
-fs4.workspace = true
 
 [dev-dependencies]
 crypto = { path = "../crypto" }
 randomness = { path = "../randomness" }
 
+ctor.workspace = true
+rstest.workspace = true
 tempfile.workspace = true
node-lib/src/checkpoints_from_file.rs (new file)

Lines changed: 225 additions & 0 deletions
@@ -0,0 +1,225 @@
// Copyright (c) 2025 RBB S.r.l

// SPDX-License-Identifier: MIT
// Licensed under the MIT License;
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://github.com/mintlayer/mintlayer-core/blob/master/LICENSE
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::{
    collections::BTreeMap,
    path::{Path, PathBuf},
    str::FromStr as _,
};

use common::{
    chain::GenBlock,
    primitives::{BlockHeight, Id, H256},
};
use utils::ensure;

pub fn read_checkpoints_from_csv_file(
    csv_file: &Path,
) -> Result<BTreeMap<BlockHeight, Id<GenBlock>>, CheckpontsFromCsvReadError> {
    let file =
        std::fs::File::open(csv_file).map_err(|err| CheckpontsFromCsvReadError::FileOpenError {
            file: csv_file.to_owned(),
            error: err.to_string(),
        })?;

    read_checkpoints_from_csv(file)
}

pub fn read_checkpoints_from_csv(
    csv: impl std::io::Read,
) -> Result<BTreeMap<BlockHeight, Id<GenBlock>>, CheckpontsFromCsvReadError> {
    // Note: flexible(true) means that lines with different field counts are allowed.
    // Our fields count is fixed to 2 and we only specify this to simplify the tests, where
    // we check for specific errors.
    let mut reader = csv::ReaderBuilder::new().has_headers(false).flexible(true).from_reader(csv);
    let expected_fields_count = 2;

    let mut checkpoints = BTreeMap::new();

    for (record_idx, result) in reader.records().enumerate() {
        let record = result.map_err(|err| CheckpontsFromCsvReadError::RecordReadError {
            error: err.to_string(),
        })?;

        ensure!(
            record.len() == expected_fields_count,
            CheckpontsFromCsvReadError::UnexpectedFieldsCount {
                record_idx,
                actual_fields_count: record.len(),
                expected_fields_count
            }
        );

        let block_height = record
            .get(0)
            .expect("field is known to be present")
            .parse::<u64>()
            .map_err(|_| CheckpontsFromCsvReadError::BadBlockHeight { record_idx })?;

        let block_id = H256::from_str(record.get(1).expect("field is known to be present"))
            .map_err(|_| CheckpontsFromCsvReadError::BadBlockId { record_idx })?;

        let already_existed =
            checkpoints.insert(BlockHeight::new(block_height), Id::new(block_id)).is_some();
        ensure!(
            !already_existed,
            CheckpontsFromCsvReadError::DuplicateCheckpoint {
                height: block_height
            }
        );
    }

    Ok(checkpoints)
}

#[derive(thiserror::Error, Clone, Debug, Eq, PartialEq)]
pub enum CheckpontsFromCsvReadError {
    #[error("Cannon open file '{file}': {error}")]
    FileOpenError { file: PathBuf, error: String },

    #[error("Error reading a record: {error}")]
    RecordReadError { error: String },

    #[error("Unexpected fields count in record {record_idx}: expected {expected_fields_count}, got {actual_fields_count}")]
    UnexpectedFieldsCount {
        record_idx: usize,
        actual_fields_count: usize,
        expected_fields_count: usize,
    },

    #[error("Bad block height in record {record_idx}")]
    BadBlockHeight { record_idx: usize },

    #[error("Bad block id in record {record_idx}")]
    BadBlockId { record_idx: usize },

    #[error("Duplicate checkpoint at height {height}")]
    DuplicateCheckpoint { height: u64 },
}

#[cfg(test)]
mod tests {
    use utils::concatln;

    use super::*;

    #[test]
    fn correct_read() {
        let mk_id = |id_str| Id::new(H256::from_str(id_str).unwrap());
        let data = concatln!(
            "500, C91C3DB7DFDCC296010546EC38F48A557D035DD0B34260BD6C5174709F8A7EB0",
            "1000, 1DCFB22374DA757882EEF26AF2B2D3ABDD1A4887C744346F6413C8D0B51DEBDF",
            "1500, 3F81279C128FF628C8F4055DF89173DDAA6597DAB7636E8B12CA386E7864DFE9"
        );
        let expected_checkpoints = BTreeMap::from([
            (
                BlockHeight::new(500),
                mk_id("C91C3DB7DFDCC296010546EC38F48A557D035DD0B34260BD6C5174709F8A7EB0"),
            ),
            (
                BlockHeight::new(1000),
                mk_id("1DCFB22374DA757882EEF26AF2B2D3ABDD1A4887C744346F6413C8D0B51DEBDF"),
            ),
            (
                BlockHeight::new(1500),
                mk_id("3F81279C128FF628C8F4055DF89173DDAA6597DAB7636E8B12CA386E7864DFE9"),
            ),
        ]);

        let checkpoints = read_checkpoints_from_csv(data.as_bytes()).unwrap();
        assert_eq!(checkpoints, expected_checkpoints);

        // Now write the csv to file and read it via `read_checkpoints_from_csv_file`.
        let temp_file = tempfile::NamedTempFile::new().unwrap();
        std::fs::write(temp_file.path(), data.as_bytes()).unwrap();
        let checkpoints_from_file = read_checkpoints_from_csv_file(temp_file.path()).unwrap();
        assert_eq!(checkpoints_from_file, expected_checkpoints);
    }

    #[test]
    fn bad_fields_count() {
        let data1 = concatln!(
            "500, C91C3DB7DFDCC296010546EC38F48A557D035DD0B34260BD6C5174709F8A7EB0",
            "1000, 1DCFB22374DA757882EEF26AF2B2D3ABDD1A4887C744346F6413C8D0B51DEBDF, 111",
            "1500, 3F81279C128FF628C8F4055DF89173DDAA6597DAB7636E8B12CA386E7864DFE9"
        );
        let err = read_checkpoints_from_csv(data1.as_bytes()).unwrap_err();
        assert_eq!(
            err,
            CheckpontsFromCsvReadError::UnexpectedFieldsCount {
                record_idx: 1,
                actual_fields_count: 3,
                expected_fields_count: 2
            }
        );

        let data1 = concatln!(
            "500, C91C3DB7DFDCC296010546EC38F48A557D035DD0B34260BD6C5174709F8A7EB0",
            "1000",
            "1500, 3F81279C128FF628C8F4055DF89173DDAA6597DAB7636E8B12CA386E7864DFE9"
        );
        let err = read_checkpoints_from_csv(data1.as_bytes()).unwrap_err();
        assert_eq!(
            err,
            CheckpontsFromCsvReadError::UnexpectedFieldsCount {
                record_idx: 1,
                actual_fields_count: 1,
                expected_fields_count: 2
            }
        );
    }

    #[test]
    fn bad_block_height() {
        let data = concatln!(
            "500, C91C3DB7DFDCC296010546EC38F48A557D035DD0B34260BD6C5174709F8A7EB0",
            "X000, 1DCFB22374DA757882EEF26AF2B2D3ABDD1A4887C744346F6413C8D0B51DEBDF",
            "1500, 3F81279C128FF628C8F4055DF89173DDAA6597DAB7636E8B12CA386E7864DFE9"
        );
        let err = read_checkpoints_from_csv(data.as_bytes()).unwrap_err();
        assert_eq!(
            err,
            CheckpontsFromCsvReadError::BadBlockHeight { record_idx: 1 }
        );
    }

    #[test]
    fn bad_block_id() {
        let data = concatln!(
            "500, C91C3DB7DFDCC296010546EC38F48A557D035DD0B34260BD6C5174709F8A7EB0",
            "1000, XDCFB22374DA757882EEF26AF2B2D3ABDD1A4887C744346F6413C8D0B51DEBDF",
            "1500, 3F81279C128FF628C8F4055DF89173DDAA6597DAB7636E8B12CA386E7864DFE9"
        );
        let err = read_checkpoints_from_csv(data.as_bytes()).unwrap_err();
        assert_eq!(
            err,
            CheckpontsFromCsvReadError::BadBlockId { record_idx: 1 }
        );
    }

    #[test]
    fn duplicate_checkpoint() {
        let data = concatln!(
            "500, C91C3DB7DFDCC296010546EC38F48A557D035DD0B34260BD6C5174709F8A7EB0",
            "500, 1DCFB22374DA757882EEF26AF2B2D3ABDD1A4887C744346F6413C8D0B51DEBDF",
            "1500, 3F81279C128FF628C8F4055DF89173DDAA6597DAB7636E8B12CA386E7864DFE9"
        );
        let err = read_checkpoints_from_csv(data.as_bytes()).unwrap_err();
        assert_eq!(
            err,
            CheckpontsFromCsvReadError::DuplicateCheckpoint { height: 500 }
        );
    }
}
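
For orientation, here is a small usage sketch that drives the parser above with in-memory data. The two-field, headerless layout (block height, then a hex block id) mirrors the test data in this file; the `parse_example` function, the inline CSV string, and the `crate::checkpoints_from_file` import path are illustrative assumptions rather than part of the commit (the module is private to node-lib, so such code would live inside the crate):

// Illustrative sketch only: parse a couple of checkpoints from an in-memory CSV
// string using the function introduced above. Data values are reused from the tests.
use std::collections::BTreeMap;

use common::{
    chain::GenBlock,
    primitives::{BlockHeight, Id},
};

use crate::checkpoints_from_file::read_checkpoints_from_csv;

fn parse_example() -> BTreeMap<BlockHeight, Id<GenBlock>> {
    // One checkpoint per record: block height, then the block id as 64 hex characters.
    let data = "500, C91C3DB7DFDCC296010546EC38F48A557D035DD0B34260BD6C5174709F8A7EB0\n\
                1000, 1DCFB22374DA757882EEF26AF2B2D3ABDD1A4887C744346F6413C8D0B51DEBDF\n";

    read_checkpoints_from_csv(data.as_bytes()).expect("example data is well formed")
}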

node-lib/src/lib.rs

Lines changed: 9 additions & 0 deletions
@@ -15,6 +15,7 @@
 
 //! Top-level node runner as a library
 
+mod checkpoints_from_file;
 mod config_files;
 mod mock_time;
 pub mod node_controller;

@@ -43,3 +44,11 @@ pub fn default_rpc_config(chain_config: &ChainConfig) -> RpcConfigFile {
 pub fn init_logging(_opts: &Options) {
     logging::init_logging()
 }
+
+#[cfg(test)]
+mod tests {
+    #[ctor::ctor]
+    fn init() {
+        logging::init_logging();
+    }
+}
