mod mainnet1;
mod mainnet2;
mod mainnet3;
mod reset_halt_bit;
mod simple;
mod testnet72;
mod testnet74;
mod testnet76;
mod testnet77;
mod testnet78;

use anyhow::{ensure, Context};
use penumbra_sdk_governance::StateReadExt;
use penumbra_sdk_sct::component::clock::EpochRead;
use std::path::{Path, PathBuf};
use tracing::instrument;

use cnidarium::Storage;
use penumbra_sdk_app::SUBSTORE_PREFIXES;

use flate2::write::GzEncoder;
use flate2::Compression;
use std::fs::File;

#[derive(Debug)]
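/// Selects which migration of local chain state `pd` should run.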
pub enum Migration {
    ReadyToStart,
    SimpleMigration,
    Testnet72,
    Testnet74,
    Testnet76,
    Testnet77,
    Testnet78,
    Mainnet1,
    Mainnet2,
    Mainnet3,
}

impl Migration {
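    /// Runs this migration against the chain state in `pd_home`, and, if a
    /// `comet_home` is provided, updates the local CometBFT data to match the
    /// post-migration genesis.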
    #[instrument(skip(pd_home, genesis_start, force))]
    pub async fn migrate(
        &self,
        pd_home: PathBuf,
        comet_home: Option<PathBuf>,
        genesis_start: Option<tendermint::time::Time>,
        force: bool,
    ) -> anyhow::Result<()> {
        tracing::debug!(
            ?pd_home,
            ?genesis_start,
            ?force,
            "preparing to run migration!"
        );
        let rocksdb_dir = pd_home.join("rocksdb");
        let storage = Storage::load(rocksdb_dir, SUBSTORE_PREFIXES.to_vec()).await?;
        ensure!(
            storage.latest_snapshot().is_chain_halted().await || force,
            "to run a migration, the chain halt bit must be set to `true`, or the `--force` CLI flag must be passed"
        );

        let latest_version = storage.latest_version();
        let block_height = storage.latest_snapshot().get_block_height().await?;
        ensure!(
            latest_version == block_height || force,
            "local chain state version is corrupted: {} != {}",
            latest_version,
            block_height
        );

        tracing::info!("started migration");

        if let Migration::ReadyToStart = self {
            reset_halt_bit::migrate(storage, pd_home, genesis_start).await?;
            return Ok(());
        }

        match self {
            Migration::SimpleMigration => {
                simple::migrate(storage, pd_home.clone(), genesis_start).await?
            }
            Migration::Mainnet1 => {
                mainnet1::migrate(storage, pd_home.clone(), genesis_start).await?;
            }
            Migration::Mainnet2 => {
                mainnet2::migrate(storage, pd_home.clone(), genesis_start).await?;
            }
            Migration::Mainnet3 => {
                mainnet3::migrate(storage, pd_home.clone(), genesis_start).await?;
            }
            _ => unimplemented!("the specified migration is unimplemented"),
        }

        if let Some(comet_home) = comet_home {
            let genesis_path = pd_home.join("genesis.json");
            migrate_comet_data(comet_home, genesis_path).await?;
        }

        Ok(())
    }
}
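/// Packages the contents of `src_directory` into a gzipped tarball at
/// `archive_filepath`, optionally nesting the files under `subdir_within_archive`
/// inside the archive. Refuses to overwrite an existing archive file.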
pub fn archive_directory(
    src_directory: PathBuf,
    archive_filepath: PathBuf,
    subdir_within_archive: Option<String>,
) -> anyhow::Result<()> {
    if archive_filepath.exists() {
        tracing::error!(
            "export archive filepath already exists: {}",
            archive_filepath.display()
        );
        anyhow::bail!("refusing to overwrite existing archive");
    }

    tracing::info!(
        "creating archive {} -> {}",
        src_directory.display(),
        archive_filepath.display()
    );
    let tarball_file = File::create(&archive_filepath)
        .context("failed to create file for archive: check parent directory and permissions")?;
    let enc = GzEncoder::new(tarball_file, Compression::default());
    let mut tarball = tar::Builder::new(enc);
    let subdir_within_archive = subdir_within_archive.unwrap_or(String::from("."));
    tarball
        .append_dir_all(subdir_within_archive, src_directory.as_path())
        .context("failed to package archive contents")?;
    // Finish the archive and the gzip stream explicitly, so that any I/O errors
    // surface here instead of being silently ignored when the writers are dropped.
    tarball
        .into_inner()
        .context("failed to finish writing archive")?
        .finish()
        .context("failed to finish gzip stream")?;
    Ok(())
}
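/// Reads the timestamp of the latest committed block from the RocksDB state in `home`.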
pub async fn last_block_timestamp(home: PathBuf) -> anyhow::Result<tendermint::Time> {
    let rocksdb = home.join("rocksdb");
    let storage = Storage::load(rocksdb, SUBSTORE_PREFIXES.to_vec())
        .await
        .context("error loading store for timestamp")?;
    let state = storage.latest_snapshot();
    let last_block_time = state
        .get_current_block_timestamp()
        .await
        .context("error reading latest block timestamp")?;
    storage.release().await;
    Ok(last_block_time)
}
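/// Prepares the local CometBFT state for the post-migration chain: writes the new
/// genesis file into the CometBFT config, advances `priv_validator_state.json` to
/// the new initial height, and clears stale CometBFT data.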
#[instrument(skip_all)]
pub async fn migrate_comet_data(
    comet_home: PathBuf,
    new_genesis_file: PathBuf,
) -> anyhow::Result<()> {
    tracing::info!(?comet_home, ?new_genesis_file, "migrating comet data");

    let genesis_contents =
        std::fs::read_to_string(new_genesis_file).context("error reading new genesis file")?;
    let genesis_json: serde_json::Value =
        serde_json::from_str(&genesis_contents).context("error parsing new genesis file")?;
    tracing::info!(?genesis_json, "parsed genesis file");
    let initial_height = genesis_json["initial_height"]
        .as_str()
        .context("error reading initial_height from genesis file")?
        .parse::<u64>()?;

    let genesis_file = comet_home.join("config").join("genesis.json");
    tracing::info!(?genesis_file, "writing genesis file to comet config");
    std::fs::write(genesis_file, genesis_contents)
        .context("error writing genesis file to comet config")?;

    adjust_priv_validator_state(&comet_home, initial_height)?;

    clear_comet_data(&comet_home)?;

    Ok(())
}
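/// Raises the height recorded in CometBFT's `priv_validator_state.json` to
/// `initial_height`, so the validator signs at the post-migration height.
/// Fails if the recorded height is already at or beyond `initial_height`.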
#[instrument(skip_all)]
fn adjust_priv_validator_state(comet_home: &Path, initial_height: u64) -> anyhow::Result<()> {
    let priv_validator_state = comet_home.join("data").join("priv_validator_state.json");
    let current_state: serde_json::Value =
        serde_json::from_str(&std::fs::read_to_string(&priv_validator_state)?)?;

    let current_height = current_state["height"]
        .as_str()
        .context("error reading height from priv_validator_state.json")?
        .parse::<u64>()?;
    if current_height < initial_height {
        tracing::info!(
            "increasing height in priv_validator_state from {} to {}",
            current_height,
            initial_height
        );
        // CometBFT stores the height as a JSON string, so write it back the same way.
        let new_state = serde_json::json!({
            "height": initial_height.to_string(),
            "round": 0,
            "step": 0,
        });
        tracing::info!(?new_state, "updated priv_validator_state.json");
        std::fs::write(
            &priv_validator_state,
            &serde_json::to_string_pretty(&new_state)?,
        )?;
    } else {
        anyhow::bail!(
            "priv_validator_state height {} is already greater than or equal to initial_height {}",
            current_height,
            initial_height
        );
    }

    Ok(())
}
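/// Removes CometBFT data directories containing pre-migration chain data
/// (block store, state, evidence, and the consensus WAL), leaving
/// `priv_validator_state.json` in place.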
#[instrument(skip_all)]
fn clear_comet_data(comet_home: &Path) -> anyhow::Result<()> {
    let data_dir = comet_home.join("data");

    for subdir in &["evidence.db", "state.db", "blockstore.db", "cs.wal"] {
        let path = data_dir.join(subdir);
        if path.exists() {
            tracing::info!(?path, "removing directory");
            std::fs::remove_dir_all(path)?;
        }
    }

    Ok(())
}