From 796a29be672a4d29f62775aef732fbeb5d9b1878 Mon Sep 17 00:00:00 2001 From: Stefan Schneider <31004026+schneiderstefan@users.noreply.github.com> Date: Wed, 4 Dec 2024 13:04:38 +0100 Subject: [PATCH 01/23] perf: Copy outside write lock (#2963) In the state manager's commit_and_certify function, it now does the copy_state step before taking the write lock for the list of states. Several API calls to the state manager rely on reading from the list of states, and they are blocked otherwise. This fixes a regression introduced by commit [e3ee007](https://github.com/dfinity/ic/commit/e3ee0072c1ea003c489b59fa10df2610c60fe2e6) --- rs/state_manager/src/lib.rs | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/rs/state_manager/src/lib.rs b/rs/state_manager/src/lib.rs index 22696200389..d2c3722f51f 100644 --- a/rs/state_manager/src/lib.rs +++ b/rs/state_manager/src/lib.rs @@ -3493,6 +3493,16 @@ impl StateManager for StateManagerImpl { Self::compute_certification_metadata(&self.metrics, &self.log, &state) .unwrap_or_else(|err| fatal!(self.log, "Failed to compute hash tree: {:?}", err)); + // This step is expensive, so we do it before the write lock for `states`. + let next_tip = { + let _timer = self + .metrics + .checkpoint_op_duration + .with_label_values(&["copy_state"]) + .start_timer(); + Some((height, state.deref().clone())) + }; + let mut states = self.states.write(); #[cfg(debug_assertions)] check_certifications_metadata_snapshots_and_states_metadata_are_consistent(&states); @@ -3573,14 +3583,7 @@ impl StateManager for StateManagerImpl { // The next call to take_tip() will take care of updating the // tip if needed. - { - let _timer = self - .metrics - .checkpoint_op_duration - .with_label_values(&["copy_state"]) - .start_timer(); - states.tip = Some((height, state.deref().clone())); - } + states.tip = next_tip; if scope == CertificationScope::Full { self.release_lock_and_persist_metadata(states); From ed008cf2e9af098a38656271aa1719d3d87ed538 Mon Sep 17 00:00:00 2001 From: Dimitris Sarlis Date: Wed, 4 Dec 2024 13:38:27 +0100 Subject: [PATCH 02/23] chore: Add best practices on use of panics in the DSM (#2961) This PR adds a document on best practices about using (or not using) panics in the deterministic state machine. The file is added in the replicated_state directory with symbolic links to the "top" level directories for the two teams mostly working on the DSM (execution and message routing). 
--------- Co-authored-by: Alin Sinpalean <58422065+alin-at-dfinity@users.noreply.github.com> --- .../best-practices-panics.md | 1 + rs/messaging/best-practices-panics.md | 1 + rs/replicated_state/best-practices-panics.md | 59 +++++++++++++++++++ ....md => best-practices-replicated-state.md} | 0 4 files changed, 61 insertions(+) create mode 120000 rs/execution_environment/best-practices-panics.md create mode 120000 rs/messaging/best-practices-panics.md create mode 100644 rs/replicated_state/best-practices-panics.md rename rs/replicated_state/{README.md => best-practices-replicated-state.md} (100%) diff --git a/rs/execution_environment/best-practices-panics.md b/rs/execution_environment/best-practices-panics.md new file mode 120000 index 00000000000..32bbbbad866 --- /dev/null +++ b/rs/execution_environment/best-practices-panics.md @@ -0,0 +1 @@ +../replicated_state/best-practices-panics.md \ No newline at end of file diff --git a/rs/messaging/best-practices-panics.md b/rs/messaging/best-practices-panics.md new file mode 120000 index 00000000000..32bbbbad866 --- /dev/null +++ b/rs/messaging/best-practices-panics.md @@ -0,0 +1 @@ +../replicated_state/best-practices-panics.md \ No newline at end of file diff --git a/rs/replicated_state/best-practices-panics.md b/rs/replicated_state/best-practices-panics.md new file mode 100644 index 00000000000..851f0665fb0 --- /dev/null +++ b/rs/replicated_state/best-practices-panics.md @@ -0,0 +1,59 @@ +# Best Practices: Using panics in the Deterministic State Machine + +## Scope of the document + +The scope of this document is to provide guidance on how/when to use panics in the Deterministic State Machine (DSM). Such panics are very likely to get the DSM into a crash loop, so they should only be reserved for cases where this is the only correct behavior. + +## Target audience + +The audience for this document are primarily engineers working in the Execution and Message Routing teams. These are the most common contributors to the DSM but it might be relevant for other DFINITY engineers as well if they find themselves in the position of needing to change something in the DSM. + +## Background + +In software development, it’s a fairly common practice when writing a service to let it crash in cases where some invariant is violated or the binary has gotten into an otherwise unrecoverable state. Typically, letting the binary restart resolves issues (at least on the user facing side) while engineers can separately look into why the crash was hit in the first place and fix the root cause behind the scenes. However, this practice does not really work well when it comes to the DSM of the IC: if replicas hit such a case somewhere in the DSM, they will most likely keep hitting it as they will try to process the same messages again after they restart from the last checkpoint (given the IC’s nature as a deterministic replicated state machine). + +If a subnet’s replicas enter such a crash loop, we typically need to perform a subnet recovery to get the subnet back up and running. This process can get quite lengthy and cumbersome (especially for subnets with large states), requires the involvement of a few teams (typically an expert from our side to perform any necessary fixes, the consensus team to perform the replay of state and the DRE team to coordinate replica version election and rollout on the affected subnet(s)) and so it should only be reserved for cases where there is really not much else that can be done. 
+ +In many cases having the replicas panic is rather extreme and we can get away with raising a [critical error](https://sourcegraph.com/github.com/dfinity/ic@d7cac19658a397f862f9e162c32ac02d21a3d77d/-/blob/rs/monitoring/metrics/src/registry.rs?L160) instead and letting the replicas continue running. The critical error will inform us about potential issues since it will page the FIT on call while the subnet continues to make progress without affecting end user experience. + +The remainder of this document attempts to provide some guidance around when it’s ok to use panics or one should instead use the more forgiving critical error approach. + +## Hard Invariants vs Soft Invariants + +Code in `/rs/replicated_state` and `/rs/state_manager` uses the concept of hard vs soft invariants to decide when it’s necessary to panic and when it’s sufficient to raise a critical error or even just log an error message. + +A [hard] invariant refers to a condition that (1) holds all the time, and (2) whose violation affects code correctness: + +- We check these during deserialization and return an error (causing an upstream panic) if they don't hold. +- It is fine to assert/debug_assert (depending on how expensive these checks are) for them in production code. +- Proptests for these invariants are recommended, but can be skipped if there is consensus that they are not needed. + +Soft invariants are a superset of hard invariants above: + +- These include conditions that don't affect correctness of the code, but we still aim to uphold them at all times. +- They can be self healing, i.e., a violation will be fixed upon the next (few) modification(s). +- We never assert for them in production code, but may debug_assert and raise critical errors in case of a violation upon deserialization (cf. deserialization of `BlockmakerMetricsTimeSeries`). +- An example for a soft invariant is an upper bound on the number of elements in a data structure that maintains a sliding window of snapshots, where the actual number of snapshots does not affect correctness and we just want to ensure it does not grow indefinitely. + +Important: we do not attempt to restore invariants or soft invariants upon deserializing as it could change the past and lead to divergence if only some replicas restart. + +One important aspect to consider is state loading: it is relatively straightforward to preserve an invariant inside an in-memory data structure; but determinism often requires accepting values deserialized from a checkpoint. An invalid loaded value is OK for soft invariants, but must necessarily result in a panic for hard invariants. + + +## When should panics be used + +Given the impact of panics in the DSM, it is important to limit their usage only to cases when they are absolutely necessary. Here’s a list of common cases where panics should be preferred: + +- A system resource is exhausted. E.g. we do not have enough file descriptors and cannot create a new file in the State Manager. There’s not much we can do about it automatically, so we should panic in that case and handle it offline. +- An invariant in the implementation of a data structure is broken. In these cases, the data structure can perform any invariant checks internally in its implementation which is typically easier to verify/inspect if everything is contained within the data structure. 
An example of this is the [TaskQueue](https://sourcegraph.com/github.com/dfinity/ic@d7cac19658a397f862f9e162c32ac02d21a3d77d/-/blob/rs/replicated_state/src/canister_state/system_state.rs?L284) data structure. It’s also a good idea to have tests that cover any panics added, which makes them typically safe to use. Alternatively, consider whether debug_asserts would be enough, assuming there is good test coverage for the data structure. +- An invariant is broken which could lead to corrupted state, state divergence or some otherwise broken state that is difficult to recover from. In that case a panic should be preferred, to avoid creating further issues that are even harder to resolve. A good example of this is the case where we handle [reserved cycles](https://sourcegraph.com/github.com/dfinity/ic@d7cac19658a397f862f9e162c32ac02d21a3d77d/-/blob/rs/system_api/src/sandbox_safe_system_state.rs?L514) in the system API. +- Hard invariants as defined above can result in panics when they get violated. + + +## When should panics be avoided + +In some cases it’s best to avoid panics and instead either handle the errors in place or, if that is not easy to do, raise critical errors. debug_asserts are also highly encouraged for these cases. + +- Wrong user input should never result in a replica panic. +- Invariants across components are very hard to maintain and keep up to date as code evolves. Avoid adding panics related to such invariants and use critical errors instead, to make the teams aware of instances where we hit an unexpected case (some exceptions might apply if a violation could lead to corrupted state or be hard to recover from, but panicking really should be the last resort). A good example here is [load_canister_snapshot](https://www.google.com/url?q=https://sourcegraph.com/github.com/dfinity/ic@d7cac19658a397f862f9e162c32ac02d21a3d77d/-/blob/rs/execution_environment/src/canister_manager.rs?L2108-2126&sa=D&source=docs&ust=1733308984480434&usg=AOvVaw2K8SR6s3VMBOkSTryuhxga) where we check whether a scheduling precondition holds in the canister manager. In this case, using a panic would be very aggressive, stopping progress of the whole subnet for a bug related to canister snapshots. Returning an error to the user and raising a critical error is the more appropriate handling, containing the damage, should such a bug exist, to the snapshot functionality alone. +- Soft invariants as defined earlier should never result in a panic when they get violated. Instead, raise a critical error and/or use a debug_assert (see the sketch below).
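To make the hard-vs-soft-invariant guidance above concrete, here is a minimal sketch of the pattern. It is illustrative only: `SnapshotWindow`, `MAX_SNAPSHOTS` and the plain atomic counter are hypothetical stand-ins, and the real replica code raises critical errors through the metrics-registry helper linked above rather than a bare counter.

```rust
use std::sync::atomic::{AtomicU64, Ordering};

/// Stand-in for the critical-error counter linked above; incrementing the real
/// one pages the on-call engineer while the subnet keeps making progress.
static SOFT_INVARIANT_VIOLATIONS: AtomicU64 = AtomicU64::new(0);

/// Soft invariant: a sliding window of snapshots should stay bounded.
const MAX_SNAPSHOTS: usize = 60;

/// Hypothetical data structure, used only to illustrate the two kinds of checks.
struct SnapshotWindow {
    heights: Vec<u64>,
}

impl SnapshotWindow {
    /// Deserialization-style constructor following the guidance above.
    fn try_from_heights(heights: Vec<u64>) -> Result<Self, String> {
        // Hard invariant: a violation affects correctness, so reject the value and
        // let the caller panic upstream (a deliberate crash loop).
        if !heights.windows(2).all(|w| w[0] <= w[1]) {
            return Err("hard invariant violated: snapshot heights not sorted".to_string());
        }
        // Soft invariant: correctness is unaffected and the window is self-healing,
        // so only debug_assert in tests and raise a critical error in production.
        if heights.len() > MAX_SNAPSHOTS {
            debug_assert!(
                false,
                "soft invariant violated: {} snapshots (max {})",
                heights.len(),
                MAX_SNAPSHOTS
            );
            SOFT_INVARIANT_VIOLATIONS.fetch_add(1, Ordering::Relaxed);
            eprintln!("critical error: snapshot window exceeds {MAX_SNAPSHOTS} entries");
        }
        // Per the document, we deliberately do *not* repair the loaded value here:
        // doing so could "change the past" and cause divergence if only some
        // replicas restart.
        Ok(Self { heights })
    }

    fn len(&self) -> usize {
        self.heights.len()
    }
}

fn main() {
    // Upstream caller panics on a hard-invariant violation, by design.
    let window = SnapshotWindow::try_from_heights(vec![1, 2, 3])
        .unwrap_or_else(|err| panic!("{}", err));
    println!("loaded {} snapshots", window.len());
}
```

The asymmetry is the point: the hard-invariant check refuses the value outright (and the resulting upstream panic is an intentional crash loop), while the soft-invariant check only records the violation and lets the replica keep running.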
diff --git a/rs/replicated_state/README.md b/rs/replicated_state/best-practices-replicated-state.md similarity index 100% rename from rs/replicated_state/README.md rename to rs/replicated_state/best-practices-replicated-state.md From af2605ffb831797432eefac8fc889a7e52f7e48b Mon Sep 17 00:00:00 2001 From: nabdullindfinity <135595192+nabdullindfinity@users.noreply.github.com> Date: Wed, 4 Dec 2024 14:39:05 +0100 Subject: [PATCH 03/23] feat(NODE-1534): discard for data partition (#2953) NODE-1534 For unassigned nodes, issue NVMe `discard` commands via `fstrim` on the replica data partition --- .../monitoring/fstrim/fstrim_tool.service | 2 +- rs/ic_os/fstrim_tool/src/lib.rs | 23 ++++++++-- rs/ic_os/fstrim_tool/src/main.rs | 6 +++ rs/ic_os/fstrim_tool/src/tests.rs | 45 +++++++++++++++++++ .../fstrim_tool/tests/integration_tests.rs | 28 ++++++++++++ 5 files changed, 99 insertions(+), 5 deletions(-) diff --git a/ic-os/components/monitoring/fstrim/fstrim_tool.service b/ic-os/components/monitoring/fstrim/fstrim_tool.service index 7a6247abacf..e8698a08fa8 100644 --- a/ic-os/components/monitoring/fstrim/fstrim_tool.service +++ b/ic-os/components/monitoring/fstrim/fstrim_tool.service @@ -3,7 +3,7 @@ Description=Discard unused blocks on /var/lib/ic/crypto filesystem [Service] Type=oneshot -ExecStart=/opt/ic/bin/fstrim_tool --target /var/lib/ic/crypto --metrics /run/node_exporter/collector_textfile/fstrim.prom +ExecStart=/opt/ic/bin/fstrim_tool --target /var/lib/ic/crypto --datadir_target /var/lib/ic/data --metrics /run/node_exporter/collector_textfile/fstrim.prom DeviceAllow=/dev/vda IPAddressDeny=any LockPersonality=yes diff --git a/rs/ic_os/fstrim_tool/src/lib.rs b/rs/ic_os/fstrim_tool/src/lib.rs index 30fb342a0d4..af39d6c6f5d 100644 --- a/rs/ic_os/fstrim_tool/src/lib.rs +++ b/rs/ic_os/fstrim_tool/src/lib.rs @@ -85,19 +85,34 @@ fn write_initialized_metrics_if_not_exist(metrics_filename: &str) -> Result<()> write_metrics_using_tmp_file(&metrics, metrics_filename) } +fn is_node_assigned() -> bool { + Path::new("/var/lib/ic/data/cups/cup.types.v1.CatchUpPackage.pb").exists() +} + pub fn fstrim_tool( command: &str, metrics_filename: String, target: String, init_only: bool, + datadir_target: String, ) -> Result<()> { let res = match init_only { false => { let start = std::time::Instant::now(); - let res = run_command(command, &target); - let elapsed = start.elapsed(); - update_metrics(elapsed, res.is_ok(), &metrics_filename)?; - res + let res_target = run_command(command, &target); + let elapsed_target = start.elapsed(); + update_metrics(elapsed_target, res_target.is_ok(), &metrics_filename)?; + + if !datadir_target.is_empty() && !is_node_assigned() { + // TODO observability changes needed, expand the metrics logic + // let start_datadir = std::time::Instant::now(); + let res_datadir = run_command(command, &datadir_target); + // let elapsed_datadir = start_datadir.elapsed(); + // update_metrics(elapsed_datadir, res_datadir.is_ok(), &metrics_filename)?; + res_target.and(res_datadir) + } else { + res_target + } } true => write_initialized_metrics_if_not_exist(&metrics_filename), }; diff --git a/rs/ic_os/fstrim_tool/src/main.rs b/rs/ic_os/fstrim_tool/src/main.rs index 18bf0473351..a8590f07aa6 100644 --- a/rs/ic_os/fstrim_tool/src/main.rs +++ b/rs/ic_os/fstrim_tool/src/main.rs @@ -19,6 +19,11 @@ struct FsTrimArgs { /// To be run on node start. If the metrics file exists, only the timestamps will be updated. /// If the metrics file does not exist, it will be created. 
init_only: bool, + #[arg(short = 'd', long = "datadir_target", default_value = "")] + /// Second target directory to run `fstrim` on, but only if the node is unassigned. + /// If the node is assigned to a subnet, this directory will be ignored. + /// Intended to be used with /var/lib/ic/data only, but can be used with any directory. + datadir_target: String, } pub fn main() -> Result<()> { @@ -29,5 +34,6 @@ pub fn main() -> Result<()> { opts.metrics_filename, opts.target, opts.init_only, + opts.datadir_target, ) } diff --git a/rs/ic_os/fstrim_tool/src/tests.rs b/rs/ic_os/fstrim_tool/src/tests.rs index f794bcff813..7fbb921466f 100644 --- a/rs/ic_os/fstrim_tool/src/tests.rs +++ b/rs/ic_os/fstrim_tool/src/tests.rs @@ -172,6 +172,8 @@ fn should_return_error_from_unsuccessfully_run_command() { #[test] fn should_fail_but_write_metrics_if_command_fails() { let tmp_dir = tempdir().expect("temp dir creation should succeed"); + let tmp_dir2 = tempdir().expect("temp dir creation should succeed"); + let metrics_file = tmp_dir.path().join("fstrim.prom"); assert_matches!( fstrim_tool( @@ -186,6 +188,11 @@ fn should_fail_but_write_metrics_if_command_fails() { .expect("tmp_dir path should be valid") .to_string(), false, + tmp_dir2 + .path() + .to_str() + .expect("tmp_dir path should be valid") + .to_string(), ), Err(err) if err.to_string().contains("Failed to run command") @@ -197,6 +204,8 @@ fn should_fail_but_write_metrics_if_command_fails() { #[test] fn should_fail_if_command_cannot_be_run() { let tmp_dir = tempdir().expect("temp dir creation should succeed"); + let tmp_dir2 = tempdir().expect("temp dir creation should succeed"); + let metrics_file = tmp_dir.path().join("fstrim.prom"); assert_matches!( fstrim_tool( @@ -211,6 +220,11 @@ fn should_fail_if_command_cannot_be_run() { .expect("tmp_dir path should be valid") .to_string(), false, + tmp_dir2 + .path() + .to_str() + .expect("tmp_dir path should be valid") + .to_string(), ), Err(err) if err.to_string().contains("Failed to run command") @@ -220,6 +234,7 @@ fn should_fail_if_command_cannot_be_run() { #[test] fn should_not_run_command_but_initialize_metrics_if_flag_set() { let tmp_dir = tempdir().expect("temp dir creation should succeed"); + let tmp_dir2 = tempdir().expect("temp dir creation should succeed"); let metrics_file = tmp_dir.path().join("fstrim.prom"); assert!(fstrim_tool( "/non/existent/command", @@ -233,6 +248,11 @@ fn should_not_run_command_but_initialize_metrics_if_flag_set() { .expect("tmp_dir path should be valid") .to_string(), true, + tmp_dir2 + .path() + .to_str() + .expect("tmp_dir path should be valid") + .to_string(), ) .is_ok()); @@ -242,6 +262,8 @@ fn should_not_run_command_but_initialize_metrics_if_flag_set() { #[test] fn should_not_overwrite_existing_metrics_if_metrics_init_flag_set() { let tmp_dir = tempdir().expect("temp dir creation should succeed"); + let tmp_dir2 = tempdir().expect("temp dir creation should succeed"); + let metrics_file = tmp_dir.path().join("fstrim.prom"); assert!(fstrim_tool( "true", @@ -255,6 +277,11 @@ fn should_not_overwrite_existing_metrics_if_metrics_init_flag_set() { .expect("tmp_dir path should be valid") .to_string(), false, + tmp_dir2 + .path() + .to_str() + .expect("tmp_dir path should be valid") + .to_string(), ) .is_ok()); @@ -270,6 +297,11 @@ fn should_not_overwrite_existing_metrics_if_metrics_init_flag_set() { .expect("tmp_dir path should be valid") .to_string(), true, + tmp_dir2 + .path() + .to_str() + .expect("tmp_dir path should be valid") + .to_string(), ) .is_ok()); @@ -280,6 
+312,7 @@ fn should_not_overwrite_existing_metrics_if_metrics_init_flag_set() { fn should_fail_if_metrics_file_cannot_be_written_to() { let metrics_file = PathBuf::from("/non/existent/directory/fstrim.prom"); let tmp_dir = tempdir().expect("temp dir creation should succeed"); + let tmp_dir2 = tempdir().expect("temp dir creation should succeed"); assert_matches!( fstrim_tool( "true", @@ -293,6 +326,11 @@ fn should_fail_if_metrics_file_cannot_be_written_to() { .expect("tmp_dir path should be valid") .to_string(), false, + tmp_dir2 + .path() + .to_str() + .expect("tmp_dir path should be valid") + .to_string(), ), Err(err) if err.to_string().contains("Failed to write metrics to file") @@ -302,6 +340,8 @@ fn should_fail_if_metrics_file_cannot_be_written_to() { #[test] fn should_fail_if_target_is_not_a_directory() { let tmp_dir = tempdir().expect("temp dir creation should succeed"); + let tmp_dir2 = tempdir().expect("temp dir creation should succeed"); + let metrics_file = tmp_dir.path().join("fstrim.prom"); let target = PathBuf::from("/non/existent/target/directory"); let expected_error = format!( @@ -320,6 +360,11 @@ fn should_fail_if_target_is_not_a_directory() { .expect("tmp_dir path should be valid") .to_string(), false, + tmp_dir2 + .path() + .to_str() + .expect("tmp_dir path should be valid") + .to_string(), ), Err(err) if err.to_string() == expected_error diff --git a/rs/ic_os/fstrim_tool/tests/integration_tests.rs b/rs/ic_os/fstrim_tool/tests/integration_tests.rs index 76d6d319c72..3701250642c 100644 --- a/rs/ic_os/fstrim_tool/tests/integration_tests.rs +++ b/rs/ic_os/fstrim_tool/tests/integration_tests.rs @@ -105,6 +105,34 @@ fn should_fail_but_write_metrics_if_target_is_not_a_directory() { assert_metrics_file_content(&metrics_file, false, 1); } +// This fails if not tested under root user as the successful execution of the 1st target calls fstrim +// #[test] +// fn should_fail_but_write_metrics_if_data_target_is_not_a_directory() { +// let tmp_dir = tempdir().expect("temp dir creation should succeed"); +// let metrics_file = tmp_dir.path().join("fstrim.prom"); +// new_fstrim_tool_command() +// .args([ +// "--metrics", +// metrics_file +// .to_str() +// .expect("metrics file path should be valid"), +// "--target", +// tmp_dir +// .path() +// .to_str() +// .expect("tmp_dir path should be valid"), +// "--datadir_target", +// "/not/a/directory", +// ]) +// .assert() +// .stdout(predicate::str::is_empty()) +// .stderr(predicate::str::contains("not a directory")) +// .failure(); +// +// // As metrics now only target the main target, success will be reported +// assert_metrics_file_content(&metrics_file, true, 1); +// } + #[test] fn should_fail_but_write_metrics_with_discard_not_supported_with_correct_parameters() { let tmp_dir = tempdir().expect("temp dir creation should succeed"); From f97e9399f1f4c3a0174dfb85c2fe6f8ee0ec07c8 Mon Sep 17 00:00:00 2001 From: Carly Gundy <47304080+cgundy@users.noreply.github.com> Date: Wed, 4 Dec 2024 14:51:44 +0100 Subject: [PATCH 04/23] refactor(IDX): move CI_JOB_URL (#2949) --- .github/actions/bazel-test-all/action.yaml | 5 +++-- .github/workflows-source/ci-main.yml | 1 - .github/workflows-source/release-testing.yml | 1 - .github/workflows-source/schedule-daily.yml | 1 - .github/workflows-source/schedule-hourly.yml | 1 - .github/workflows/ci-main.yml | 1 - .github/workflows/release-testing.yml | 1 - .github/workflows/schedule-daily.yml | 1 - .github/workflows/schedule-hourly.yml | 1 - .github/workflows/schedule-rust-bench.yml | 1 - 
.github/workflows/system-tests-k8s.yml | 1 - 11 files changed, 3 insertions(+), 12 deletions(-) diff --git a/.github/actions/bazel-test-all/action.yaml b/.github/actions/bazel-test-all/action.yaml index 85d5e2f62bb..2317de7b86b 100644 --- a/.github/actions/bazel-test-all/action.yaml +++ b/.github/actions/bazel-test-all/action.yaml @@ -68,9 +68,10 @@ runs: BAZEL_CI_CONFIG: ${{ inputs.BAZEL_CI_CONFIG }} BAZEL_EXTRA_ARGS: ${{ inputs.BAZEL_EXTRA_ARGS }} BAZEL_STARTUP_ARGS: ${{ inputs.BAZEL_STARTUP_ARGS }} + BRANCH_HEAD_SHA: ${{ github.event.pull_request.head.sha }} + BUILDEVENT_APIKEY: ${{ inputs.BUILDEVENT_APIKEY }} CI_EVENT_NAME: ${{ github.event_name }} + CI_JOB_URL: "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" CI_PULL_REQUEST_TARGET_BRANCH_NAME: ${{ github.event.pull_request.base.ref }} MERGE_BASE_SHA: ${{ github.event.pull_request.base.sha }} - BRANCH_HEAD_SHA: ${{ github.event.pull_request.head.sha }} - BUILDEVENT_APIKEY: ${{ inputs.BUILDEVENT_APIKEY }} SSH_PRIVATE_KEY: ${{ inputs.SSH_PRIVATE_KEY }} diff --git a/.github/workflows-source/ci-main.yml b/.github/workflows-source/ci-main.yml index b99f14cf3f5..0d7cd19b843 100644 --- a/.github/workflows-source/ci-main.yml +++ b/.github/workflows-source/ci-main.yml @@ -23,7 +23,6 @@ concurrency: env: CI_COMMIT_SHA: ${{ github.sha }} CI_JOB_NAME: ${{ github.job }} - CI_JOB_URL: "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" CI_PROJECT_DIR: ${{ github.workspace }} CI_EVENT_NAME: ${{ github.event_name }} BRANCH_NAME: ${{ github.head_ref || github.ref_name }} diff --git a/.github/workflows-source/release-testing.yml b/.github/workflows-source/release-testing.yml index 8611ef4d72e..205927c98c3 100644 --- a/.github/workflows-source/release-testing.yml +++ b/.github/workflows-source/release-testing.yml @@ -15,7 +15,6 @@ concurrency: env: CI_COMMIT_SHA: ${{ github.sha }} CI_JOB_NAME: ${{ github.job }} - CI_JOB_URL: "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" CI_PROJECT_DIR: ${{ github.workspace }} BRANCH_NAME: ${{ github.event.workflow_run.head_branch || github.ref_name }} CI_RUN_ID: ${{ github.run_id }} diff --git a/.github/workflows-source/schedule-daily.yml b/.github/workflows-source/schedule-daily.yml index 8026c4b14e4..8e529a4c62f 100644 --- a/.github/workflows-source/schedule-daily.yml +++ b/.github/workflows-source/schedule-daily.yml @@ -10,7 +10,6 @@ env: BRANCH_NAME: ${{ github.head_ref || github.ref_name }} CI_COMMIT_SHA: ${{ github.sha }} CI_JOB_NAME: ${{ github.job }} - CI_JOB_URL: "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" CI_PROJECT_DIR: ${{ github.workspace }} CI_RUN_ID: ${{ github.run_id }} BUILDEVENT_DATASET: "github-ci-dfinity" diff --git a/.github/workflows-source/schedule-hourly.yml b/.github/workflows-source/schedule-hourly.yml index d8c123d4986..d9a5c4af6c3 100644 --- a/.github/workflows-source/schedule-hourly.yml +++ b/.github/workflows-source/schedule-hourly.yml @@ -9,7 +9,6 @@ env: BRANCH_NAME: ${{ github.head_ref || github.ref_name }} CI_COMMIT_SHA: ${{ github.sha }} CI_JOB_NAME: ${{ github.job }} - CI_JOB_URL: "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" CI_PROJECT_DIR: ${{ github.workspace }} CI_RUN_ID: ${{ github.run_id }} RUSTFLAGS: "--remap-path-prefix=${CI_PROJECT_DIR}=/ic" diff --git a/.github/workflows/ci-main.yml b/.github/workflows/ci-main.yml index c4c85e35889..e3ac0d83aa7 100644 --- 
a/.github/workflows/ci-main.yml +++ b/.github/workflows/ci-main.yml @@ -20,7 +20,6 @@ concurrency: env: CI_COMMIT_SHA: ${{ github.sha }} CI_JOB_NAME: ${{ github.job }} - CI_JOB_URL: "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" CI_PROJECT_DIR: ${{ github.workspace }} CI_EVENT_NAME: ${{ github.event_name }} BRANCH_NAME: ${{ github.head_ref || github.ref_name }} diff --git a/.github/workflows/release-testing.yml b/.github/workflows/release-testing.yml index a685050eaed..c84af5077db 100644 --- a/.github/workflows/release-testing.yml +++ b/.github/workflows/release-testing.yml @@ -12,7 +12,6 @@ concurrency: env: CI_COMMIT_SHA: ${{ github.sha }} CI_JOB_NAME: ${{ github.job }} - CI_JOB_URL: "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" CI_PROJECT_DIR: ${{ github.workspace }} BRANCH_NAME: ${{ github.event.workflow_run.head_branch || github.ref_name }} CI_RUN_ID: ${{ github.run_id }} diff --git a/.github/workflows/schedule-daily.yml b/.github/workflows/schedule-daily.yml index 8ba8c49678e..71d716cd4b0 100644 --- a/.github/workflows/schedule-daily.yml +++ b/.github/workflows/schedule-daily.yml @@ -8,7 +8,6 @@ env: BRANCH_NAME: ${{ github.head_ref || github.ref_name }} CI_COMMIT_SHA: ${{ github.sha }} CI_JOB_NAME: ${{ github.job }} - CI_JOB_URL: "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" CI_PROJECT_DIR: ${{ github.workspace }} CI_RUN_ID: ${{ github.run_id }} BUILDEVENT_DATASET: "github-ci-dfinity" diff --git a/.github/workflows/schedule-hourly.yml b/.github/workflows/schedule-hourly.yml index f313e5c6bc5..400f8910f99 100644 --- a/.github/workflows/schedule-hourly.yml +++ b/.github/workflows/schedule-hourly.yml @@ -7,7 +7,6 @@ env: BRANCH_NAME: ${{ github.head_ref || github.ref_name }} CI_COMMIT_SHA: ${{ github.sha }} CI_JOB_NAME: ${{ github.job }} - CI_JOB_URL: "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" CI_PROJECT_DIR: ${{ github.workspace }} CI_RUN_ID: ${{ github.run_id }} RUSTFLAGS: "--remap-path-prefix=${CI_PROJECT_DIR}=/ic" diff --git a/.github/workflows/schedule-rust-bench.yml b/.github/workflows/schedule-rust-bench.yml index a16e5c3bc36..d978aecb400 100644 --- a/.github/workflows/schedule-rust-bench.yml +++ b/.github/workflows/schedule-rust-bench.yml @@ -10,7 +10,6 @@ env: BRANCH_NAME: ${{ github.head_ref || github.ref_name }} CI_COMMIT_SHA: ${{ github.sha }} CI_JOB_NAME: ${{ github.job }} - CI_JOB_URL: "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" CI_PROJECT_DIR: ${{ github.workspace }} CI_RUN_ID: ${{ github.run_id }} BUILDEVENT_DATASET: "github-ci-dfinity" diff --git a/.github/workflows/system-tests-k8s.yml b/.github/workflows/system-tests-k8s.yml index 9b63accc484..c2f79a182b8 100644 --- a/.github/workflows/system-tests-k8s.yml +++ b/.github/workflows/system-tests-k8s.yml @@ -35,7 +35,6 @@ env: BRANCH_NAME: ${{ github.head_ref || github.ref_name }} CI_COMMIT_SHA: ${{ github.sha }} CI_JOB_NAME: ${{ github.job }} - CI_JOB_URL: "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" CI_PIPELINE_SOURCE: ${{ github.event_name }} CI_PROJECT_DIR: ${{ github.workspace }} CI_RUN_ID: ${{ github.run_id }} From e41a4efb71a0721b6150be3e12de87187b9daac6 Mon Sep 17 00:00:00 2001 From: Alin Sinpalean <58422065+alin-at-dfinity@users.noreply.github.com> Date: Wed, 4 Dec 2024 15:47:39 +0100 Subject: [PATCH 05/23] feat: More latency metrics (#2905) Add subnet latency histograms 
where missing or incomplete: * batch delivery latency (new); * block induction latency: we already have an ingress induction latency histogram, but that's useless when there are no ingress messages; * certification latency (new). --- rs/consensus/src/consensus/finalizer.rs | 10 +++++++++- rs/consensus/src/consensus/metrics.rs | 11 ++++++++++- rs/messaging/src/message_routing.rs | 9 +++++++++ rs/messaging/src/state_machine.rs | 8 ++++++++ rs/state_manager/src/lib.rs | 16 ++++++++++++++++ 5 files changed, 52 insertions(+), 2 deletions(-) diff --git a/rs/consensus/src/consensus/finalizer.rs b/rs/consensus/src/consensus/finalizer.rs index 30cbaf9077a..e4d1183072e 100644 --- a/rs/consensus/src/consensus/finalizer.rs +++ b/rs/consensus/src/consensus/finalizer.rs @@ -26,6 +26,7 @@ use ic_consensus_utils::{ use ic_interfaces::{ ingress_manager::IngressSelector, messaging::{MessageRouting, MessageRoutingError}, + time_source::system_time_now, }; use ic_interfaces_registry::RegistryClient; use ic_logger::{debug, trace, ReplicaLogger}; @@ -116,7 +117,7 @@ impl Finalizer { .collect() } - // Write logs, report metrics depending on the batch deliver result. + /// Write logs, report metrics depending on the batch deliver result. #[allow(clippy::too_many_arguments)] fn process_batch_delivery_result( &self, @@ -133,6 +134,13 @@ impl Finalizer { .observe(now.duration_since(last_batch_delivered_at).as_secs_f64()); } self.last_batch_delivered_at.borrow_mut().replace(now); + // Batch creation time is essentially wall time (on some replica), so the median + // duration across the subnet is meaningful. + self.metrics.batch_delivery_latency.observe( + system_time_now() + .saturating_duration_since(block_stats.block_time) + .as_secs_f64(), + ); self.metrics.process(&block_stats, &batch_stats); diff --git a/rs/consensus/src/consensus/metrics.rs b/rs/consensus/src/consensus/metrics.rs index f744d4d3055..7bf58e7c898 100644 --- a/rs/consensus/src/consensus/metrics.rs +++ b/rs/consensus/src/consensus/metrics.rs @@ -10,7 +10,7 @@ use ic_types::{ idkg::{CompletedReshareRequest, CompletedSignature, IDkgPayload, KeyTranscriptCreation}, Block, BlockPayload, BlockProposal, ConsensusMessageHashable, HasHeight, HasRank, }, - CountBytes, Height, + CountBytes, Height, Time, }; use prometheus::{ GaugeVec, Histogram, HistogramVec, IntCounter, IntCounterVec, IntGauge, IntGaugeVec, @@ -116,6 +116,7 @@ impl ConsensusMetrics { pub struct BlockStats { pub block_hash: String, pub block_height: u64, + pub block_time: Time, pub block_context_certified_height: u64, pub idkg_stats: Option, } @@ -125,6 +126,7 @@ impl From<&Block> for BlockStats { Self { block_hash: format!("{:?}", ic_types::crypto::crypto_hash(block)), block_height: block.height().get(), + block_time: block.context.time, block_context_certified_height: block.context.certified_height.get(), idkg_stats: block.payload.as_ref().as_idkg().map(IDkgStats::from), } @@ -224,6 +226,7 @@ pub struct FinalizerMetrics { pub batches_delivered: IntCounterVec, pub batch_height: IntGauge, pub batch_delivery_interval: Histogram, + pub batch_delivery_latency: Histogram, pub ingress_messages_delivered: Histogram, pub ingress_message_bytes_delivered: Histogram, pub xnet_bytes_delivered: Histogram, @@ -259,6 +262,12 @@ impl FinalizerMetrics { // 1ms, 2ms, 5ms, ..., 10s, 20s, 50s decimal_buckets(-3, 1), ), + batch_delivery_latency: metrics_registry.histogram( + "consensus_batch_delivery_latency_seconds", + "Wall time duration between block making and batch delivery, in seconds", + // 10ms, 20ms, 
50ms, ..., 10s, 20s, 50s + decimal_buckets(-2, 2), + ), finalization_certified_state_difference: metrics_registry.int_gauge( "consensus_finalization_certified_state_difference", "The height difference between the finalized tip and the referenced certified state", diff --git a/rs/messaging/src/message_routing.rs b/rs/messaging/src/message_routing.rs index 7e0c717a0c3..a17777ae989 100644 --- a/rs/messaging/src/message_routing.rs +++ b/rs/messaging/src/message_routing.rs @@ -81,6 +81,7 @@ const PHASE_LOAD_STATE: &str = "load_state"; const PHASE_COMMIT: &str = "commit"; const METRIC_RECEIVE_BATCH_LATENCY: &str = "mr_receive_batch_latency_seconds"; +const METRIC_INDUCT_BATCH_LATENCY: &str = "mr_induct_batch_latency_seconds"; const METRIC_PROCESS_BATCH_DURATION: &str = "mr_process_batch_duration_seconds"; const METRIC_PROCESS_BATCH_PHASE_DURATION: &str = "mr_process_batch_phase_duration_seconds"; const METRIC_TIMED_OUT_MESSAGES_TOTAL: &str = "mr_timed_out_messages_total"; @@ -271,6 +272,8 @@ pub(crate) struct MessageRoutingMetrics { registry_version: IntGauge, /// How long Message Routing had to wait to receive the next batch. receive_batch_latency: Histogram, + /// Wall time duration between making a block and inducting it. + pub(crate) induct_batch_latency: Histogram, /// Batch processing durations. process_batch_duration: Histogram, /// Most recently seen certified height, per remote subnet @@ -369,6 +372,12 @@ impl MessageRoutingMetrics { // 0.1ms - 5s decimal_buckets(-4, 0), ), + induct_batch_latency: metrics_registry.histogram( + METRIC_INDUCT_BATCH_LATENCY, + "Wall time duration between block making and block induction.", + // 1ms - 50s + decimal_buckets(-3, 2), + ), process_batch_phase_duration: metrics_registry.histogram_vec( METRIC_PROCESS_BATCH_PHASE_DURATION, "Batch processing phase durations, by phase.", diff --git a/rs/messaging/src/state_machine.rs b/rs/messaging/src/state_machine.rs index 1a0533276fc..0116e6be352 100644 --- a/rs/messaging/src/state_machine.rs +++ b/rs/messaging/src/state_machine.rs @@ -7,6 +7,7 @@ use ic_config::execution_environment::Config as HypervisorConfig; use ic_interfaces::execution_environment::{ ExecutionRoundSummary, ExecutionRoundType, RegistryExecutionSettings, Scheduler, }; +use ic_interfaces::time_source::system_time_now; use ic_logger::{error, fatal, ReplicaLogger}; use ic_query_stats::deliver_query_stats; use ic_registry_subnet_features::SubnetFeatures; @@ -145,6 +146,13 @@ impl StateMachine for StateMachineImpl { // Preprocess messages and add messages to the induction pool through the Demux. let since = Instant::now(); let mut state_with_messages = self.demux.process_payload(state, batch.messages); + // Batch creation time is essentially wall time (on some replica), so the median + // duration should be meaningful. + self.metrics.induct_batch_latency.observe( + system_time_now() + .saturating_duration_since(batch.time) + .as_secs_f64(), + ); // Append additional responses to the consensus queue. 
state_with_messages diff --git a/rs/state_manager/src/lib.rs b/rs/state_manager/src/lib.rs index d2c3722f51f..e5b25d97173 100644 --- a/rs/state_manager/src/lib.rs +++ b/rs/state_manager/src/lib.rs @@ -145,6 +145,7 @@ pub struct StateManagerMetrics { api_call_duration: HistogramVec, last_diverged_state_timestamp: IntGauge, latest_certified_height: IntGauge, + certification_duration: Histogram, max_resident_height: IntGauge, min_resident_height: IntGauge, last_computed_manifest_height: IntGauge, @@ -365,6 +366,13 @@ impl StateManagerMetrics { "Height of the latest certified state.", ); + let certification_latency = metrics_registry.histogram( + "state_manager_certification_latency_seconds", + "Wall time taken to deliver a certification, in seconds.", + // 1ms, 2ms, 5ms, 10ms, 20ms, 50ms, …, 10s, 20s, 50s + decimal_buckets(-3, 2), + ); + let min_resident_height = metrics_registry.int_gauge( "state_manager_min_resident_height", "Height of the oldest state resident in memory.", @@ -441,6 +449,7 @@ impl StateManagerMetrics { api_call_duration, last_diverged_state_timestamp, latest_certified_height, + certification_duration: certification_latency, max_resident_height, min_resident_height, last_computed_manifest_height, @@ -720,6 +729,8 @@ struct CertificationMetadata { /// Certification of the root hash delivered by consensus via /// `deliver_state_certification()`. certification: Option, + /// Wall time when certification was requested. + certification_requested_at: Instant, } fn crypto_hash_of_partial_state(d: &Digest) -> CryptoHashOfPartialState { @@ -1962,6 +1973,7 @@ impl StateManagerImpl { hash_tree: Some(Arc::new(hash_tree)), certified_state_hash, certification: None, + certification_requested_at: Instant::now(), }) } @@ -2135,6 +2147,7 @@ impl StateManagerImpl { certified_state_hash: crypto_hash_of_tree(&hash_tree), hash_tree: Some(Arc::new(hash_tree)), certification: None, + certification_requested_at: Instant::now(), }; let mut states = self.states.write(); @@ -3248,6 +3261,9 @@ impl StateManager for StateManagerImpl { self.metrics .latest_certified_height .set(latest_certified as i64); + self.metrics + .certification_duration + .observe(metadata.certification_requested_at.elapsed().as_secs_f64()); metadata.certification = Some(certification); From 1ef8796c0b1b954c291da37f490f910ef7421e73 Mon Sep 17 00:00:00 2001 From: Dimitris Sarlis Date: Wed, 4 Dec 2024 15:48:04 +0100 Subject: [PATCH 06/23] fix: Update argument to start function of xnet canister in global reboot test (#2970) The changes in https://github.com/dfinity/ic/pull/806 missed updating the arguments to the start function of the xnet canister in a system test. This PR fixes the issue. 
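As an aside on the "More latency metrics" patch above: every histogram it adds follows the same pattern of turning a wall-clock duration into an `observe()` call on a histogram with coarse 1-2-5 buckets. The sketch below shows that pattern with the `prometheus` crate used directly; the `decimal_buckets` helper here is only a rough approximation of the in-tree helper of the same name, and only the metric name and bucket range are taken from the patch.

```rust
use prometheus::{Histogram, HistogramOpts, Registry};
use std::time::Instant;

// Rough stand-in for the in-tree `decimal_buckets` helper: 1, 2, 5 times powers of ten,
// e.g. decimal_buckets(-3, 1) yields 1ms, 2ms, 5ms, ..., 10s, 20s, 50s (as seconds).
fn decimal_buckets(min_pow: i32, max_pow: i32) -> Vec<f64> {
    (min_pow..=max_pow)
        .flat_map(|p| [1.0f64, 2.0, 5.0].into_iter().map(move |m| m * 10f64.powi(p)))
        .collect()
}

fn main() -> Result<(), prometheus::Error> {
    let registry = Registry::new();
    let batch_delivery_latency = Histogram::with_opts(
        HistogramOpts::new(
            "consensus_batch_delivery_latency_seconds",
            "Wall time duration between block making and batch delivery, in seconds",
        )
        .buckets(decimal_buckets(-2, 2)),
    )?;
    registry.register(Box::new(batch_delivery_latency.clone()))?;

    // Pattern 1: observe an explicit duration, as the new latency metrics do by
    // subtracting the block/batch timestamp from the current wall-clock time.
    let block_time = Instant::now(); // stand-in for the block's wall-clock timestamp
    batch_delivery_latency.observe(block_time.elapsed().as_secs_f64());

    // Pattern 2: a scoped timer that observes on drop, like the `copy_state` timer
    // moved around in the first patch of this series.
    {
        let _timer = batch_delivery_latency.start_timer();
        // ... the work being measured goes here; the timer records elapsed time on drop
    }

    Ok(())
}
```

Because the block timestamp is wall time taken on some replica, individual observations can be skewed, but, as the patch comments note, the median across the subnet is still meaningful.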
--- rs/tests/message_routing/global_reboot_test.rs | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/rs/tests/message_routing/global_reboot_test.rs b/rs/tests/message_routing/global_reboot_test.rs index f964002b6ea..b1060cb2580 100644 --- a/rs/tests/message_routing/global_reboot_test.rs +++ b/rs/tests/message_routing/global_reboot_test.rs @@ -48,7 +48,7 @@ use slog::{info, Logger}; use std::env; use std::time::Duration; use tokio::time::sleep; -use xnet_test::Metrics; +use xnet_test::{Metrics, StartArgs}; const SUBNETS_COUNT: usize = 2; const CANISTERS_PER_SUBNET: usize = 3; @@ -186,9 +186,13 @@ pub fn start_all_canisters( .enumerate() .flat_map(|(x, v)| v.iter().enumerate().map(move |(y, v)| (x, y, v))) { - let input = (&topology, canister_to_subnet_rate, payload_size_bytes); + let input = StartArgs { + network_topology: topology.clone(), + canister_to_subnet_rate, + payload_size_bytes, + }; let _: String = canister - .update_("start", candid, input) + .update_("start", candid, (input,)) .await .unwrap_or_else(|_| { panic!( From bd92560a7c8a35b152f772e8a837249b0a15094e Mon Sep 17 00:00:00 2001 From: Jack Lloyd Date: Wed, 4 Dec 2024 11:07:38 -0500 Subject: [PATCH 07/23] fix(crypto): Fix the management canister interface for Schnorr aux inputs (#2952) PR 2523 implemented a management canister interface which does not match the finalized interface change (https://github.com/dfinity/portal/pull/3758) --------- Co-authored-by: Dimitris Sarlis --- .../src/execution_environment.rs | 10 ++++--- .../src/execution_environment/tests.rs | 2 +- .../src/scheduler/tests.rs | 2 +- .../tests/threshold_signatures.rs | 2 +- rs/system_api/src/routing.rs | 2 +- rs/tests/consensus/tecdsa/utils/src/lib.rs | 4 +-- rs/types/management_canister_types/src/lib.rs | 28 ++++++++++++++++++- 7 files changed, 39 insertions(+), 11 deletions(-) diff --git a/rs/execution_environment/src/execution_environment.rs b/rs/execution_environment/src/execution_environment.rs index a176b0fe920..81f9ae0700a 100644 --- a/rs/execution_environment/src/execution_environment.rs +++ b/rs/execution_environment/src/execution_environment.rs @@ -41,7 +41,7 @@ use ic_management_canister_types::{ LoadCanisterSnapshotArgs, MasterPublicKeyId, Method as Ic00Method, NodeMetricsHistoryArgs, Payload as Ic00Payload, ProvisionalCreateCanisterWithCyclesArgs, ProvisionalTopUpCanisterArgs, SchnorrPublicKeyArgs, SchnorrPublicKeyResponse, SetupInitialDKGArgs, SignWithECDSAArgs, - SignWithSchnorrArgs, StoredChunksArgs, SubnetInfoArgs, SubnetInfoResponse, + SignWithSchnorrArgs, SignWithSchnorrAux, StoredChunksArgs, SubnetInfoArgs, SubnetInfoResponse, TakeCanisterSnapshotArgs, UninstallCodeArgs, UpdateSettingsArgs, UploadChunkArgs, VetKdPublicKeyArgs, VetKdPublicKeyResult, IC_00, }; @@ -1212,9 +1212,11 @@ impl ExecutionEnvironment { ThresholdArguments::Schnorr(SchnorrArguments { key_id: args.key_id, message: Arc::new(args.message), - taproot_tree_root: args - .taproot_tree_root - .map(|v| Arc::new(v.into_vec())), + taproot_tree_root: args.aux.map(|v| match v { + SignWithSchnorrAux::Bip341(v) => { + Arc::new(v.merkle_root_hash.into_vec()) + } + }), }), args.derivation_path.into_inner(), registry_settings diff --git a/rs/execution_environment/src/execution_environment/tests.rs b/rs/execution_environment/src/execution_environment/tests.rs index a5ad26eda9d..e3d23eb774e 100644 --- a/rs/execution_environment/src/execution_environment/tests.rs +++ b/rs/execution_environment/src/execution_environment/tests.rs @@ -168,7 +168,7 @@ fn 
sign_with_threshold_key_payload(method: Method, key_id: MasterPublicKeyId) -> message: vec![], derivation_path: DerivationPath::new(vec![]), key_id: into_inner_schnorr(key_id), - taproot_tree_root: None, + aux: None, } .encode(), Method::VetKdDeriveEncryptedKey => ic00::VetKdDeriveEncryptedKeyArgs { diff --git a/rs/execution_environment/src/scheduler/tests.rs b/rs/execution_environment/src/scheduler/tests.rs index c8b8329d20f..fb0c0bb64c3 100644 --- a/rs/execution_environment/src/scheduler/tests.rs +++ b/rs/execution_environment/src/scheduler/tests.rs @@ -3759,7 +3759,7 @@ fn threshold_signature_agreements_metric_is_updated() { message: vec![1; 128], derivation_path: DerivationPath::new(Vec::new()), key_id: schnorr_key_id, - taproot_tree_root: None, + aux: None, }) .unwrap(); diff --git a/rs/execution_environment/tests/threshold_signatures.rs b/rs/execution_environment/tests/threshold_signatures.rs index a4089d487ef..8b42b70578e 100644 --- a/rs/execution_environment/tests/threshold_signatures.rs +++ b/rs/execution_environment/tests/threshold_signatures.rs @@ -82,7 +82,7 @@ fn sign_with_threshold_key_payload(method: Method, key_id: MasterPublicKeyId) -> message: vec![], derivation_path: DerivationPath::new(vec![]), key_id: into_inner_schnorr(key_id), - taproot_tree_root: None, + aux: None, } .encode(), _ => panic!("unexpected method"), diff --git a/rs/system_api/src/routing.rs b/rs/system_api/src/routing.rs index 4db52561114..c42decdf254 100644 --- a/rs/system_api/src/routing.rs +++ b/rs/system_api/src/routing.rs @@ -548,7 +548,7 @@ mod tests { message: vec![1; 32], derivation_path: DerivationPath::new(vec![ByteBuf::from(vec![0; 10])]), key_id, - taproot_tree_root: None, + aux: None, }; Encode!(&args).unwrap() } diff --git a/rs/tests/consensus/tecdsa/utils/src/lib.rs b/rs/tests/consensus/tecdsa/utils/src/lib.rs index 8c96ebd2692..97dae4589cf 100644 --- a/rs/tests/consensus/tecdsa/utils/src/lib.rs +++ b/rs/tests/consensus/tecdsa/utils/src/lib.rs @@ -577,7 +577,7 @@ pub async fn get_schnorr_signature_with_logger( message, derivation_path: DerivationPath::new(Vec::new()), key_id: key_id.clone(), - taproot_tree_root: None, + aux: None, }; info!( logger, @@ -871,7 +871,7 @@ impl ChainSignatureRequest { message: vec![1; message_size], derivation_path: DerivationPath::new(Vec::new()), key_id: schnorr_key_id, - taproot_tree_root: None, + aux: None, }; ForwardParams { receiver: Principal::management_canister(), diff --git a/rs/types/management_canister_types/src/lib.rs b/rs/types/management_canister_types/src/lib.rs index c504cd184dc..8087b39fe36 100644 --- a/rs/types/management_canister_types/src/lib.rs +++ b/rs/types/management_canister_types/src/lib.rs @@ -2711,12 +2711,38 @@ impl ReshareChainKeyResponse { } } +/// Represents the BIP341 aux argument of the sign_with_schnorr API. +/// ```text +/// (record { +/// merkle_root_hash: blob; +/// }) +/// ``` +#[derive(Eq, PartialEq, Debug, CandidType, Deserialize)] +pub struct SignWithBip341Aux { + pub merkle_root_hash: ByteBuf, +} + +/// Represents the aux argument of the sign_with_schnorr API. +/// ```text +/// (variant { +/// bip341: record { +/// merkle_root_hash: blob; +/// } +/// }) +/// ``` +#[derive(Eq, PartialEq, Debug, CandidType, Deserialize)] +pub enum SignWithSchnorrAux { + #[serde(rename = "bip341")] + Bip341(SignWithBip341Aux), +} + /// Represents the argument of the sign_with_schnorr API. 
/// ```text /// (record { /// message : blob; /// derivation_path : vec blob; /// key_id : schnorr_key_id; +/// aux: opt schnorr_aux; /// }) /// ``` #[derive(Eq, PartialEq, Debug, CandidType, Deserialize)] @@ -2725,7 +2751,7 @@ pub struct SignWithSchnorrArgs { pub message: Vec, pub derivation_path: DerivationPath, pub key_id: SchnorrKeyId, - pub taproot_tree_root: Option, + pub aux: Option, } impl Payload<'_> for SignWithSchnorrArgs {} From 96944f42de5262a91dbf80e930bb03f2895c4023 Mon Sep 17 00:00:00 2001 From: Andre Popovitch Date: Wed, 4 Dec 2024 12:28:17 -0600 Subject: [PATCH 08/23] feat(sns): Bound number of entries in get_upgrade_journal response (#2873) Closes [NNS1-3416](https://dfinity.atlassian.net/browse/NNS1-3416) [NNS1-3416]: https://dfinity.atlassian.net/browse/NNS1-3416?atlOrigin=eyJpIjoiNWRkNTljNzYxNjVmNDY3MDlhMDU5Y2ZhYzA5YTRkZjUiLCJwIjoiZ2l0aHViLWNvbS1KU1cifQ --- .../src/pocket_ic_helpers.rs | 2 +- .../api/src/ic_sns_governance.pb.v1.rs | 13 +- rs/sns/governance/canister/canister.rs | 3 +- rs/sns/governance/canister/governance.did | 6 +- .../governance/canister/governance_test.did | 6 +- .../ic_sns_governance/pb/v1/governance.proto | 11 +- .../src/gen/ic_sns_governance.pb.v1.rs | 13 +- rs/sns/governance/src/governance.rs | 2 + .../advance_target_sns_version_tests.rs | 175 +++++++++++++++++- rs/sns/governance/src/pb/conversions.rs | 12 +- rs/sns/governance/src/upgrade_journal.rs | 46 ++++- 11 files changed, 267 insertions(+), 22 deletions(-) diff --git a/rs/nervous_system/integration_tests/src/pocket_ic_helpers.rs b/rs/nervous_system/integration_tests/src/pocket_ic_helpers.rs index 279596f6693..2cd287da49c 100644 --- a/rs/nervous_system/integration_tests/src/pocket_ic_helpers.rs +++ b/rs/nervous_system/integration_tests/src/pocket_ic_helpers.rs @@ -1687,7 +1687,7 @@ pub mod sns { pocket_ic: &PocketIc, sns_governance_canister_id: PrincipalId, ) -> std::result::Result { - let payload = sns_pb::GetUpgradeJournalRequest {}; + let payload = sns_pb::GetUpgradeJournalRequest::default(); pocket_ic.call(sns_governance_canister_id, payload).await } diff --git a/rs/sns/governance/api/src/ic_sns_governance.pb.v1.rs b/rs/sns/governance/api/src/ic_sns_governance.pb.v1.rs index 1cbdd6bf129..7992895a833 100644 --- a/rs/sns/governance/api/src/ic_sns_governance.pb.v1.rs +++ b/rs/sns/governance/api/src/ic_sns_governance.pb.v1.rs @@ -3689,7 +3689,16 @@ pub struct UpgradeJournal { PartialEq, ::prost::Message, )] -pub struct GetUpgradeJournalRequest {} +pub struct GetUpgradeJournalRequest { + /// Maximum number of journal entries to return. + /// If not specified, defaults to 100. Values larger than 100 will be capped at 100. + #[prost(uint64, optional, tag = "1")] + pub limit: ::core::option::Option, + /// The starting index from which to return entries, counting from the oldest entry (0). + /// If not specified, return the most recent entries. + #[prost(uint64, optional, tag = "2")] + pub offset: ::core::option::Option, +} #[derive( candid::CandidType, candid::Deserialize, @@ -3712,6 +3721,8 @@ pub struct GetUpgradeJournalResponse { pub deployed_version: ::core::option::Option, #[prost(message, optional, tag = "4")] pub upgrade_journal: ::core::option::Option, + #[prost(uint64, optional, tag = "6")] + pub upgrade_journal_entry_count: ::core::option::Option, } /// A request to mint tokens for a particular principal. 
The associated endpoint /// is only available on SNS governance, and only then when SNS governance is diff --git a/rs/sns/governance/canister/canister.rs b/rs/sns/governance/canister/canister.rs index 89afd45f59e..901a3810910 100644 --- a/rs/sns/governance/canister/canister.rs +++ b/rs/sns/governance/canister/canister.rs @@ -672,8 +672,7 @@ fn add_maturity(request: AddMaturityRequest) -> AddMaturityResponse { #[query] fn get_upgrade_journal(arg: GetUpgradeJournalRequest) -> GetUpgradeJournalResponse { - let GetUpgradeJournalRequest {} = arg; - governance().get_upgrade_journal() + governance().get_upgrade_journal(arg) } /// Mints tokens for testing diff --git a/rs/sns/governance/canister/governance.did b/rs/sns/governance/canister/governance.did index 4c4852253df..df24217066d 100644 --- a/rs/sns/governance/canister/governance.did +++ b/rs/sns/governance/canister/governance.did @@ -806,7 +806,10 @@ type UpgradeJournal = record { entries : vec UpgradeJournalEntry; }; -type GetUpgradeJournalRequest = record {}; +type GetUpgradeJournalRequest = record { + limit : opt nat64; + offset : opt nat64; +}; type GetUpgradeJournalResponse = record { upgrade_steps : opt Versions; @@ -814,6 +817,7 @@ type GetUpgradeJournalResponse = record { target_version : opt Version; deployed_version : opt Version; upgrade_journal : opt UpgradeJournal; + upgrade_journal_entry_count: opt nat64; }; service : (Governance) -> { diff --git a/rs/sns/governance/canister/governance_test.did b/rs/sns/governance/canister/governance_test.did index 7af49fcbe8e..27737ff3313 100644 --- a/rs/sns/governance/canister/governance_test.did +++ b/rs/sns/governance/canister/governance_test.did @@ -820,7 +820,10 @@ type UpgradeJournal = record { entries : vec UpgradeJournalEntry; }; -type GetUpgradeJournalRequest = record {}; +type GetUpgradeJournalRequest = record { + limit : opt nat64; + offset : opt nat64; +}; type GetUpgradeJournalResponse = record { upgrade_steps : opt Versions; @@ -828,6 +831,7 @@ type GetUpgradeJournalResponse = record { target_version : opt Version; deployed_version : opt Version; upgrade_journal : opt UpgradeJournal; + upgrade_journal_entry_count: opt nat64; }; type AdvanceTargetVersionRequest = record { target_version : opt Version; }; diff --git a/rs/sns/governance/proto/ic_sns_governance/pb/v1/governance.proto b/rs/sns/governance/proto/ic_sns_governance/pb/v1/governance.proto index 88d245e4568..2b2fbcb0714 100644 --- a/rs/sns/governance/proto/ic_sns_governance/pb/v1/governance.proto +++ b/rs/sns/governance/proto/ic_sns_governance/pb/v1/governance.proto @@ -2271,7 +2271,15 @@ message UpgradeJournal { // The upgrade journal contains all the information neede to audit previous SNS upgrades and understand its current state. // It is being implemented as part of the "effortless SNS upgrade" feature. -message GetUpgradeJournalRequest {} +message GetUpgradeJournalRequest { + // Maximum number of journal entries to return. + // If not specified, defaults to 100. Values larger than 100 will be capped at 100. + optional uint64 limit = 1; + + // The starting index from which to return entries, counting from the oldest entry (0). + // If not specified, return the most recent entries. 
+ optional uint64 offset = 2; +} message GetUpgradeJournalResponse { Governance.Versions upgrade_steps = 1; @@ -2283,6 +2291,7 @@ message GetUpgradeJournalResponse { Governance.Version deployed_version = 5; UpgradeJournal upgrade_journal = 4; + optional uint64 upgrade_journal_entry_count = 6; } // A request to mint tokens for a particular principal. The associated endpoint diff --git a/rs/sns/governance/src/gen/ic_sns_governance.pb.v1.rs b/rs/sns/governance/src/gen/ic_sns_governance.pb.v1.rs index 167cd7054b7..55691a9792d 100644 --- a/rs/sns/governance/src/gen/ic_sns_governance.pb.v1.rs +++ b/rs/sns/governance/src/gen/ic_sns_governance.pb.v1.rs @@ -3675,7 +3675,16 @@ pub struct UpgradeJournal { PartialEq, ::prost::Message, )] -pub struct GetUpgradeJournalRequest {} +pub struct GetUpgradeJournalRequest { + /// Maximum number of journal entries to return. + /// If not specified, defaults to 100. Values larger than 100 will be capped at 100. + #[prost(uint64, optional, tag = "1")] + pub limit: ::core::option::Option, + /// The starting index from which to return entries, counting from the oldest entry (0). + /// If not specified, return the most recent entries. + #[prost(uint64, optional, tag = "2")] + pub offset: ::core::option::Option, +} #[derive( candid::CandidType, candid::Deserialize, @@ -3698,6 +3707,8 @@ pub struct GetUpgradeJournalResponse { pub deployed_version: ::core::option::Option, #[prost(message, optional, tag = "4")] pub upgrade_journal: ::core::option::Option, + #[prost(uint64, optional, tag = "6")] + pub upgrade_journal_entry_count: ::core::option::Option, } /// A request to mint tokens for a particular principal. The associated endpoint /// is only available on SNS governance, and only then when SNS governance is diff --git a/rs/sns/governance/src/governance.rs b/rs/sns/governance/src/governance.rs index 82466e131b3..d1818bcb444 100644 --- a/rs/sns/governance/src/governance.rs +++ b/rs/sns/governance/src/governance.rs @@ -149,6 +149,8 @@ pub const MATURITY_DISBURSEMENT_DELAY_SECONDS: u64 = 7 * 24 * 3600; pub const HEAP_SIZE_SOFT_LIMIT_IN_WASM32_PAGES: usize = MAX_HEAP_SIZE_IN_KIB / WASM32_PAGE_SIZE_IN_KIB * 7 / 8; +pub const MAX_UPGRADE_JOURNAL_ENTRIES_PER_REQUEST: u64 = 100; + /// Prefixes each log line for this canister. 
pub fn log_prefix() -> String { "[Governance] ".into() diff --git a/rs/sns/governance/src/governance/advance_target_sns_version_tests.rs b/rs/sns/governance/src/governance/advance_target_sns_version_tests.rs index 43451787c2a..8536645ff08 100644 --- a/rs/sns/governance/src/governance/advance_target_sns_version_tests.rs +++ b/rs/sns/governance/src/governance/advance_target_sns_version_tests.rs @@ -13,7 +13,9 @@ use crate::sns_upgrade::{GetSnsCanistersSummaryRequest, GetSnsCanistersSummaryRe use crate::{ pb::v1::{ governance::{CachedUpgradeSteps as CachedUpgradeStepsPb, Versions}, - ProposalData, Tally, UpgradeSnsToNextVersion, + upgrade_journal_entry::Event, + GetUpgradeJournalRequest, ProposalData, Tally, UpgradeJournal, UpgradeJournalEntry, + UpgradeSnsToNextVersion, }, sns_upgrade::{ListUpgradeStep, ListUpgradeStepsRequest, ListUpgradeStepsResponse, SnsVersion}, types::test_helpers::NativeEnvironment, @@ -1174,3 +1176,174 @@ fn add_environment_mock_list_upgrade_steps_call( .unwrap()), ); } + +#[test] +fn test_get_upgrade_journal_pagination() { + let mut governance = Governance::new( + GovernanceProto { + upgrade_journal: Some(UpgradeJournal { + entries: vec![ + UpgradeJournalEntry { + event: Some(Event::UpgradeStarted( + upgrade_journal_entry::UpgradeStarted { + current_version: None, + expected_version: None, + reason: None, + }, + )), + timestamp_seconds: Some(1), + }, + UpgradeJournalEntry { + event: Some(Event::UpgradeOutcome( + upgrade_journal_entry::UpgradeOutcome { + human_readable: Some("success".to_string()), + status: None, + }, + )), + timestamp_seconds: Some(2), + }, + UpgradeJournalEntry { + event: Some(Event::UpgradeStarted( + upgrade_journal_entry::UpgradeStarted { + current_version: None, + expected_version: None, + reason: None, + }, + )), + timestamp_seconds: Some(3), + }, + ], + }), + ..basic_governance_proto() + } + .try_into() + .unwrap(), + Box::new(NativeEnvironment::new(None)), + Box::new(DoNothingLedger {}), + Box::new(DoNothingLedger {}), + Box::new(FakeCmc::new()), + ); + + // Scenario 1: Default behavior shows most recent entries + let response = governance.get_upgrade_journal(GetUpgradeJournalRequest { + offset: None, + limit: Some(2), + }); + assert_eq!( + response.clone().upgrade_journal.unwrap().entries[..], + governance.proto.upgrade_journal.as_ref().unwrap().entries[1..], + ); + // Assert the timestamps explicitly for good measure + assert_eq!( + response + .clone() + .upgrade_journal + .unwrap() + .entries + .into_iter() + .filter_map(|event| event.timestamp_seconds) + .collect::>(), + vec![2, 3], + ); + + // Scenario 2: Explicit start index + let response = governance.get_upgrade_journal(GetUpgradeJournalRequest { + offset: Some(0), + limit: Some(2), + }); + assert_eq!( + response.clone().upgrade_journal.unwrap().entries[..], + governance.proto.upgrade_journal.as_ref().unwrap().entries[0..2], + ); + // Assert the timestamps explicitly for good measure + assert_eq!( + response + .clone() + .upgrade_journal + .unwrap() + .entries + .into_iter() + .filter_map(|event| event.timestamp_seconds) + .collect::>(), + vec![1, 2], + ); + + // Scenario 3: Start index beyond bounds returns empty list + let response = governance.get_upgrade_journal(GetUpgradeJournalRequest { + offset: Some(10), + limit: Some(2), + }); + assert_eq!(response.upgrade_journal.unwrap().entries, Vec::new()); + + // Scenario 4: slice goes past the end of available entries (i.e. 
offset + max_entries much greater than len) + let response = governance.get_upgrade_journal(GetUpgradeJournalRequest { + offset: Some(1), + limit: Some(10), + }); + assert_eq!( + response.clone().upgrade_journal.unwrap().entries[..], + governance.proto.upgrade_journal.as_ref().unwrap().entries[1..], + ); + // Assert the timestamps explicitly for good measure + assert_eq!( + response + .clone() + .upgrade_journal + .unwrap() + .entries + .into_iter() + .filter_map(|event| event.timestamp_seconds) + .collect::>(), + vec![2, 3], + ); + + // Scenario 5: API obeys the global limit when there are tons of entries + governance.proto.upgrade_journal = Some(UpgradeJournal { + entries: vec![ + UpgradeJournalEntry::default(); + MAX_UPGRADE_JOURNAL_ENTRIES_PER_REQUEST as usize + 1 + ], + }); + let response = governance.get_upgrade_journal(GetUpgradeJournalRequest { + offset: None, + limit: Some(MAX_UPGRADE_JOURNAL_ENTRIES_PER_REQUEST + 1), + }); + assert_eq!( + response.upgrade_journal.unwrap().entries[..], + governance.proto.upgrade_journal.as_ref().unwrap().entries + [..(MAX_UPGRADE_JOURNAL_ENTRIES_PER_REQUEST as usize)], + ); + + // Scenario 6: The tail of the list is returned when no offset is specified + governance + .proto + .upgrade_journal + .as_mut() + .unwrap() + .entries + .push(UpgradeJournalEntry { + event: Some(Event::UpgradeOutcome( + upgrade_journal_entry::UpgradeOutcome { + human_readable: Some("success".to_string()), + status: None, + }, + )), + timestamp_seconds: Some(220293), // crazy timestamp here to make sure tihs entry is unique + }); + let response = governance.get_upgrade_journal(GetUpgradeJournalRequest { + offset: None, + limit: Some(1), + }); + assert_eq!( + response.upgrade_journal.unwrap().entries, + vec![governance + .proto + .upgrade_journal + .as_ref() + .unwrap() + .entries + .last() + .unwrap() + .clone()], + ); +} diff --git a/rs/sns/governance/src/pb/conversions.rs b/rs/sns/governance/src/pb/conversions.rs index ad121ed0de9..fe8addfd83c 100644 --- a/rs/sns/governance/src/pb/conversions.rs +++ b/rs/sns/governance/src/pb/conversions.rs @@ -3420,13 +3420,15 @@ impl From for pb::UpgradeJournal { } impl From for pb_api::GetUpgradeJournalRequest { - fn from(_: pb::GetUpgradeJournalRequest) -> Self { - Self {} + fn from(request: pb::GetUpgradeJournalRequest) -> Self { + let pb::GetUpgradeJournalRequest { limit, offset } = request; + Self { limit, offset } } } impl From for pb::GetUpgradeJournalRequest { - fn from(_: pb_api::GetUpgradeJournalRequest) -> Self { - Self {} + fn from(request: pb_api::GetUpgradeJournalRequest) -> Self { + let pb_api::GetUpgradeJournalRequest { limit, offset } = request; + Self { limit, offset } } } @@ -3438,6 +3440,7 @@ impl From for pb_api::GetUpgradeJournalResponse { target_version: item.target_version.map(|x| x.into()), deployed_version: item.deployed_version.map(|x| x.into()), upgrade_journal: item.upgrade_journal.map(|x| x.into()), + upgrade_journal_entry_count: item.upgrade_journal_entry_count, } } } @@ -3449,6 +3452,7 @@ impl From for pb::GetUpgradeJournalResponse { target_version: item.target_version.map(|x| x.into()), deployed_version: item.deployed_version.map(|x| x.into()), upgrade_journal: item.upgrade_journal.map(|x| x.into()), + upgrade_journal_entry_count: item.upgrade_journal_entry_count, } } } diff --git a/rs/sns/governance/src/upgrade_journal.rs b/rs/sns/governance/src/upgrade_journal.rs index e1bd8345dd3..1ca20da4916 100644 --- a/rs/sns/governance/src/upgrade_journal.rs +++ b/rs/sns/governance/src/upgrade_journal.rs @@ -1,8 
+1,9 @@ -use crate::governance::Governance; +use crate::governance::{Governance, MAX_UPGRADE_JOURNAL_ENTRIES_PER_REQUEST}; use crate::pb::v1::{ governance::{Version, Versions}, upgrade_journal_entry::{self, upgrade_outcome, upgrade_started}, - Empty, GetUpgradeJournalResponse, ProposalId, UpgradeJournal, UpgradeJournalEntry, + Empty, GetUpgradeJournalRequest, GetUpgradeJournalResponse, ProposalId, UpgradeJournal, + UpgradeJournalEntry, }; impl upgrade_journal_entry::UpgradeStepsRefreshed { @@ -129,24 +130,51 @@ impl Governance { } } - pub fn get_upgrade_journal(&self) -> GetUpgradeJournalResponse { - let cached_upgrade_steps = self.proto.cached_upgrade_steps.clone(); - match cached_upgrade_steps { + pub fn get_upgrade_journal( + &self, + request: GetUpgradeJournalRequest, + ) -> GetUpgradeJournalResponse { + let upgrade_journal = self.proto.upgrade_journal.as_ref().map(|journal| { + let limit = request + .limit + .unwrap_or(MAX_UPGRADE_JOURNAL_ENTRIES_PER_REQUEST) + .min(MAX_UPGRADE_JOURNAL_ENTRIES_PER_REQUEST) as usize; + let offset = request + .offset + .map(|offset| offset as usize) + .unwrap_or_else(|| journal.entries.len().saturating_sub(limit)); + let entries = journal + .entries + .iter() + .skip(offset) + .take(limit) + .cloned() + .collect(); + UpgradeJournal { entries } + }); + let upgrade_journal_entry_count = self + .proto + .upgrade_journal + .as_ref() + .map(|journal| journal.entries.len() as u64); + + let upgrade_steps = self.proto.cached_upgrade_steps.clone(); + match upgrade_steps { Some(cached_upgrade_steps) => GetUpgradeJournalResponse { upgrade_steps: cached_upgrade_steps.upgrade_steps, response_timestamp_seconds: cached_upgrade_steps.response_timestamp_seconds, target_version: self.proto.target_version.clone(), deployed_version: self.proto.deployed_version.clone(), - // TODO(NNS1-3416): Bound the size of the response. - upgrade_journal: self.proto.upgrade_journal.clone(), + upgrade_journal, + upgrade_journal_entry_count, }, None => GetUpgradeJournalResponse { upgrade_steps: None, response_timestamp_seconds: None, target_version: None, deployed_version: self.proto.deployed_version.clone(), - // TODO(NNS1-3416): Bound the size of the response. - upgrade_journal: self.proto.upgrade_journal.clone(), + upgrade_journal, + upgrade_journal_entry_count, }, } } From bc673893c4066a26f5c30b00d1ede0d19c97730b Mon Sep 17 00:00:00 2001 From: jasonz-dfinity <133917836+jasonz-dfinity@users.noreply.github.com> Date: Wed, 4 Dec 2024 11:05:26 -0800 Subject: [PATCH 09/23] feat(nns): Move spawn_neurons to a one-minute timer (#2934) # Why As we plan to move neurons to stable storage, the cost for scanning neurons to spawn will increase (80x). 
Therefore, to keep the average instruction count similar, we reduce its frequency (twice per second -> once every minute) # What * Schedule a timer to run `maybe_spawn_neurons()` every minute * Remove `spawn_neurons` from heartbeat # Tests Verified that `bazel test //rs/nns/integration_tests:integration_tests_test_src/governance_neurons --test_filter=test_spawn_neuron` fails if the `spawn_neurons` is removed from heartbeat but not added to timer --- rs/nns/governance/canister/canister.rs | 11 +++++++++++ rs/nns/governance/src/governance.rs | 5 +---- rs/nns/governance/tests/governance.rs | 17 ++++++++++------- 3 files changed, 22 insertions(+), 11 deletions(-) diff --git a/rs/nns/governance/canister/canister.rs b/rs/nns/governance/canister/canister.rs index 4e4d88a1e98..9cf20585bcb 100644 --- a/rs/nns/governance/canister/canister.rs +++ b/rs/nns/governance/canister/canister.rs @@ -165,6 +165,7 @@ fn schedule_timers() { schedule_seeding(Duration::from_nanos(0)); schedule_adjust_neurons_storage(Duration::from_nanos(0), NeuronIdProto { id: 0 }); schedule_prune_following(Duration::from_secs(0)); + schedule_spawn_neurons(); } // Seeding interval seeks to find a balance between the need for rng secrecy, and @@ -268,6 +269,16 @@ fn schedule_adjust_neurons_storage(delay: Duration, start_neuron_id: NeuronIdPro }); } +const SPAWN_NEURONS_INTERVAL: Duration = Duration::from_secs(60); + +fn schedule_spawn_neurons() { + ic_cdk_timers::set_timer_interval(SPAWN_NEURONS_INTERVAL, || { + spawn(async { + governance_mut().maybe_spawn_neurons().await; + }); + }); +} + struct CanisterEnv { rng: Option, time_warp: GovTimeWarp, diff --git a/rs/nns/governance/src/governance.rs b/rs/nns/governance/src/governance.rs index fb8fed7e84d..809f901bf08 100644 --- a/rs/nns/governance/src/governance.rs +++ b/rs/nns/governance/src/governance.rs @@ -6558,9 +6558,6 @@ impl Governance { // Try to update maturity modulation (once per day). } else if self.should_update_maturity_modulation() { self.update_maturity_modulation().await; - // Try to spawn neurons (potentially multiple times per day). - } else if self.can_spawn_neurons() { - self.spawn_neurons().await; } else { // This is the lowest-priority async task. All other tasks should have their own // `else if`, like the ones above. @@ -6702,7 +6699,7 @@ impl Governance { /// This means that programming in this method needs to be extra-defensive on the handling of results so that /// we're sure not to trap after we've acquired the global lock and made an async call, as otherwise the global /// lock will be permanently held and no spawning will occur until a upgrade to fix it is made. - async fn spawn_neurons(&mut self) { + pub async fn maybe_spawn_neurons(&mut self) { if !self.can_spawn_neurons() { return; } diff --git a/rs/nns/governance/tests/governance.rs b/rs/nns/governance/tests/governance.rs index 86023027262..d10fe64cc6f 100644 --- a/rs/nns/governance/tests/governance.rs +++ b/rs/nns/governance/tests/governance.rs @@ -5737,7 +5737,7 @@ fn test_seed_neuron_split() { // Spawn neurons has the least priority in the periodic tasks, so we need to run // them often enough to make sure it happens. 
-fn run_periodic_tasks_on_governance_often_enough_to_spawn(gov: &mut Governance) { +fn run_periodic_tasks_often_enough_to_update_maturity_modulation(gov: &mut Governance) { for _i in 0..5 { gov.run_periodic_tasks().now_or_never(); } @@ -5766,6 +5766,7 @@ fn test_neuron_spawn() { from, nonce, ); + run_periodic_tasks_often_enough_to_update_maturity_modulation(&mut gov); let now = driver.now(); assert_eq!( @@ -5898,13 +5899,13 @@ fn test_neuron_spawn() { let creation_timestamp = driver.now(); // Running periodic tasks shouldn't cause the ICP to be minted. - run_periodic_tasks_on_governance_often_enough_to_spawn(&mut gov); + gov.maybe_spawn_neurons().now_or_never().unwrap(); driver.assert_num_neuron_accounts_exist(1); // Advance the time by one week, should cause the neuron's ICP // to be minted. driver.advance_time_by(7 * 86400); - run_periodic_tasks_on_governance_often_enough_to_spawn(&mut gov); + gov.maybe_spawn_neurons().now_or_never().unwrap(); driver.assert_num_neuron_accounts_exist(2); let child_neuron = gov @@ -5953,6 +5954,7 @@ fn test_neuron_spawn_with_subaccount() { from, nonce, ); + run_periodic_tasks_often_enough_to_update_maturity_modulation(&mut gov); let now = driver.now(); assert_eq!( @@ -6035,7 +6037,7 @@ fn test_neuron_spawn_with_subaccount() { driver.assert_num_neuron_accounts_exist(1); // Running periodic tasks shouldn't cause the ICP to be minted. - run_periodic_tasks_on_governance_often_enough_to_spawn(&mut gov); + gov.maybe_spawn_neurons().now_or_never().unwrap(); driver.assert_num_neuron_accounts_exist(1); let parent_neuron = gov @@ -6047,7 +6049,7 @@ fn test_neuron_spawn_with_subaccount() { // Advance the time by one week, should cause the neuron's ICP // to be minted. driver.advance_time_by(7 * 86400); - run_periodic_tasks_on_governance_often_enough_to_spawn(&mut gov); + gov.maybe_spawn_neurons().now_or_never().unwrap(); driver.assert_num_neuron_accounts_exist(2); let child_neuron = gov @@ -6132,6 +6134,7 @@ fn assert_neuron_spawn_partial( from, nonce, ); + run_periodic_tasks_often_enough_to_update_maturity_modulation(&mut gov); let neuron = gov .with_neuron(&id, |neuron| neuron.clone()) @@ -6193,7 +6196,7 @@ fn assert_neuron_spawn_partial( .expect("The parent neuron is missing"); // Running periodic tasks shouldn't cause the ICP to be minted. - run_periodic_tasks_on_governance_often_enough_to_spawn(&mut gov); + gov.maybe_spawn_neurons().now_or_never().unwrap(); driver.assert_num_neuron_accounts_exist(1); // Some maturity should be remaining on the parent neuron. @@ -6205,7 +6208,7 @@ fn assert_neuron_spawn_partial( // Advance the time by one week, should cause the neuron's ICP // to be minted. driver.advance_time_by(7 * 86400); - run_periodic_tasks_on_governance_often_enough_to_spawn(&mut gov); + gov.maybe_spawn_neurons().now_or_never().unwrap(); driver.assert_num_neuron_accounts_exist(2); let child_neuron = gov From 09f1152899a210c6a8f5d498df169c47af8f0a3e Mon Sep 17 00:00:00 2001 From: jasonz-dfinity <133917836+jasonz-dfinity@users.noreply.github.com> Date: Wed, 4 Dec 2024 11:39:56 -0800 Subject: [PATCH 10/23] perf(nns): Add a benchmark for drawing neurons fund maturity (#2973) Drawing maturity from neurons fund requires iterating through neurons, and that can be expensive when we move neurons to stable structures. Therefore we want a benchmark so that the future improvements can be measured and regressions can be prevented. 
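For reference, a minimal sketch of the canbench pattern the new benchmarks follow, assuming the `canbench_rs` crate as already used in this repo. `draw_maturity_workload` is a hypothetical stand-in for the real neuron iteration added in the diff below; only the closure passed to `bench_fn` is the measured region, so setup stays outside it.

```rust
use canbench_rs::{bench, bench_fn, BenchResult};

// Hypothetical stand-in for the real workload (iterating neurons fund neurons
// and drawing maturity); the actual benchmarks below exercise `NeuronStore`.
fn draw_maturity_workload() -> u64 {
    (0..100u64).map(|_| 1_000_000_000).sum()
}

#[bench(raw)]
fn draw_maturity_sketch() -> BenchResult {
    // Build any test fixtures here, outside the measured closure.
    bench_fn(|| {
        // Only the instructions executed inside this closure are reported.
        let _total = draw_maturity_workload();
    })
}
```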
--- .../governance/canbench/canbench_results.yml | 42 ++++++++----- rs/nns/governance/src/neuron_store/benches.rs | 62 +++++++++++++++++++ 2 files changed, 89 insertions(+), 15 deletions(-) diff --git a/rs/nns/governance/canbench/canbench_results.yml b/rs/nns/governance/canbench/canbench_results.yml index c63d6d25ede..be48be33bb9 100644 --- a/rs/nns/governance/canbench/canbench_results.yml +++ b/rs/nns/governance/canbench/canbench_results.yml @@ -1,61 +1,73 @@ benches: add_neuron_active_maximum: total: - instructions: 36108795 + instructions: 36183557 heap_increase: 1 stable_memory_increase: 0 scopes: {} add_neuron_active_typical: total: - instructions: 1832299 + instructions: 1835560 heap_increase: 0 stable_memory_increase: 0 scopes: {} add_neuron_inactive_maximum: total: - instructions: 96119480 + instructions: 96170368 heap_increase: 1 stable_memory_increase: 0 scopes: {} add_neuron_inactive_typical: total: - instructions: 7375058 + instructions: 7370817 heap_increase: 0 stable_memory_increase: 0 scopes: {} cascading_vote_all_heap: total: - instructions: 32012069 + instructions: 31843424 heap_increase: 0 stable_memory_increase: 0 scopes: {} cascading_vote_heap_neurons_stable_index: total: - instructions: 54483561 + instructions: 54151176 heap_increase: 0 stable_memory_increase: 0 scopes: {} cascading_vote_stable_everything: total: - instructions: 160682199 + instructions: 160393192 heap_increase: 0 stable_memory_increase: 0 scopes: {} cascading_vote_stable_neurons_with_heap_index: total: - instructions: 138319942 + instructions: 138194675 heap_increase: 0 stable_memory_increase: 0 scopes: {} centralized_following_all_stable: total: - instructions: 66145106 + instructions: 66026878 heap_increase: 0 stable_memory_increase: 0 scopes: {} compute_ballots_for_new_proposal_with_stable_neurons: total: - instructions: 1790000 + instructions: 1735641 + heap_increase: 0 + stable_memory_increase: 0 + scopes: {} + draw_maturity_from_neurons_fund_heap: + total: + instructions: 7268033 + heap_increase: 0 + stable_memory_increase: 0 + scopes: {} + draw_maturity_from_neurons_fund_stable: + total: + instructions: 56530796 heap_increase: 0 stable_memory_increase: 0 scopes: {} @@ -85,31 +97,31 @@ benches: scopes: {} neuron_metrics_calculation_heap: total: - instructions: 528806 + instructions: 536802 heap_increase: 0 stable_memory_increase: 0 scopes: {} neuron_metrics_calculation_stable: total: - instructions: 1890000 + instructions: 1872149 heap_increase: 0 stable_memory_increase: 0 scopes: {} range_neurons_performance: total: - instructions: 48528017 + instructions: 47346463 heap_increase: 0 stable_memory_increase: 0 scopes: {} single_vote_all_stable: total: - instructions: 364769 + instructions: 364027 heap_increase: 0 stable_memory_increase: 0 scopes: {} update_recent_ballots_stable_memory: total: - instructions: 13455039 + instructions: 13388428 heap_increase: 0 stable_memory_increase: 0 scopes: {} diff --git a/rs/nns/governance/src/neuron_store/benches.rs b/rs/nns/governance/src/neuron_store/benches.rs index e0d592d74c2..76784117942 100644 --- a/rs/nns/governance/src/neuron_store/benches.rs +++ b/rs/nns/governance/src/neuron_store/benches.rs @@ -2,6 +2,7 @@ use super::*; use crate::{ governance::{MAX_FOLLOWEES_PER_TOPIC, MAX_NEURON_RECENT_BALLOTS, MAX_NUM_HOT_KEYS_PER_NEURON}, neuron::{DissolveStateAndAge, NeuronBuilder}, + neurons_fund::{NeuronsFund, NeuronsFundNeuronPortion, NeuronsFundSnapshot}, now_seconds, pb::v1::{neuron::Followees, BallotInfo, Vote}, 
temporarily_disable_active_neurons_in_stable_memory, @@ -13,6 +14,7 @@ use ic_nervous_system_common::E8; use ic_nns_common::pb::v1::ProposalId; use maplit::hashmap; use rand::{rngs::StdRng, RngCore, SeedableRng}; +use std::collections::BTreeSet; /// Whether the neuron should be stored in heap or stable storage. #[derive(Copy, Clone)] @@ -338,3 +340,63 @@ fn list_neurons_ready_to_unstake_maturity_stable() -> BenchResult { neuron_store.list_neurons_ready_to_unstake_maturity(now_seconds()); }) } + +fn build_neurons_fund_portion(neuron: &Neuron, amount_icp_e8s: u64) -> NeuronsFundNeuronPortion { + let maturity_equivalent_icp_e8s = neuron.maturity_e8s_equivalent; + assert!(amount_icp_e8s <= maturity_equivalent_icp_e8s); + let id = neuron.id(); + let controller = neuron.controller(); + let hotkeys = neuron.hot_keys.clone(); + let is_capped = false; + + NeuronsFundNeuronPortion { + id, + amount_icp_e8s, + maturity_equivalent_icp_e8s, + controller, + hotkeys, + is_capped, + } +} + +#[bench(raw)] +fn draw_maturity_from_neurons_fund_heap() -> BenchResult { + let _t = temporarily_disable_active_neurons_in_stable_memory(); + let mut rng = new_rng(); + let mut neuron_store = NeuronStore::new(BTreeMap::new()); + let mut neurons_fund_neurons = BTreeSet::new(); + for _ in 0..100 { + let mut neuron = build_neuron(&mut rng, NeuronLocation::Heap, NeuronSize::Typical); + neuron.maturity_e8s_equivalent = 2_000_000_000; + neurons_fund_neurons.insert(build_neurons_fund_portion(&neuron, 1_000_000_000)); + neuron_store.add_neuron(neuron).unwrap(); + } + let neurons_fund_snapshot = NeuronsFundSnapshot::new(neurons_fund_neurons); + + bench_fn(|| { + neuron_store + .draw_maturity_from_neurons_fund(&neurons_fund_snapshot) + .unwrap(); + }) +} + +#[bench(raw)] +fn draw_maturity_from_neurons_fund_stable() -> BenchResult { + let _t = temporarily_enable_active_neurons_in_stable_memory(); + let mut rng = new_rng(); + let mut neuron_store = NeuronStore::new(BTreeMap::new()); + let mut neurons_fund_neurons = BTreeSet::new(); + for _ in 0..100 { + let mut neuron = build_neuron(&mut rng, NeuronLocation::Heap, NeuronSize::Typical); + neuron.maturity_e8s_equivalent = 2_000_000_000; + neurons_fund_neurons.insert(build_neurons_fund_portion(&neuron, 1_000_000_000)); + neuron_store.add_neuron(neuron).unwrap(); + } + let neurons_fund_snapshot = NeuronsFundSnapshot::new(neurons_fund_neurons); + + bench_fn(|| { + neuron_store + .draw_maturity_from_neurons_fund(&neurons_fund_snapshot) + .unwrap(); + }) +} From 22dd92067ad8bbbde6d7eae266eb46b28031e333 Mon Sep 17 00:00:00 2001 From: jasonz-dfinity <133917836+jasonz-dfinity@users.noreply.github.com> Date: Wed, 4 Dec 2024 12:35:47 -0800 Subject: [PATCH 11/23] feat(nns): Improve drawing/refunding neurons fund maturity when neurons in stable memory feature is enabled (#2975) # Why Drawing/refunding neurons fund maturity iterates through many neurons and modify them, and can be more expensive when neurons are all moved to stable structures. Therefore we want to improve the performance by accessing the main section directly. 
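The idea, sketched below with simplified stand-in types (the real change operates on a stable map from `NeuronId` to `AbridgedNeuron` via the new `with_main_part_mut` shown in the diff), is to read, mutate, and write back only the main part of a neuron instead of materializing the full neuron:

```rust
use std::collections::BTreeMap;

// Simplified stand-ins; the real store keeps the main part in a StableBTreeMap
// and holds the other neuron sections (followees, ballots, ...) separately.
#[derive(Clone, Default)]
struct AbridgedNeuron {
    maturity_e8s_equivalent: u64,
}

struct StableNeuronStoreSketch {
    main: BTreeMap<u64, AbridgedNeuron>,
}

impl StableNeuronStoreSketch {
    /// Mutates only the main part of one neuron: get it, apply the closure,
    /// write it back. No other section of the neuron is touched.
    fn with_main_part_mut<R>(
        &mut self,
        neuron_id: u64,
        f: impl FnOnce(&mut AbridgedNeuron) -> R,
    ) -> Result<R, String> {
        let mut main = self
            .main
            .get(&neuron_id)
            .cloned()
            .ok_or_else(|| format!("neuron {neuron_id} not found"))?;
        let result = f(&mut main);
        self.main.insert(neuron_id, main);
        Ok(result)
    }
}

fn main() {
    let mut store = StableNeuronStoreSketch {
        main: BTreeMap::from([(1, AbridgedNeuron::default())]),
    };
    // Add maturity without deserializing the neuron's auxiliary sections.
    store
        .with_main_part_mut(1, |n| n.maturity_e8s_equivalent += 100)
        .unwrap();
    assert_eq!(store.main[&1].maturity_e8s_equivalent, 100);
}
```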
# What * Refactor the existing logic to modify maturity into `modify_neuron_maturity` * Add the `StableNeuronStore::with_main_part_mut` for modifying the main neuron section directly * Add the `self.use_stable_memory_for_all_neurons` to modify neuron maturity based on its location * The benchmark `draw_maturity_from_neurons_fund_stable` improved by 81.8% --- .../governance/canbench/canbench_results.yml | 42 +++++++-------- rs/nns/governance/src/neuron_store.rs | 52 +++++++++++++++++++ rs/nns/governance/src/neurons_fund.rs | 22 ++++---- rs/nns/governance/src/storage/neurons.rs | 18 +++++++ 4 files changed, 101 insertions(+), 33 deletions(-) diff --git a/rs/nns/governance/canbench/canbench_results.yml b/rs/nns/governance/canbench/canbench_results.yml index be48be33bb9..e15d386e650 100644 --- a/rs/nns/governance/canbench/canbench_results.yml +++ b/rs/nns/governance/canbench/canbench_results.yml @@ -1,127 +1,127 @@ benches: add_neuron_active_maximum: total: - instructions: 36183557 + instructions: 36059483 heap_increase: 1 stable_memory_increase: 0 scopes: {} add_neuron_active_typical: total: - instructions: 1835560 + instructions: 1830111 heap_increase: 0 stable_memory_increase: 0 scopes: {} add_neuron_inactive_maximum: total: - instructions: 96170368 + instructions: 96070090 heap_increase: 1 stable_memory_increase: 0 scopes: {} add_neuron_inactive_typical: total: - instructions: 7370817 + instructions: 7372887 heap_increase: 0 stable_memory_increase: 0 scopes: {} cascading_vote_all_heap: total: - instructions: 31843424 + instructions: 31756954 heap_increase: 0 stable_memory_increase: 0 scopes: {} cascading_vote_heap_neurons_stable_index: total: - instructions: 54151176 + instructions: 54261919 heap_increase: 0 stable_memory_increase: 0 scopes: {} cascading_vote_stable_everything: total: - instructions: 160393192 + instructions: 160631204 heap_increase: 0 stable_memory_increase: 0 scopes: {} cascading_vote_stable_neurons_with_heap_index: total: - instructions: 138194675 + instructions: 138235474 heap_increase: 0 stable_memory_increase: 0 scopes: {} centralized_following_all_stable: total: - instructions: 66026878 + instructions: 66109851 heap_increase: 0 stable_memory_increase: 0 scopes: {} compute_ballots_for_new_proposal_with_stable_neurons: total: - instructions: 1735641 + instructions: 1826966 heap_increase: 0 stable_memory_increase: 0 scopes: {} draw_maturity_from_neurons_fund_heap: total: - instructions: 7268033 + instructions: 7244019 heap_increase: 0 stable_memory_increase: 0 scopes: {} draw_maturity_from_neurons_fund_stable: total: - instructions: 56530796 + instructions: 10256384 heap_increase: 0 stable_memory_increase: 0 scopes: {} list_neurons_ready_to_unstake_maturity_heap: total: - instructions: 471240 + instructions: 474237 heap_increase: 0 stable_memory_increase: 0 scopes: {} list_neurons_ready_to_unstake_maturity_stable: total: - instructions: 36291394 + instructions: 37619197 heap_increase: 0 stable_memory_increase: 0 scopes: {} list_ready_to_spawn_neuron_ids_heap: total: - instructions: 459353 + instructions: 462350 heap_increase: 0 stable_memory_increase: 0 scopes: {} list_ready_to_spawn_neuron_ids_stable: total: - instructions: 36280095 + instructions: 37607898 heap_increase: 0 stable_memory_increase: 0 scopes: {} neuron_metrics_calculation_heap: total: - instructions: 536802 + instructions: 529394 heap_increase: 0 stable_memory_increase: 0 scopes: {} neuron_metrics_calculation_stable: total: - instructions: 1872149 + instructions: 1861928 heap_increase: 0 
stable_memory_increase: 0 scopes: {} range_neurons_performance: total: - instructions: 47346463 + instructions: 48539417 heap_increase: 0 stable_memory_increase: 0 scopes: {} single_vote_all_stable: total: - instructions: 364027 + instructions: 364631 heap_increase: 0 stable_memory_increase: 0 scopes: {} update_recent_ballots_stable_memory: total: - instructions: 13388428 + instructions: 13454595 heap_increase: 0 stable_memory_increase: 0 scopes: {} diff --git a/rs/nns/governance/src/neuron_store.rs b/rs/nns/governance/src/neuron_store.rs index 39ec0c52662..f81af03f855 100644 --- a/rs/nns/governance/src/neuron_store.rs +++ b/rs/nns/governance/src/neuron_store.rs @@ -66,6 +66,9 @@ pub enum NeuronStoreError { neuron_id: NeuronId, }, NeuronIdGenerationUnavailable, + InvalidOperation { + reason: String, + }, } impl NeuronStoreError { @@ -170,6 +173,9 @@ impl Display for NeuronStoreError { Likely due to uninitialized RNG." ) } + NeuronStoreError::InvalidOperation { reason } => { + write!(f, "Invalid operation: {}", reason) + } } } } @@ -187,6 +193,7 @@ impl From for GovernanceError { NeuronStoreError::InvalidData { .. } => ErrorType::PreconditionFailed, NeuronStoreError::NotAuthorizedToGetFullNeuron { .. } => ErrorType::NotAuthorized, NeuronStoreError::NeuronIdGenerationUnavailable => ErrorType::Unavailable, + NeuronStoreError::InvalidOperation { .. } => ErrorType::PreconditionFailed, }; GovernanceError::new_with_message(error_type, value.to_string()) } @@ -1186,6 +1193,51 @@ impl NeuronStore { Ok(()) } + /// Modifies the maturity of the neuron. + pub fn modify_neuron_maturity( + &mut self, + neuron_id: &NeuronId, + modify: impl FnOnce(u64) -> Result, + ) -> Result<(), NeuronStoreError> { + // When `use_stable_memory_for_all_neurons` is true, all the neurons SHOULD be in the stable + // neuron store. Therefore, there is no need to move the neuron between heap/stable as it + // might become active/inactive due to the change of maturity. + if self.use_stable_memory_for_all_neurons { + // The validity of this approach is based on the assumption that none of the neuron + // indexes can be affected by its maturity. + if self.heap_neurons.contains_key(&neuron_id.id) { + self.heap_neurons + .get_mut(&neuron_id.id) + .map(|neuron| -> Result<(), String> { + let new_maturity = modify(neuron.maturity_e8s_equivalent)?; + neuron.maturity_e8s_equivalent = new_maturity; + Ok(()) + }) + .transpose() + .map_err(|e| NeuronStoreError::InvalidData { reason: e })? + .ok_or_else(|| NeuronStoreError::not_found(*neuron_id)) + } else { + with_stable_neuron_store_mut(|stable_neuron_store| { + stable_neuron_store + .with_main_part_mut(*neuron_id, |neuron| -> Result<(), String> { + let new_maturity = modify(neuron.maturity_e8s_equivalent)?; + neuron.maturity_e8s_equivalent = new_maturity; + Ok(()) + })? + .map_err(|e| NeuronStoreError::InvalidData { reason: e })?; + Ok(()) + }) + } + } else { + self.with_neuron_mut(neuron_id, |neuron| { + let new_maturity = modify(neuron.maturity_e8s_equivalent) + .map_err(|reason| NeuronStoreError::InvalidData { reason })?; + neuron.maturity_e8s_equivalent = new_maturity; + Ok(()) + })? + } + } + // Below are indexes related methods. They don't have a unified interface yet, but NNS1-2507 will change that. // Read methods for indexes. 
diff --git a/rs/nns/governance/src/neurons_fund.rs b/rs/nns/governance/src/neurons_fund.rs index cda56fcb7c3..e60601ef181 100644 --- a/rs/nns/governance/src/neurons_fund.rs +++ b/rs/nns/governance/src/neurons_fund.rs @@ -1945,21 +1945,19 @@ fn apply_neurons_fund_snapshot( ) -> Result<(), String> { let mut neurons_fund_action_error = vec![]; for (neuron_id, neuron_delta) in snapshot.neurons().iter() { - let refund_result = neuron_store.with_neuron_mut(neuron_id, |nns_neuron| { - let old_nns_neuron_maturity_e8s = nns_neuron.maturity_e8s_equivalent; - let maturity_delta_e8s = neuron_delta.amount_icp_e8s; - nns_neuron.maturity_e8s_equivalent = action - .checked_apply(old_nns_neuron_maturity_e8s, maturity_delta_e8s) - .unwrap_or_else(|verb| { - neurons_fund_action_error.push(format!( + let action_result = neuron_store.modify_neuron_maturity(neuron_id, |old_maturity| { + action + .checked_apply(old_maturity, neuron_delta.amount_icp_e8s) + .map_err(|verb| { + let maturity_delta_e8s = neuron_delta.amount_icp_e8s; + format!( "u64 overflow while {verb} maturity from {neuron_id:?} \ - (*kept* original maturity e8s = {old_nns_neuron_maturity_e8s}; \ + (*kept* original maturity e8s = {old_maturity}; \ requested maturity delta e8s = {maturity_delta_e8s})." - )); - old_nns_neuron_maturity_e8s - }); + ) + }) }); - if let Err(with_neuron_mut_error) = refund_result { + if let Err(with_neuron_mut_error) = action_result { neurons_fund_action_error.push(with_neuron_mut_error.to_string()); } } diff --git a/rs/nns/governance/src/storage/neurons.rs b/rs/nns/governance/src/storage/neurons.rs index fa5394fe632..ddbcfc50421 100644 --- a/rs/nns/governance/src/storage/neurons.rs +++ b/rs/nns/governance/src/storage/neurons.rs @@ -288,6 +288,24 @@ where Ok(()) } + /// Updates the main part of an existing neuron. + pub fn with_main_part_mut( + &mut self, + neuron_id: NeuronId, + f: impl FnOnce(&mut AbridgedNeuron) -> R, + ) -> Result { + let mut main_neuron_part = self + .main + .get(&neuron_id) + // Deal with no entry by blaming it on the caller. + .ok_or_else(|| NeuronStoreError::not_found(neuron_id))?; + + let result = f(&mut main_neuron_part); + self.main.insert(neuron_id, main_neuron_part); + + Ok(result) + } + /// Changes an existing entry. /// /// If the entry does not already exist, returns a NotFound Err. From c07e98caa47d305504307ae2abfc10a1868a1f88 Mon Sep 17 00:00:00 2001 From: jasonz-dfinity <133917836+jasonz-dfinity@users.noreply.github.com> Date: Wed, 4 Dec 2024 12:58:20 -0800 Subject: [PATCH 12/23] chore: Bump canbench and pocket ic mainnet version (#2974) Bumping canbench and the pocket-ic-mainnet versions so that the performance tests based on canbench can be run successfully. 
--- Cargo.Bazel.Fuzzing.json.lock | 269 +++++++++++++--------------------- Cargo.Bazel.Fuzzing.toml.lock | 57 +++---- Cargo.Bazel.json.lock | 269 +++++++++++++--------------------- Cargo.Bazel.toml.lock | 57 +++---- MODULE.bazel | 4 +- bazel/external_crates.bzl | 4 +- 6 files changed, 250 insertions(+), 410 deletions(-) diff --git a/Cargo.Bazel.Fuzzing.json.lock b/Cargo.Bazel.Fuzzing.json.lock index 39bd2bcf3d2..6967f6c5285 100644 --- a/Cargo.Bazel.Fuzzing.json.lock +++ b/Cargo.Bazel.Fuzzing.json.lock @@ -1,5 +1,5 @@ { - "checksum": "3fbe6f0bc1150dc7e649e3be6d14ab3560751213130f5f2404d9e38092e2d002", + "checksum": "2afa4380416577e8ad0b8c3fe342e4dd718e3d617b3c81f8134ce7e0b9ba9f14", "crates": { "abnf 0.12.0": { "name": "abnf", @@ -9948,14 +9948,14 @@ ], "license_file": "LICENSE-APACHE" }, - "canbench 0.1.7": { + "canbench 0.1.8": { "name": "canbench", - "version": "0.1.7", + "version": "0.1.8", "package_url": "https://github.com/dfinity/canbench", "repository": { "Http": { - "url": "https://static.crates.io/crates/canbench/0.1.7/download", - "sha256": "bf010ed5b327776525d545cef0fd17ffec73af71eb1b304ca11df3494ed65c31" + "url": "https://static.crates.io/crates/canbench/0.1.8/download", + "sha256": "cb548f9e006ad29b160d37e07435c499af7d2741918e18d95ddc87dfe97a0b8d" } }, "targets": [ @@ -9992,7 +9992,7 @@ "deps": { "common": [ { - "id": "canbench-rs 0.1.7", + "id": "canbench-rs 0.1.8", "target": "canbench_rs" }, { @@ -10016,7 +10016,7 @@ "target": "hex" }, { - "id": "pocket-ic 5.0.0", + "id": "pocket-ic 6.0.0", "target": "pocket_ic" }, { @@ -10051,7 +10051,7 @@ "selects": {} }, "edition": "2021", - "version": "0.1.7" + "version": "0.1.8" }, "license": "Apache-2.0", "license_ids": [ @@ -10059,14 +10059,14 @@ ], "license_file": null }, - "canbench-rs 0.1.7": { + "canbench-rs 0.1.8": { "name": "canbench-rs", - "version": "0.1.7", + "version": "0.1.8", "package_url": "https://github.com/dfinity/canbench", "repository": { "Http": { - "url": "https://static.crates.io/crates/canbench-rs/0.1.7/download", - "sha256": "e85a8f1ee95044a770b3d5166a12f55814283cb3aed71b81439dc59960ab76c1" + "url": "https://static.crates.io/crates/canbench-rs/0.1.8/download", + "sha256": "497d900e11ab1891dd9743dd45dbeaada540ce323aa1adc7fc0ce1da2c6e86ff" } }, "targets": [ @@ -10109,13 +10109,13 @@ "proc_macro_deps": { "common": [ { - "id": "canbench-rs-macros 0.1.7", + "id": "canbench-rs-macros 0.1.8", "target": "canbench_rs_macros" } ], "selects": {} }, - "version": "0.1.7" + "version": "0.1.8" }, "license": "Apache-2.0", "license_ids": [ @@ -10123,14 +10123,14 @@ ], "license_file": null }, - "canbench-rs-macros 0.1.7": { + "canbench-rs-macros 0.1.8": { "name": "canbench-rs-macros", - "version": "0.1.7", + "version": "0.1.8", "package_url": "https://github.com/dfinity/canbench", "repository": { "Http": { - "url": "https://static.crates.io/crates/canbench-rs-macros/0.1.7/download", - "sha256": "37aa9dbb190b03569ab14aadf669884a331712d54462c5a6c5b86c9867fe4e65" + "url": "https://static.crates.io/crates/canbench-rs-macros/0.1.8/download", + "sha256": "5a5509bcfe6eeb86f057d46fbf20a2ba6b6bf9a1099b053a8f491cd7a909dfa6" } }, "targets": [ @@ -10170,7 +10170,7 @@ "selects": {} }, "edition": "2021", - "version": "0.1.7" + "version": "0.1.8" }, "license": "Apache-2.0", "license_ids": [ @@ -18565,11 +18565,11 @@ "target": "cached" }, { - "id": "canbench 0.1.7", + "id": "canbench 0.1.8", "target": "canbench" }, { - "id": "canbench-rs 0.1.7", + "id": "canbench-rs 0.1.8", "target": "canbench_rs" }, { @@ -31438,74 +31438,6 @@ ], "license_file": 
"LICENSE" }, - "ic-cdk 0.13.5": { - "name": "ic-cdk", - "version": "0.13.5", - "package_url": "https://github.com/dfinity/cdk-rs", - "repository": { - "Http": { - "url": "https://static.crates.io/crates/ic-cdk/0.13.5/download", - "sha256": "3b1da6a25b045f9da3c9459c0cb2b0700ac368ee16382975a17185a23b9c18ab" - } - }, - "targets": [ - { - "Library": { - "crate_name": "ic_cdk", - "crate_root": "src/lib.rs", - "srcs": { - "allow_empty": true, - "include": [ - "**/*.rs" - ] - } - } - } - ], - "library_target_name": "ic_cdk", - "common_attrs": { - "compile_data_glob": [ - "**" - ], - "deps": { - "common": [ - { - "id": "candid 0.10.10", - "target": "candid" - }, - { - "id": "ic0 0.21.1", - "target": "ic0" - }, - { - "id": "serde 1.0.214", - "target": "serde" - }, - { - "id": "serde_bytes 0.11.15", - "target": "serde_bytes" - } - ], - "selects": {} - }, - "edition": "2021", - "proc_macro_deps": { - "common": [ - { - "id": "ic-cdk-macros 0.13.2", - "target": "ic_cdk_macros" - } - ], - "selects": {} - }, - "version": "0.13.5" - }, - "license": "Apache-2.0", - "license_ids": [ - "Apache-2.0" - ], - "license_file": "LICENSE" - }, "ic-cdk 0.14.1": { "name": "ic-cdk", "version": "0.14.1", @@ -31844,73 +31776,6 @@ ], "license_file": "LICENSE" }, - "ic-cdk-macros 0.13.2": { - "name": "ic-cdk-macros", - "version": "0.13.2", - "package_url": "https://github.com/dfinity/cdk-rs", - "repository": { - "Http": { - "url": "https://static.crates.io/crates/ic-cdk-macros/0.13.2/download", - "sha256": "a45800053d80a6df839a71aaea5797e723188c0b992618208ca3b941350c7355" - } - }, - "targets": [ - { - "ProcMacro": { - "crate_name": "ic_cdk_macros", - "crate_root": "src/lib.rs", - "srcs": { - "allow_empty": true, - "include": [ - "**/*.rs" - ] - } - } - } - ], - "library_target_name": "ic_cdk_macros", - "common_attrs": { - "compile_data_glob": [ - "**" - ], - "deps": { - "common": [ - { - "id": "candid 0.10.10", - "target": "candid" - }, - { - "id": "proc-macro2 1.0.89", - "target": "proc_macro2" - }, - { - "id": "quote 1.0.37", - "target": "quote" - }, - { - "id": "serde 1.0.214", - "target": "serde" - }, - { - "id": "serde_tokenstream 0.1.7", - "target": "serde_tokenstream" - }, - { - "id": "syn 1.0.109", - "target": "syn" - } - ], - "selects": {} - }, - "edition": "2021", - "version": "0.13.2" - }, - "license": "Apache-2.0", - "license_ids": [ - "Apache-2.0" - ], - "license_file": "LICENSE" - }, "ic-cdk-macros 0.14.0": { "name": "ic-cdk-macros", "version": "0.14.0", @@ -50139,14 +50004,14 @@ ], "license_file": "LICENSE" }, - "pocket-ic 5.0.0": { + "pocket-ic 6.0.0": { "name": "pocket-ic", - "version": "5.0.0", + "version": "6.0.0", "package_url": "https://github.com/dfinity/ic", "repository": { "Http": { - "url": "https://static.crates.io/crates/pocket-ic/5.0.0/download", - "sha256": "beff607d4dbebff8d003453ced669d2645e905de496ca93713f3d47633357e6c" + "url": "https://static.crates.io/crates/pocket-ic/6.0.0/download", + "sha256": "124a2380ca6f557adf8b02517cbfd2f564113230e14cda6f6aadd3dfe156293c" } }, "targets": [ @@ -50183,8 +50048,12 @@ "target": "hex" }, { - "id": "ic-cdk 0.13.5", - "target": "ic_cdk" + "id": "ic-certification 2.6.0", + "target": "ic_certification" + }, + { + "id": "ic-transport-types 0.37.1", + "target": "ic_transport_types" }, { "id": "reqwest 0.12.9", @@ -50202,6 +50071,10 @@ "id": "serde_bytes 0.11.15", "target": "serde_bytes" }, + { + "id": "serde_cbor 0.11.2", + "target": "serde_cbor" + }, { "id": "serde_json 1.0.132", "target": "serde_json" @@ -50214,6 +50087,14 @@ "id": "slog 2.7.0", "target": 
"slog" }, + { + "id": "strum 0.26.3", + "target": "strum" + }, + { + "id": "thiserror 1.0.68", + "target": "thiserror" + }, { "id": "tokio 1.41.1", "target": "tokio" @@ -50231,10 +50112,26 @@ "target": "tracing_subscriber" } ], - "selects": {} + "selects": { + "cfg(windows)": [ + { + "id": "wslpath 0.0.2", + "target": "wslpath" + } + ] + } }, "edition": "2021", - "version": "5.0.0" + "proc_macro_deps": { + "common": [ + { + "id": "strum_macros 0.26.4", + "target": "strum_macros" + } + ], + "selects": {} + }, + "version": "6.0.0" }, "license": "Apache-2.0", "license_ids": [ @@ -85242,6 +85139,44 @@ ], "license_file": "LICENSE" }, + "wslpath 0.0.2": { + "name": "wslpath", + "version": "0.0.2", + "package_url": "https://github.com/pratikpc/wsl-path-rust", + "repository": { + "Http": { + "url": "https://static.crates.io/crates/wslpath/0.0.2/download", + "sha256": "04a2ecdf2cc4d33a6a93d71bcfbc00bb1f635cdb8029a2cc0709204a045ec7a3" + } + }, + "targets": [ + { + "Library": { + "crate_name": "wslpath", + "crate_root": "src/lib.rs", + "srcs": { + "allow_empty": true, + "include": [ + "**/*.rs" + ] + } + } + } + ], + "library_target_name": "wslpath", + "common_attrs": { + "compile_data_glob": [ + "**" + ], + "edition": "2018", + "version": "0.0.2" + }, + "license": "MIT", + "license_ids": [ + "MIT" + ], + "license_file": "LICENSE" + }, "wycheproof 0.6.0": { "name": "wycheproof", "version": "0.6.0", @@ -86578,7 +86513,7 @@ } }, "binary_crates": [ - "canbench 0.1.7", + "canbench 0.1.8", "ic-wasm 0.8.4", "metrics-proxy 0.1.0" ], @@ -87686,8 +87621,8 @@ "byteorder 1.5.0", "bytes 1.8.0", "cached 0.49.2", - "canbench 0.1.7", - "canbench-rs 0.1.7", + "canbench 0.1.8", + "canbench-rs 0.1.8", "candid 0.10.10", "candid_parser 0.1.2", "cargo_metadata 0.14.2", diff --git a/Cargo.Bazel.Fuzzing.toml.lock b/Cargo.Bazel.Fuzzing.toml.lock index 75d9febc2e9..9375a7a18ae 100644 --- a/Cargo.Bazel.Fuzzing.toml.lock +++ b/Cargo.Bazel.Fuzzing.toml.lock @@ -1677,9 +1677,9 @@ dependencies = [ [[package]] name = "canbench" -version = "0.1.7" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf010ed5b327776525d545cef0fd17ffec73af71eb1b304ca11df3494ed65c31" +checksum = "cb548f9e006ad29b160d37e07435c499af7d2741918e18d95ddc87dfe97a0b8d" dependencies = [ "canbench-rs", "candid", @@ -1699,9 +1699,9 @@ dependencies = [ [[package]] name = "canbench-rs" -version = "0.1.7" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e85a8f1ee95044a770b3d5166a12f55814283cb3aed71b81439dc59960ab76c1" +checksum = "497d900e11ab1891dd9743dd45dbeaada540ce323aa1adc7fc0ce1da2c6e86ff" dependencies = [ "canbench-rs-macros", "candid", @@ -1711,9 +1711,9 @@ dependencies = [ [[package]] name = "canbench-rs-macros" -version = "0.1.7" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37aa9dbb190b03569ab14aadf669884a331712d54462c5a6c5b86c9867fe4e65" +checksum = "5a5509bcfe6eeb86f057d46fbf20a2ba6b6bf9a1099b053a8f491cd7a909dfa6" dependencies = [ "proc-macro2", "quote", @@ -5105,19 +5105,6 @@ dependencies = [ "serde_bytes", ] -[[package]] -name = "ic-cdk" -version = "0.13.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b1da6a25b045f9da3c9459c0cb2b0700ac368ee16382975a17185a23b9c18ab" -dependencies = [ - "candid", - "ic-cdk-macros 0.13.2", - "ic0 0.21.1", - "serde", - "serde_bytes", -] - [[package]] name = "ic-cdk" version = "0.14.1" @@ -5185,20 +5172,6 @@ dependencies = [ "syn 1.0.109", ] 
-[[package]] -name = "ic-cdk-macros" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a45800053d80a6df839a71aaea5797e723188c0b992618208ca3b941350c7355" -dependencies = [ - "candid", - "proc-macro2", - "quote", - "serde", - "serde_tokenstream 0.1.7", - "syn 1.0.109", -] - [[package]] name = "ic-cdk-macros" version = "0.14.0" @@ -8168,25 +8141,31 @@ dependencies = [ [[package]] name = "pocket-ic" -version = "5.0.0" +version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "beff607d4dbebff8d003453ced669d2645e905de496ca93713f3d47633357e6c" +checksum = "124a2380ca6f557adf8b02517cbfd2f564113230e14cda6f6aadd3dfe156293c" dependencies = [ "base64 0.13.1", "candid", "hex", - "ic-cdk 0.13.5", + "ic-certification", + "ic-transport-types", "reqwest 0.12.9", "schemars", "serde", "serde_bytes", + "serde_cbor", "serde_json", "sha2 0.10.8", "slog", + "strum 0.26.3", + "strum_macros 0.26.4", + "thiserror 1.0.68", "tokio", "tracing", "tracing-appender", "tracing-subscriber", + "wslpath", ] [[package]] @@ -13098,6 +13077,12 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8dab7ac864710bdea6594becbea5b5050333cf34fefb0dc319567eb347950d4" +[[package]] +name = "wslpath" +version = "0.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04a2ecdf2cc4d33a6a93d71bcfbc00bb1f635cdb8029a2cc0709204a045ec7a3" + [[package]] name = "wycheproof" version = "0.6.0" diff --git a/Cargo.Bazel.json.lock b/Cargo.Bazel.json.lock index 1aaef252187..57dab66de73 100644 --- a/Cargo.Bazel.json.lock +++ b/Cargo.Bazel.json.lock @@ -1,5 +1,5 @@ { - "checksum": "a3824c6dae08a250d31e5907e2eff62575b38fd163dbb7e1c0f0ba342d66dc29", + "checksum": "9504732445804a8b57396f4c87d76da0f2cd5c9d2a16e0a13b9f08cc4dd54070", "crates": { "abnf 0.12.0": { "name": "abnf", @@ -9865,14 +9865,14 @@ ], "license_file": "LICENSE-APACHE" }, - "canbench 0.1.7": { + "canbench 0.1.8": { "name": "canbench", - "version": "0.1.7", + "version": "0.1.8", "package_url": "https://github.com/dfinity/canbench", "repository": { "Http": { - "url": "https://static.crates.io/crates/canbench/0.1.7/download", - "sha256": "bf010ed5b327776525d545cef0fd17ffec73af71eb1b304ca11df3494ed65c31" + "url": "https://static.crates.io/crates/canbench/0.1.8/download", + "sha256": "cb548f9e006ad29b160d37e07435c499af7d2741918e18d95ddc87dfe97a0b8d" } }, "targets": [ @@ -9909,7 +9909,7 @@ "deps": { "common": [ { - "id": "canbench-rs 0.1.7", + "id": "canbench-rs 0.1.8", "target": "canbench_rs" }, { @@ -9933,7 +9933,7 @@ "target": "hex" }, { - "id": "pocket-ic 5.0.0", + "id": "pocket-ic 6.0.0", "target": "pocket_ic" }, { @@ -9968,7 +9968,7 @@ "selects": {} }, "edition": "2021", - "version": "0.1.7" + "version": "0.1.8" }, "license": "Apache-2.0", "license_ids": [ @@ -9976,14 +9976,14 @@ ], "license_file": null }, - "canbench-rs 0.1.7": { + "canbench-rs 0.1.8": { "name": "canbench-rs", - "version": "0.1.7", + "version": "0.1.8", "package_url": "https://github.com/dfinity/canbench", "repository": { "Http": { - "url": "https://static.crates.io/crates/canbench-rs/0.1.7/download", - "sha256": "e85a8f1ee95044a770b3d5166a12f55814283cb3aed71b81439dc59960ab76c1" + "url": "https://static.crates.io/crates/canbench-rs/0.1.8/download", + "sha256": "497d900e11ab1891dd9743dd45dbeaada540ce323aa1adc7fc0ce1da2c6e86ff" } }, "targets": [ @@ -10026,13 +10026,13 @@ "proc_macro_deps": { "common": [ { - "id": "canbench-rs-macros 0.1.7", + "id": "canbench-rs-macros 
0.1.8", "target": "canbench_rs_macros" } ], "selects": {} }, - "version": "0.1.7" + "version": "0.1.8" }, "license": "Apache-2.0", "license_ids": [ @@ -10040,14 +10040,14 @@ ], "license_file": null }, - "canbench-rs-macros 0.1.7": { + "canbench-rs-macros 0.1.8": { "name": "canbench-rs-macros", - "version": "0.1.7", + "version": "0.1.8", "package_url": "https://github.com/dfinity/canbench", "repository": { "Http": { - "url": "https://static.crates.io/crates/canbench-rs-macros/0.1.7/download", - "sha256": "37aa9dbb190b03569ab14aadf669884a331712d54462c5a6c5b86c9867fe4e65" + "url": "https://static.crates.io/crates/canbench-rs-macros/0.1.8/download", + "sha256": "5a5509bcfe6eeb86f057d46fbf20a2ba6b6bf9a1099b053a8f491cd7a909dfa6" } }, "targets": [ @@ -10087,7 +10087,7 @@ "selects": {} }, "edition": "2021", - "version": "0.1.7" + "version": "0.1.8" }, "license": "Apache-2.0", "license_ids": [ @@ -18393,11 +18393,11 @@ "target": "cached" }, { - "id": "canbench 0.1.7", + "id": "canbench 0.1.8", "target": "canbench" }, { - "id": "canbench-rs 0.1.7", + "id": "canbench-rs 0.1.8", "target": "canbench_rs" }, { @@ -31293,74 +31293,6 @@ ], "license_file": "LICENSE" }, - "ic-cdk 0.13.5": { - "name": "ic-cdk", - "version": "0.13.5", - "package_url": "https://github.com/dfinity/cdk-rs", - "repository": { - "Http": { - "url": "https://static.crates.io/crates/ic-cdk/0.13.5/download", - "sha256": "3b1da6a25b045f9da3c9459c0cb2b0700ac368ee16382975a17185a23b9c18ab" - } - }, - "targets": [ - { - "Library": { - "crate_name": "ic_cdk", - "crate_root": "src/lib.rs", - "srcs": { - "allow_empty": true, - "include": [ - "**/*.rs" - ] - } - } - } - ], - "library_target_name": "ic_cdk", - "common_attrs": { - "compile_data_glob": [ - "**" - ], - "deps": { - "common": [ - { - "id": "candid 0.10.10", - "target": "candid" - }, - { - "id": "ic0 0.21.1", - "target": "ic0" - }, - { - "id": "serde 1.0.214", - "target": "serde" - }, - { - "id": "serde_bytes 0.11.15", - "target": "serde_bytes" - } - ], - "selects": {} - }, - "edition": "2021", - "proc_macro_deps": { - "common": [ - { - "id": "ic-cdk-macros 0.13.2", - "target": "ic_cdk_macros" - } - ], - "selects": {} - }, - "version": "0.13.5" - }, - "license": "Apache-2.0", - "license_ids": [ - "Apache-2.0" - ], - "license_file": "LICENSE" - }, "ic-cdk 0.14.1": { "name": "ic-cdk", "version": "0.14.1", @@ -31699,73 +31631,6 @@ ], "license_file": "LICENSE" }, - "ic-cdk-macros 0.13.2": { - "name": "ic-cdk-macros", - "version": "0.13.2", - "package_url": "https://github.com/dfinity/cdk-rs", - "repository": { - "Http": { - "url": "https://static.crates.io/crates/ic-cdk-macros/0.13.2/download", - "sha256": "a45800053d80a6df839a71aaea5797e723188c0b992618208ca3b941350c7355" - } - }, - "targets": [ - { - "ProcMacro": { - "crate_name": "ic_cdk_macros", - "crate_root": "src/lib.rs", - "srcs": { - "allow_empty": true, - "include": [ - "**/*.rs" - ] - } - } - } - ], - "library_target_name": "ic_cdk_macros", - "common_attrs": { - "compile_data_glob": [ - "**" - ], - "deps": { - "common": [ - { - "id": "candid 0.10.10", - "target": "candid" - }, - { - "id": "proc-macro2 1.0.89", - "target": "proc_macro2" - }, - { - "id": "quote 1.0.37", - "target": "quote" - }, - { - "id": "serde 1.0.214", - "target": "serde" - }, - { - "id": "serde_tokenstream 0.1.7", - "target": "serde_tokenstream" - }, - { - "id": "syn 1.0.109", - "target": "syn" - } - ], - "selects": {} - }, - "edition": "2021", - "version": "0.13.2" - }, - "license": "Apache-2.0", - "license_ids": [ - "Apache-2.0" - ], - "license_file": 
"LICENSE" - }, "ic-cdk-macros 0.14.0": { "name": "ic-cdk-macros", "version": "0.14.0", @@ -49941,14 +49806,14 @@ ], "license_file": "LICENSE" }, - "pocket-ic 5.0.0": { + "pocket-ic 6.0.0": { "name": "pocket-ic", - "version": "5.0.0", + "version": "6.0.0", "package_url": "https://github.com/dfinity/ic", "repository": { "Http": { - "url": "https://static.crates.io/crates/pocket-ic/5.0.0/download", - "sha256": "beff607d4dbebff8d003453ced669d2645e905de496ca93713f3d47633357e6c" + "url": "https://static.crates.io/crates/pocket-ic/6.0.0/download", + "sha256": "124a2380ca6f557adf8b02517cbfd2f564113230e14cda6f6aadd3dfe156293c" } }, "targets": [ @@ -49985,8 +49850,12 @@ "target": "hex" }, { - "id": "ic-cdk 0.13.5", - "target": "ic_cdk" + "id": "ic-certification 2.6.0", + "target": "ic_certification" + }, + { + "id": "ic-transport-types 0.37.1", + "target": "ic_transport_types" }, { "id": "reqwest 0.12.9", @@ -50004,6 +49873,10 @@ "id": "serde_bytes 0.11.15", "target": "serde_bytes" }, + { + "id": "serde_cbor 0.11.2", + "target": "serde_cbor" + }, { "id": "serde_json 1.0.132", "target": "serde_json" @@ -50016,6 +49889,14 @@ "id": "slog 2.7.0", "target": "slog" }, + { + "id": "strum 0.26.3", + "target": "strum" + }, + { + "id": "thiserror 1.0.68", + "target": "thiserror" + }, { "id": "tokio 1.41.1", "target": "tokio" @@ -50033,10 +49914,26 @@ "target": "tracing_subscriber" } ], - "selects": {} + "selects": { + "cfg(windows)": [ + { + "id": "wslpath 0.0.2", + "target": "wslpath" + } + ] + } }, "edition": "2021", - "version": "5.0.0" + "proc_macro_deps": { + "common": [ + { + "id": "strum_macros 0.26.4", + "target": "strum_macros" + } + ], + "selects": {} + }, + "version": "6.0.0" }, "license": "Apache-2.0", "license_ids": [ @@ -85062,6 +84959,44 @@ ], "license_file": "LICENSE" }, + "wslpath 0.0.2": { + "name": "wslpath", + "version": "0.0.2", + "package_url": "https://github.com/pratikpc/wsl-path-rust", + "repository": { + "Http": { + "url": "https://static.crates.io/crates/wslpath/0.0.2/download", + "sha256": "04a2ecdf2cc4d33a6a93d71bcfbc00bb1f635cdb8029a2cc0709204a045ec7a3" + } + }, + "targets": [ + { + "Library": { + "crate_name": "wslpath", + "crate_root": "src/lib.rs", + "srcs": { + "allow_empty": true, + "include": [ + "**/*.rs" + ] + } + } + } + ], + "library_target_name": "wslpath", + "common_attrs": { + "compile_data_glob": [ + "**" + ], + "edition": "2018", + "version": "0.0.2" + }, + "license": "MIT", + "license_ids": [ + "MIT" + ], + "license_file": "LICENSE" + }, "wycheproof 0.6.0": { "name": "wycheproof", "version": "0.6.0", @@ -86536,7 +86471,7 @@ } }, "binary_crates": [ - "canbench 0.1.7", + "canbench 0.1.8", "ic-wasm 0.8.4", "metrics-proxy 0.1.0" ], @@ -87566,8 +87501,8 @@ "byteorder 1.5.0", "bytes 1.8.0", "cached 0.49.2", - "canbench 0.1.7", - "canbench-rs 0.1.7", + "canbench 0.1.8", + "canbench-rs 0.1.8", "candid 0.10.10", "candid_parser 0.1.2", "cargo_metadata 0.14.2", diff --git a/Cargo.Bazel.toml.lock b/Cargo.Bazel.toml.lock index 556b8d5bc51..19932d0dfb3 100644 --- a/Cargo.Bazel.toml.lock +++ b/Cargo.Bazel.toml.lock @@ -1678,9 +1678,9 @@ dependencies = [ [[package]] name = "canbench" -version = "0.1.7" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf010ed5b327776525d545cef0fd17ffec73af71eb1b304ca11df3494ed65c31" +checksum = "cb548f9e006ad29b160d37e07435c499af7d2741918e18d95ddc87dfe97a0b8d" dependencies = [ "canbench-rs", "candid", @@ -1700,9 +1700,9 @@ dependencies = [ [[package]] name = "canbench-rs" -version = "0.1.7" +version 
= "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e85a8f1ee95044a770b3d5166a12f55814283cb3aed71b81439dc59960ab76c1" +checksum = "497d900e11ab1891dd9743dd45dbeaada540ce323aa1adc7fc0ce1da2c6e86ff" dependencies = [ "canbench-rs-macros", "candid", @@ -1712,9 +1712,9 @@ dependencies = [ [[package]] name = "canbench-rs-macros" -version = "0.1.7" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37aa9dbb190b03569ab14aadf669884a331712d54462c5a6c5b86c9867fe4e65" +checksum = "5a5509bcfe6eeb86f057d46fbf20a2ba6b6bf9a1099b053a8f491cd7a909dfa6" dependencies = [ "proc-macro2", "quote", @@ -5095,19 +5095,6 @@ dependencies = [ "serde_bytes", ] -[[package]] -name = "ic-cdk" -version = "0.13.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b1da6a25b045f9da3c9459c0cb2b0700ac368ee16382975a17185a23b9c18ab" -dependencies = [ - "candid", - "ic-cdk-macros 0.13.2", - "ic0 0.21.1", - "serde", - "serde_bytes", -] - [[package]] name = "ic-cdk" version = "0.14.1" @@ -5175,20 +5162,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "ic-cdk-macros" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a45800053d80a6df839a71aaea5797e723188c0b992618208ca3b941350c7355" -dependencies = [ - "candid", - "proc-macro2", - "quote", - "serde", - "serde_tokenstream 0.1.7", - "syn 1.0.109", -] - [[package]] name = "ic-cdk-macros" version = "0.14.0" @@ -8158,25 +8131,31 @@ dependencies = [ [[package]] name = "pocket-ic" -version = "5.0.0" +version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "beff607d4dbebff8d003453ced669d2645e905de496ca93713f3d47633357e6c" +checksum = "124a2380ca6f557adf8b02517cbfd2f564113230e14cda6f6aadd3dfe156293c" dependencies = [ "base64 0.13.1", "candid", "hex", - "ic-cdk 0.13.5", + "ic-certification", + "ic-transport-types", "reqwest 0.12.9", "schemars", "serde", "serde_bytes", + "serde_cbor", "serde_json", "sha2 0.10.8", "slog", + "strum 0.26.3", + "strum_macros 0.26.4", + "thiserror 1.0.68", "tokio", "tracing", "tracing-appender", "tracing-subscriber", + "wslpath", ] [[package]] @@ -13093,6 +13072,12 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8dab7ac864710bdea6594becbea5b5050333cf34fefb0dc319567eb347950d4" +[[package]] +name = "wslpath" +version = "0.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04a2ecdf2cc4d33a6a93d71bcfbc00bb1f635cdb8029a2cc0709204a045ec7a3" + [[package]] name = "wycheproof" version = "0.6.0" diff --git a/MODULE.bazel b/MODULE.bazel index 1aee2d6536a..74ed610fb57 100644 --- a/MODULE.bazel +++ b/MODULE.bazel @@ -782,8 +782,8 @@ http_file( http_file( name = "pocket-ic-mainnet-gz", - sha256 = "454891cac2421f3f894759ec5e6b6e48fbb544d79197bc29b88d34b93d78a4f1", - url = "https://download.dfinity.systems/ic/52ebccfba8855e23dcad9657a8d6e6be01df71f9/binaries/x86_64-linux/pocket-ic.gz", + sha256 = "0935ee6ece312719aae4eabddec2dfc6af34d5edbddf4d3af53bccd1b3636044", + url = "https://download.dfinity.systems/ic/172f8c78653c93ad101af75b94251439b4ccf098/binaries/x86_64-linux/pocket-ic.gz", ) # Patches diff --git a/bazel/external_crates.bzl b/bazel/external_crates.bzl index bb628cdcc0b..a6b5e5803ab 100644 --- a/bazel/external_crates.bzl +++ b/bazel/external_crates.bzl @@ -329,10 +329,10 @@ def external_crates_repository(name, cargo_lockfile, lockfile, sanitizers_enable default_features = False, ), 
"canbench": crate.spec( - version = "^0.1.7", + version = "^0.1.8", ), "canbench-rs": crate.spec( - version = "^0.1.7", + version = "^0.1.8", ), "candid": crate.spec( version = "^0.10.6", From bed178d774e471e5665dc5dd402a8e9410ba623e Mon Sep 17 00:00:00 2001 From: Alin Sinpalean <58422065+alin-at-dfinity@users.noreply.github.com> Date: Wed, 4 Dec 2024 22:15:00 +0100 Subject: [PATCH 13/23] feat: Add various subnet info metrics (#2969) Export information about subnet type, size, features, initial notary delay, max block payload size when reading the registry at the top of the round. --- rs/messaging/src/message_routing.rs | 89 ++++++++++++++++++++++++++--- 1 file changed, 82 insertions(+), 7 deletions(-) diff --git a/rs/messaging/src/message_routing.rs b/rs/messaging/src/message_routing.rs index a17777ae989..e4486b18bc0 100644 --- a/rs/messaging/src/message_routing.rs +++ b/rs/messaging/src/message_routing.rs @@ -44,17 +44,17 @@ use ic_types::{ use ic_utils_thread::JoinOnDrop; #[cfg(test)] use mockall::automock; -use prometheus::{Histogram, HistogramVec, IntCounter, IntCounterVec, IntGauge, IntGaugeVec}; +use prometheus::{ + Gauge, Histogram, HistogramVec, IntCounter, IntCounterVec, IntGauge, IntGaugeVec, +}; +use std::collections::{BTreeMap, BTreeSet, VecDeque}; +use std::convert::{AsRef, TryFrom}; +use std::net::{Ipv4Addr, Ipv6Addr}; use std::ops::Range; use std::sync::mpsc::{sync_channel, TrySendError}; use std::sync::{Arc, Mutex, RwLock}; use std::thread::sleep; -use std::{ - collections::{BTreeMap, BTreeSet, VecDeque}, - convert::TryFrom, - net::{Ipv4Addr, Ipv6Addr}, - time::Instant, -}; +use std::time::Instant; use tracing::instrument; #[cfg(test)] @@ -99,6 +99,13 @@ const METRIC_WASM_CUSTOM_SECTIONS_MEMORY_USAGE_BYTES: &str = const METRIC_CANISTER_HISTORY_MEMORY_USAGE_BYTES: &str = "mr_canister_history_memory_usage_bytes"; const METRIC_CANISTER_HISTORY_TOTAL_NUM_CHANGES: &str = "mr_canister_history_total_num_changes"; +const METRIC_SUBNET_INFO: &str = "mr_subnet_info"; +const METRIC_SUBNET_SIZE: &str = "mr_subnet_size"; +const METRIC_MAX_CANISTERS: &str = "mr_subnet_max_canisters"; +const METRIC_INITIAL_NOTARY_DELAY: &str = "mr_subnet_initial_notary_delay_seconds"; +const METRIC_MAX_BLOCK_PAYLOAD_SIZE: &str = "mr_subnet_max_block_payload_size_bytes"; +const METRIC_SUBNET_FEATURES: &str = "mr_subnet_features"; + const CRITICAL_ERROR_MISSING_SUBNET_SIZE: &str = "cycles_account_manager_missing_subnet_size_error"; const CRITICAL_ERROR_MISSING_OR_INVALID_NODE_PUBLIC_KEYS: &str = "mr_missing_or_invalid_node_public_keys"; @@ -314,6 +321,13 @@ pub(crate) struct MessageRoutingMetrics { /// The total number of changes in canister history per canister on this subnet. canister_history_total_num_changes: Histogram, + subnet_info: IntGaugeVec, + subnet_size: IntGauge, + max_canisters: IntGauge, + initial_notary_delay: Gauge, + max_block_payload_size: IntGauge, + subnet_features: IntGaugeVec, + /// Critical error for not being able to calculate a subnet size. 
critical_error_missing_subnet_size: IntCounter, /// Critical error: public keys of own subnet nodes are missing @@ -438,6 +452,33 @@ impl MessageRoutingMetrics { decimal_buckets_with_zero(0, 3), ), + subnet_info: metrics_registry.int_gauge_vec( + METRIC_SUBNET_INFO, + "Subnet ID and type, from the subnet record in the registry.", + &["subnet_id", "subnet_type"], + ), + subnet_size: metrics_registry.int_gauge( + METRIC_SUBNET_SIZE, + "Number of nodes in the subnet, per the subnet record in the registry.", + ), + max_canisters: metrics_registry.int_gauge( + METRIC_MAX_CANISTERS, + "Maximum number of canisters that can be created on this subnet.", + ), + initial_notary_delay: metrics_registry.gauge( + METRIC_INITIAL_NOTARY_DELAY, + "Initial delay for notarization, in seconds.", + ), + max_block_payload_size: metrics_registry.int_gauge( + METRIC_MAX_BLOCK_PAYLOAD_SIZE, + "Maximum size of a block payload, in bytes.", + ), + subnet_features: metrics_registry.int_gauge_vec( + METRIC_SUBNET_FEATURES, + "Subnet features and their status (enabled or disabled).", + &["feature"], + ), + critical_error_missing_subnet_size: metrics_registry .error_counter(CRITICAL_ERROR_MISSING_SUBNET_SIZE), critical_error_missing_or_invalid_node_public_keys: metrics_registry @@ -824,6 +865,40 @@ impl BatchProcessorImpl { .len() }; + let own_subnet_type: SubnetType = subnet_record.subnet_type.try_into().unwrap_or_default(); + self.metrics + .subnet_info + .with_label_values(&[&own_subnet_id.to_string(), own_subnet_type.as_ref()]) + .set(1); + self.metrics.subnet_size.set(subnet_size as i64); + self.metrics + .max_canisters + .set(max_number_of_canisters as i64); + self.metrics + .initial_notary_delay + .set(subnet_record.initial_notary_delay_millis as f64 * 1e-3); + self.metrics + .max_block_payload_size + .set(subnet_record.max_block_payload_size as i64); + // Please export any new features via the `subnet_features` metric below. + let SubnetFeatures { + canister_sandboxing, + http_requests, + sev_enabled, + } = &subnet_features; + self.metrics + .subnet_features + .with_label_values(&["canister_sandboxing"]) + .set(*canister_sandboxing as i64); + self.metrics + .subnet_features + .with_label_values(&["http_requests"]) + .set(*http_requests as i64); + self.metrics + .subnet_features + .with_label_values(&["sev_enabled"]) + .set(*sev_enabled as i64); + Ok(( network_topology, subnet_features, From 6e5283ddc7be94517e15598788a85e886682c4fd Mon Sep 17 00:00:00 2001 From: max-dfinity <100170574+max-dfinity@users.noreply.github.com> Date: Wed, 4 Dec 2024 14:48:39 -0800 Subject: [PATCH 14/23] refactor(nns): Move recompute_tally to inside of cast_vote_and_cascade_follow (#2977) This removes a difficult edge case of trying to determine when to re-tally. We know that any votes cast in cast_vote_and_cascade_follow are either when a proposal is created, or when register_vote is called, which makes sure the vote cast is cast while the proposal voting is still open. This helps later when votes can be processed across multiple messages. 
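A minimal sketch of the new control flow, using simplified stand-in types rather than the real `Governance`/`ProposalData` code shown in the diff below: capture the timestamp once at the start of vote casting and recompute the tally with that same timestamp afterwards, so wait-for-quiet behaves as if the whole operation were synchronous.

```rust
// Simplified stand-in for a proposal's tally state.
struct Proposal {
    yes: u64,
    no: u64,
    tally_timestamp_seconds: u64,
}

fn cast_vote_and_cascade_follow_sketch(proposal: &mut Proposal, now_seconds: u64, vote_yes: bool) {
    // Capture the time when voting starts, so the tally reflects that moment
    // even if ballots end up being processed across multiple messages.
    let voting_started = now_seconds;

    // ... cast the ballot and cascade following (elided) ...
    if vote_yes {
        proposal.yes += 1;
    } else {
        proposal.no += 1;
    }

    // Recompute the tally unconditionally here, instead of relying on the
    // later proposal-processing path to notice that a re-tally is needed.
    recompute_tally_sketch(proposal, voting_started);
}

fn recompute_tally_sketch(proposal: &mut Proposal, now_seconds: u64) {
    proposal.tally_timestamp_seconds = now_seconds;
    // The real code also re-derives voting power totals and may decide the
    // proposal if an absolute majority has been reached.
}

fn main() {
    let mut p = Proposal { yes: 0, no: 0, tally_timestamp_seconds: 0 };
    cast_vote_and_cascade_follow_sketch(&mut p, 1_700_000_000, true);
    assert_eq!((p.yes, p.no, p.tally_timestamp_seconds), (1, 0, 1_700_000_000));
}
```

In the actual change the captured value comes from `self.env.now()` at the top of `cast_vote_and_cascade_follow`, and the re-tally is done by the new `recompute_proposal_tally` helper, as shown in the diff.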
--- .../governance/canbench/canbench_results.yml | 36 +++++------ rs/nns/governance/src/governance.rs | 10 --- rs/nns/governance/src/voting.rs | 61 ++++++++++++++++--- 3 files changed, 71 insertions(+), 36 deletions(-) diff --git a/rs/nns/governance/canbench/canbench_results.yml b/rs/nns/governance/canbench/canbench_results.yml index e15d386e650..414e9b32895 100644 --- a/rs/nns/governance/canbench/canbench_results.yml +++ b/rs/nns/governance/canbench/canbench_results.yml @@ -1,61 +1,61 @@ benches: add_neuron_active_maximum: total: - instructions: 36059483 + instructions: 36059517 heap_increase: 1 stable_memory_increase: 0 scopes: {} add_neuron_active_typical: total: - instructions: 1830111 + instructions: 1830145 heap_increase: 0 stable_memory_increase: 0 scopes: {} add_neuron_inactive_maximum: total: - instructions: 96070090 + instructions: 96070124 heap_increase: 1 stable_memory_increase: 0 scopes: {} add_neuron_inactive_typical: total: - instructions: 7372887 + instructions: 7372921 heap_increase: 0 stable_memory_increase: 0 scopes: {} cascading_vote_all_heap: total: - instructions: 31756954 + instructions: 34047728 heap_increase: 0 stable_memory_increase: 0 scopes: {} cascading_vote_heap_neurons_stable_index: total: - instructions: 54261919 + instructions: 56554267 heap_increase: 0 stable_memory_increase: 0 scopes: {} cascading_vote_stable_everything: total: - instructions: 160631204 + instructions: 162950875 heap_increase: 0 stable_memory_increase: 0 scopes: {} cascading_vote_stable_neurons_with_heap_index: total: - instructions: 138235474 + instructions: 140555145 heap_increase: 0 stable_memory_increase: 0 scopes: {} centralized_following_all_stable: total: - instructions: 66109851 + instructions: 68428123 heap_increase: 0 stable_memory_increase: 0 scopes: {} compute_ballots_for_new_proposal_with_stable_neurons: total: - instructions: 1826966 + instructions: 1830966 heap_increase: 0 stable_memory_increase: 0 scopes: {} @@ -67,7 +67,7 @@ benches: scopes: {} draw_maturity_from_neurons_fund_stable: total: - instructions: 10256384 + instructions: 10264384 heap_increase: 0 stable_memory_increase: 0 scopes: {} @@ -79,7 +79,7 @@ benches: scopes: {} list_neurons_ready_to_unstake_maturity_stable: total: - instructions: 37619197 + instructions: 37739237 heap_increase: 0 stable_memory_increase: 0 scopes: {} @@ -91,7 +91,7 @@ benches: scopes: {} list_ready_to_spawn_neuron_ids_stable: total: - instructions: 37607898 + instructions: 37727938 heap_increase: 0 stable_memory_increase: 0 scopes: {} @@ -103,26 +103,26 @@ benches: scopes: {} neuron_metrics_calculation_stable: total: - instructions: 1861928 + instructions: 1865928 heap_increase: 0 stable_memory_increase: 0 scopes: {} range_neurons_performance: total: - instructions: 48539417 + instructions: 48547417 heap_increase: 0 stable_memory_increase: 0 scopes: {} single_vote_all_stable: total: - instructions: 364631 + instructions: 2690730 heap_increase: 0 stable_memory_increase: 0 scopes: {} update_recent_ballots_stable_memory: total: - instructions: 13454595 + instructions: 13454675 heap_increase: 0 stable_memory_increase: 0 scopes: {} -version: 0.1.7 +version: 0.1.8 diff --git a/rs/nns/governance/src/governance.rs b/rs/nns/governance/src/governance.rs index 809f901bf08..c4b1c7645d2 100644 --- a/rs/nns/governance/src/governance.rs +++ b/rs/nns/governance/src/governance.rs @@ -4107,16 +4107,6 @@ impl Governance { let topic = proposal.topic(); let voting_period_seconds = voting_period_seconds_fn(topic); - // Recompute the tally here. 
It should correctly reflect all votes, - // even the ones after the proposal has been decided. It's possible - // to have Open status while it does not accept votes anymore, since - // the status change happens below this point. - if proposal.status() == ProposalStatus::Open - || proposal.accepts_vote(now_seconds, voting_period_seconds) - { - proposal.recompute_tally(now_seconds, voting_period_seconds); - } - if proposal.status() != ProposalStatus::Open { return; } diff --git a/rs/nns/governance/src/voting.rs b/rs/nns/governance/src/voting.rs index a00579c3a49..3cc52fa90ae 100644 --- a/rs/nns/governance/src/voting.rs +++ b/rs/nns/governance/src/voting.rs @@ -20,13 +20,19 @@ impl Governance { vote_of_neuron: Vote, topic: Topic, ) { + let voting_started = self.env.now(); + let neuron_store = &mut self.neuron_store; - let ballots = &mut self - .heap_data - .proposals - .get_mut(&proposal_id.id) - .unwrap() - .ballots; + let ballots = match self.heap_data.proposals.get_mut(&proposal_id.id) { + Some(proposal) => &mut proposal.ballots, + None => { + // This is a critical error, but there is nothing that can be done about it + // at this place. We somehow have a vote for a proposal that doesn't exist. + eprintln!("error in cast_vote_and_cascade_follow when gathering induction votes: Proposal not found"); + return; + } + }; + // Use of thread local storage to store the state machines prevents // more than one state machine per proposal, which limits the overall // memory usage for voting, which will be relevant when this can be used @@ -44,6 +50,29 @@ impl Governance { voting_state_machines.remove_if_done(&proposal_id); }); + // We use the time from the beginning of the function to retain the behaviors needed + // for wait for quiet even when votes can be processed asynchronously. + self.recompute_proposal_tally(proposal_id, voting_started); + } + + /// Recompute the tally for a proposal, using the time provided as the current time. + fn recompute_proposal_tally(&mut self, proposal_id: ProposalId, now: u64) { + let voting_period_seconds_fn = self.voting_period_seconds(); + + let proposal = match self.heap_data.proposals.get_mut(&proposal_id.id) { + None => { + // This is a critical error, but there is nothing that can be done about it + // at this place. We somehow have a vote for a proposal that doesn't exist. 
+ eprintln!( + "error in recompute_proposal_tally: Proposal not found: {}", + proposal_id.id + ); + return; + } + Some(proposal) => &mut *proposal, + }; + let topic = proposal.topic(); + proposal.recompute_tally(now, voting_period_seconds_fn(topic)); } } @@ -229,7 +258,7 @@ mod test { governance::{Governance, MIN_DISSOLVE_DELAY_FOR_VOTE_ELIGIBILITY_SECONDS}, neuron::{DissolveStateAndAge, Neuron, NeuronBuilder}, neuron_store::NeuronStore, - pb::v1::{neuron::Followees, Ballot, ProposalData, Topic, Vote}, + pb::v1::{neuron::Followees, Ballot, ProposalData, Tally, Topic, Vote}, test_utils::{MockEnvironment, StubCMC, StubIcpLedger}, voting::ProposalVotingStateMachine, }; @@ -446,7 +475,7 @@ mod test { }; let mut governance = Governance::new( governance_proto, - Box::new(MockEnvironment::new(Default::default(), 0)), + Box::new(MockEnvironment::new(Default::default(), 234)), Box::new(StubIcpLedger {}), Box::new(StubCMC {}), ); @@ -478,6 +507,22 @@ mod test { 6 => make_ballot(deciding_voting_power(NeuronId { id: 6 }), Vote::Unspecified), } ); + let expected_tally = Tally { + timestamp_seconds: 234, + yes: 530, + no: 0, + total: 636, + }; + assert_eq!( + governance + .heap_data + .proposals + .get(&1) + .unwrap() + .latest_tally + .unwrap(), + expected_tally + ); } fn add_neuron_with_ballot( From b6e0faa0554333c69480cf25ca68317e201c11d2 Mon Sep 17 00:00:00 2001 From: Andrew Battat <113942931+andrewbattat@users.noreply.github.com> Date: Wed, 4 Dec 2024 17:38:48 -0600 Subject: [PATCH 15/23] feat(node): update-config.service (#2393) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit NODE-1493 update-config.service updates the old ic-os config format to the new ic-os config format: https://github.com/dfinity/ic/pull/1539 Once all node have an updated version of the config, we will be able to switch nodes to _[actually use](https://github.com/dfinity/ic/pull/1563)_ the new config format. 
Then, once stable, we will delete update-config.service I've successfully tested on farm and bare metal 🥳 --------- Co-authored-by: IDX GitHub Automation --- ic-os/components/guestos.bzl | 3 + ic-os/components/hostos.bzl | 3 + .../update-guestos-config.service | 12 + .../update-hostos-config.service | 10 + ic-os/guestos/defs.bzl | 3 + ic-os/hostos/defs.bzl | 1 + rs/ic_os/config/BUILD.bazel | 2 +- rs/ic_os/config/src/lib.rs | 1 + rs/ic_os/config/src/main.rs | 27 + rs/ic_os/config/src/update_config.rs | 542 ++++++++++++++++++ rs/ic_os/config_types/src/lib.rs | 4 + 11 files changed, 607 insertions(+), 1 deletion(-) create mode 100644 ic-os/components/misc/update-config/update-guestos-config.service create mode 100644 ic-os/components/misc/update-config/update-hostos-config.service create mode 100644 rs/ic_os/config/src/update_config.rs diff --git a/ic-os/components/guestos.bzl b/ic-os/components/guestos.bzl index 0de94a44be5..7cba763ee3b 100644 --- a/ic-os/components/guestos.bzl +++ b/ic-os/components/guestos.bzl @@ -160,4 +160,7 @@ component_files = { # fstrim Label("fstrim/sync_fstrim.sh"): "/opt/ic/bin/sync_fstrim.sh", + + # TODO(NODE-1519): delete update-config.service after switch to new icos config + Label("misc/update-config/update-guestos-config.service"): "/etc/systemd/system/update-config.service", } diff --git a/ic-os/components/hostos.bzl b/ic-os/components/hostos.bzl index 9446f2aad36..3566742c219 100644 --- a/ic-os/components/hostos.bzl +++ b/ic-os/components/hostos.bzl @@ -97,4 +97,7 @@ component_files = { Label("upgrade/systemd-generators/hostos/mount-generator"): "/etc/systemd/system-generators/mount-generator", Label("upgrade/systemd-generators/systemd-gpt-auto-generator"): "/etc/systemd/system-generators/systemd-gpt-auto-generator", Label("upgrade/install-upgrade.sh"): "/opt/ic/bin/install-upgrade.sh", + + # TODO(NODE-1519): delete update-config.service after switch to new icos config + Label("misc/update-config/update-hostos-config.service"): "/etc/systemd/system/update-config.service", } diff --git a/ic-os/components/misc/update-config/update-guestos-config.service b/ic-os/components/misc/update-config/update-guestos-config.service new file mode 100644 index 00000000000..5e92de2d043 --- /dev/null +++ b/ic-os/components/misc/update-config/update-guestos-config.service @@ -0,0 +1,12 @@ +[Unit] +Description=Update GuestOS Configuration +After=bootstrap-ic-node.service +Requires=bootstrap-ic-node.service + +[Service] +Type=oneshot +ExecStart=/opt/ic/bin/config update-guestos-config +RemainAfterExit=yes + +[Install] +WantedBy=multi-user.target diff --git a/ic-os/components/misc/update-config/update-hostos-config.service b/ic-os/components/misc/update-config/update-hostos-config.service new file mode 100644 index 00000000000..82022c65def --- /dev/null +++ b/ic-os/components/misc/update-config/update-hostos-config.service @@ -0,0 +1,10 @@ +[Unit] +Description=Update HostOS Configuration + +[Service] +Type=oneshot +ExecStart=/opt/ic/bin/config update-hostos-config +RemainAfterExit=yes + +[Install] +WantedBy=multi-user.target diff --git a/ic-os/guestos/defs.bzl b/ic-os/guestos/defs.bzl index 177302685d2..ce142eed84d 100644 --- a/ic-os/guestos/defs.bzl +++ b/ic-os/guestos/defs.bzl @@ -54,6 +54,9 @@ def image_deps(mode, malicious = False): # additional libraries to install "//rs/ic_os/release:nss_icos": "/usr/lib/x86_64-linux-gnu/libnss_icos.so.2:0644", # Allows referring to the guest IPv6 by name guestos from host, and host as hostos from guest. 
+ + # TODO(NODE-1519): delete config tool from guestos after switch to new icos config + "//rs/ic_os/release:config": "/opt/ic/bin/config:0755", }, # Set various configuration values diff --git a/ic-os/hostos/defs.bzl b/ic-os/hostos/defs.bzl index e6c1af9dcc3..114e6cb361a 100644 --- a/ic-os/hostos/defs.bzl +++ b/ic-os/hostos/defs.bzl @@ -32,6 +32,7 @@ def image_deps(mode, _malicious = False): "//rs/ic_os/release:vsock_host": "/opt/ic/bin/vsock_host:0755", "//rs/ic_os/release:hostos_tool": "/opt/ic/bin/hostos_tool:0755", "//rs/ic_os/release:metrics-proxy": "/opt/ic/bin/metrics-proxy:0755", + "//rs/ic_os/release:config": "/opt/ic/bin/config:0755", # additional libraries to install "//rs/ic_os/release:nss_icos": "/usr/lib/x86_64-linux-gnu/libnss_icos.so.2:0644", diff --git a/rs/ic_os/config/BUILD.bazel b/rs/ic_os/config/BUILD.bazel index ae3be2c6c82..beeed22a938 100644 --- a/rs/ic_os/config/BUILD.bazel +++ b/rs/ic_os/config/BUILD.bazel @@ -5,6 +5,7 @@ package(default_visibility = ["//rs:ic-os-pkg"]) DEPENDENCIES = [ # Keep sorted. "//rs/ic_os/config_types", + "//rs/ic_os/network", "//rs/ic_os/utils", "//rs/types/types", "@crate_index//:anyhow", @@ -47,7 +48,6 @@ rust_binary( proc_macro_deps = MACRO_DEPENDENCIES, deps = [ ":config_lib", - "//rs/ic_os/network", ] + DEPENDENCIES, ) diff --git a/rs/ic_os/config/src/lib.rs b/rs/ic_os/config/src/lib.rs index dbd4fa9ee17..90889add423 100644 --- a/rs/ic_os/config/src/lib.rs +++ b/rs/ic_os/config/src/lib.rs @@ -1,6 +1,7 @@ pub mod config_ini; pub mod deployment_json; pub mod generate_testnet_config; +pub mod update_config; use anyhow::{Context, Result}; use serde::{Deserialize, Serialize}; diff --git a/rs/ic_os/config/src/main.rs b/rs/ic_os/config/src/main.rs index d17f7237115..9b31aadaa28 100644 --- a/rs/ic_os/config/src/main.rs +++ b/rs/ic_os/config/src/main.rs @@ -3,6 +3,7 @@ use clap::{Args, Parser, Subcommand}; use config::config_ini::{get_config_ini_settings, ConfigIniSettings}; use config::deployment_json::get_deployment_settings; use config::serialize_and_write_config; +use config::update_config::{update_guestos_config, update_hostos_config}; use macaddr::MacAddr6; use network::resolve_mgmt_mac; use regex::Regex; @@ -55,6 +56,18 @@ pub enum Commands { }, /// Creates a GuestOSConfig object directly from GenerateTestnetConfigClapArgs. Only used for testing purposes. 
GenerateTestnetConfig(GenerateTestnetConfigClapArgs), + /// Creates a GuestOSConfig object from existing guestos configuration files + UpdateGuestosConfig, + UpdateHostosConfig { + #[arg(long, default_value = config::DEFAULT_HOSTOS_CONFIG_INI_FILE_PATH, value_name = "config.ini")] + config_ini_path: PathBuf, + + #[arg(long, default_value = config::DEFAULT_HOSTOS_DEPLOYMENT_JSON_PATH, value_name = "deployment.json")] + deployment_json_path: PathBuf, + + #[arg(long, default_value = config::DEFAULT_HOSTOS_CONFIG_OBJECT_PATH, value_name = "config.json")] + hostos_config_json_path: PathBuf, + }, } #[derive(Parser)] @@ -245,6 +258,7 @@ pub fn main() -> Result<()> { hostos_settings, guestos_settings, }; + // SetupOSConfig is safe to log; it does not contain any secret material println!("SetupOSConfig: {:?}", setupos_config); let setupos_config_json_path = Path::new(&setupos_config_json_path); @@ -367,6 +381,19 @@ pub fn main() -> Result<()> { generate_testnet_config(args, clap_args.guestos_config_json_path) } + // TODO(NODE-1519): delete UpdateGuestosConfig and UpdateHostosConfig after moved to new config format + // Regenerate config.json on *every boot* in case the config structure changes between + // when we roll out the update-config service and when we roll out the 'config integration' + Some(Commands::UpdateGuestosConfig) => update_guestos_config(), + Some(Commands::UpdateHostosConfig { + config_ini_path, + deployment_json_path, + hostos_config_json_path, + }) => update_hostos_config( + &config_ini_path, + &deployment_json_path, + &hostos_config_json_path, + ), None => { println!("No command provided. Use --help for usage information."); Ok(()) diff --git a/rs/ic_os/config/src/update_config.rs b/rs/ic_os/config/src/update_config.rs new file mode 100644 index 00000000000..c5a18fce0b1 --- /dev/null +++ b/rs/ic_os/config/src/update_config.rs @@ -0,0 +1,542 @@ +use std::collections::HashMap; +use std::ffi::OsStr; +use std::fs; +use std::net::{Ipv4Addr, Ipv6Addr}; +use std::path::{Path, PathBuf}; +use url::Url; + +use anyhow::{anyhow, Context, Result}; +use macaddr::MacAddr6; + +use crate::config_ini::{get_config_ini_settings, ConfigIniSettings}; +use crate::deployment_json::get_deployment_settings; +use crate::serialize_and_write_config; +use config_types::*; +use network::resolve_mgmt_mac; + +pub static CONFIG_ROOT: &str = "/boot/config"; +pub static STATE_ROOT: &str = "/var/lib/ic/data"; + +pub fn update_guestos_config() -> Result<()> { + let config_dir = Path::new(CONFIG_ROOT); + log_directory_structure(config_dir)?; + let state_root = Path::new(STATE_ROOT); + log_directory_structure(state_root)?; + + let network_conf_path = config_dir.join("network.conf"); + let config_json_path = config_dir.join("config.json"); + + let old_config_exists = network_conf_path.exists(); + + if old_config_exists { + // Read existing configuration files and generate new config.json + let network_config_result = read_network_conf(config_dir)?; + let network_settings = network_config_result.network_settings; + let hostname = network_config_result.hostname.clone(); + + let logging = read_filebeat_conf(config_dir)?; + let nns_urls = read_nns_conf(config_dir)?; + + let use_nns_public_key = state_root.join("nns_public_key.pem").exists(); + let use_node_operator_private_key = + state_root.join("node_operator_private_key.pem").exists(); + let use_ssh_authorized_keys = config_dir.join("accounts_ssh_authorized_keys").is_dir(); + + let mgmt_mac = derive_mgmt_mac_from_hostname(hostname.as_deref())?; + let 
deployment_environment = DeploymentEnvironment::Mainnet; + + let icos_settings = ICOSSettings { + node_reward_type: None, + mgmt_mac, + deployment_environment, + logging, + use_nns_public_key, + nns_urls, + use_node_operator_private_key, + use_ssh_authorized_keys, + icos_dev_settings: ICOSDevSettings::default(), + }; + + let guestos_settings = GuestOSSettings::default(); + + let guestos_config = GuestOSConfig { + config_version: CONFIG_VERSION.to_string(), + network_settings, + icos_settings, + guestos_settings, + }; + + // GuestOSConfig is safe to log; it does not contain any secret material + println!("New GuestOSConfig: {:?}", guestos_config); + + serialize_and_write_config(&config_json_path, &guestos_config)?; + + println!( + "New GuestOSConfig has been written to {}", + config_json_path.display() + ); + } else { + println!("No update-config action taken."); + } + + Ok(()) +} + +fn read_network_conf(config_dir: &Path) -> Result { + let network_conf_path = config_dir.join("network.conf"); + let conf_map = read_conf_file(&network_conf_path)?; + + let ipv6_address_opt = conf_map.get("ipv6_address").cloned(); + let ipv6_gateway_opt = conf_map.get("ipv6_gateway").cloned(); + let ipv4_address_opt = conf_map.get("ipv4_address").cloned(); + let ipv4_gateway_opt = conf_map.get("ipv4_gateway").cloned(); + let domain_name = conf_map.get("domain").cloned(); + let hostname = conf_map.get("hostname").cloned(); + + let ipv6_config = match (ipv6_address_opt, ipv6_gateway_opt) { + (Some(ipv6_address), Some(ipv6_gateway)) => { + let address = ipv6_address; + let gateway = ipv6_gateway + .parse::() + .with_context(|| format!("Invalid IPv6 gateway: {}", ipv6_gateway))?; + Ipv6Config::Fixed(FixedIpv6Config { address, gateway }) + } + _ => Ipv6Config::RouterAdvertisement, + }; + + let ipv4_config = match (ipv4_address_opt, ipv4_gateway_opt) { + (Some(ipv4_address), Some(ipv4_gateway)) => { + let (address_str, prefix_str) = ipv4_address + .split_once('/') + .with_context(|| format!("Invalid ipv4_address format: {}", ipv4_address))?; + let address = address_str + .parse::() + .with_context(|| format!("Invalid IPv4 address: {}", address_str))?; + let prefix_length = prefix_str + .parse::() + .with_context(|| format!("Invalid IPv4 prefix length: {}", prefix_str))?; + let gateway = ipv4_gateway + .parse::() + .with_context(|| format!("Invalid IPv4 gateway: {}", ipv4_gateway))?; + Some(Ipv4Config { + address, + gateway, + prefix_length, + }) + } + _ => None, + }; + + let network_settings = NetworkSettings { + ipv6_config, + ipv4_config, + domain_name, + }; + + Ok(NetworkConfigResult { + network_settings, + hostname, + }) +} + +struct NetworkConfigResult { + network_settings: NetworkSettings, + hostname: Option, +} + +fn read_filebeat_conf(config_dir: &Path) -> Result { + let filebeat_conf_path = config_dir.join("filebeat.conf"); + let conf_map = match read_conf_file(&filebeat_conf_path) { + Ok(map) => map, + Err(_) => { + // Set default values if filebeat.conf doesn't exist + return Ok(Logging { + elasticsearch_hosts: "elasticsearch-node-0.mercury.dfinity.systems:443 \ + elasticsearch-node-1.mercury.dfinity.systems:443 \ + elasticsearch-node-2.mercury.dfinity.systems:443 \ + elasticsearch-node-3.mercury.dfinity.systems:443" + .to_string(), + elasticsearch_tags: None, + }); + } + }; + + let elasticsearch_hosts = conf_map + .get("elasticsearch_hosts") + .cloned() + .unwrap_or_default(); + + let elasticsearch_tags = conf_map.get("elasticsearch_tags").cloned(); + + Ok(Logging { + elasticsearch_hosts, + 
elasticsearch_tags, + }) +} + +fn read_nns_conf(config_dir: &Path) -> Result> { + let nns_conf_path = config_dir.join("nns.conf"); + let conf_map = match read_conf_file(&nns_conf_path) { + Ok(map) => map, + Err(_) => { + // Set default values if nns.conf doesn't exist + let default_urls = vec![ + Url::parse("https://icp-api.io")?, + Url::parse("https://icp0.io")?, + Url::parse("https://ic0.app")?, + ]; + return Ok(default_urls); + } + }; + + let nns_url_str = conf_map.get("nns_url").cloned().unwrap_or_default(); + + let mut nns_urls = Vec::new(); + for s in nns_url_str.split(',') { + let s = s.trim(); + match Url::parse(s) { + Ok(url) => nns_urls.push(url), + Err(e) => { + println!("Invalid URL '{}': {}", s, e); + } + } + } + + Ok(nns_urls) +} + +fn derive_mgmt_mac_from_hostname(hostname: Option<&str>) -> Result { + if let Some(hostname) = hostname { + if let Some(unformatted_mac) = hostname.strip_prefix("guest-") { + unformatted_mac + .parse() + .map_err(|_| anyhow!("Unable to parse mac address: {}", unformatted_mac)) + } else { + Err(anyhow::anyhow!( + "Hostname does not start with 'guest-': {}", + hostname + )) + } + } else { + Err(anyhow::anyhow!("Hostname is not specified")) + } +} + +fn read_conf_file(path: &Path) -> Result> { + println!("Reading configuration file: {:?}", path); + let content = fs::read_to_string(path) + .with_context(|| format!("Failed to read configuration file: {:?}", path))?; + println!("Contents of {:?}:\n{}", path, content); + + let mut map = HashMap::new(); + for line in content.lines() { + let line = line.trim(); + if line.is_empty() || line.starts_with('#') { + continue; + } + if let Some((key, value)) = line.split_once('=') { + map.insert(key.trim().to_string(), value.trim().to_string()); + } + } + Ok(map) +} + +fn log_directory_structure(path: &Path) -> Result<()> { + println!("Listing directory structure of {}", path.display()); + + if path.is_dir() { + println!("{}/", path.display()); + + for entry in fs::read_dir(path)? { + let entry = entry?; + let entry_path = entry.path(); + + let file_name = entry_path + .file_name() + .unwrap_or_else(|| OsStr::new("")) + .to_string_lossy(); + + if entry_path.is_dir() { + println!(" {}/", file_name); + } else { + println!(" {}", file_name); + } + } + } else { + println!("{} is not a directory", path.display()); + } + + Ok(()) +} + +pub fn update_hostos_config( + config_ini_path: &Path, + deployment_json_path: &Path, + hostos_config_json_path: &PathBuf, +) -> Result<()> { + let old_config_exists = config_ini_path.exists(); + + if old_config_exists { + let hostos_config_json_path = Path::new(&hostos_config_json_path); + + let ConfigIniSettings { + ipv6_prefix, + ipv6_prefix_length, + ipv6_gateway, + ipv4_address, + ipv4_gateway, + ipv4_prefix_length, + domain_name, + verbose, + node_reward_type, + } = get_config_ini_settings(config_ini_path)?; + + let deterministic_config = DeterministicIpv6Config { + prefix: ipv6_prefix, + prefix_length: ipv6_prefix_length, + gateway: ipv6_gateway, + }; + + let ipv4_config = match (ipv4_address, ipv4_gateway, ipv4_prefix_length) { + (Some(address), Some(gateway), Some(prefix_length)) => Some(Ipv4Config { + address, + gateway, + prefix_length, + }), + (None, None, None) => None, + _ => { + println!("Warning: Partial IPv4 configuration provided. 
All parameters are required for IPv4 configuration."); + None + } + }; + + let network_settings = NetworkSettings { + ipv6_config: Ipv6Config::Deterministic(deterministic_config), + ipv4_config, + domain_name, + }; + + let deployment_json_settings = get_deployment_settings(deployment_json_path)?; + + let logging = Logging { + elasticsearch_hosts: deployment_json_settings.logging.hosts.to_string(), + elasticsearch_tags: None, + }; + + let mgmt_mac = resolve_mgmt_mac(deployment_json_settings.deployment.mgmt_mac)?; + + let use_nns_public_key = Path::new("/boot/config/nns_public_key.pem").exists(); + let use_node_operator_private_key = + Path::new("/boot/config/node_operator_private_key.pem").exists(); + let use_ssh_authorized_keys = Path::new("/boot/config/ssh_authorized_keys").exists(); + + let icos_settings = ICOSSettings { + node_reward_type, + mgmt_mac, + deployment_environment: deployment_json_settings.deployment.name.parse()?, + logging, + use_nns_public_key, + nns_urls: deployment_json_settings.nns.url.clone(), + use_node_operator_private_key, + use_ssh_authorized_keys, + icos_dev_settings: ICOSDevSettings::default(), + }; + + let hostos_settings = HostOSSettings { + vm_memory: deployment_json_settings.resources.memory, + vm_cpu: deployment_json_settings + .resources + .cpu + .clone() + .unwrap_or("kvm".to_string()), + verbose, + }; + + let guestos_settings = GuestOSSettings::default(); + + let hostos_config = HostOSConfig { + config_version: CONFIG_VERSION.to_string(), + network_settings, + icos_settings, + hostos_settings, + guestos_settings, + }; + + // HostOSConfig is safe to log; it does not contain any secret material + println!("New HostOSConfig: {:?}", hostos_config); + + serialize_and_write_config(hostos_config_json_path, &hostos_config)?; + + println!( + "New HostOSConfig has been written to {}", + hostos_config_json_path.display() + ); + } else { + println!("No update-config action taken."); + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use std::io::Write; + use tempfile::tempdir; + + #[test] + fn test_derive_mgmt_mac_from_hostname() -> Result<()> { + // Test with a valid hostname + let hostname = Some("guest-001122334455"); + let expected_mac: MacAddr6 = "00:11:22:33:44:55".parse().unwrap(); + let mac = derive_mgmt_mac_from_hostname(hostname)?; + assert_eq!(mac, expected_mac); + + // Test with invalid hostname (wrong prefix) + let invalid_hostname = Some("host-001122334455"); + let result = derive_mgmt_mac_from_hostname(invalid_hostname); + assert!(result.is_err()); + + // Test with invalid hostname (wrong length) + let invalid_hostname_length = Some("guest-00112233"); + let result = derive_mgmt_mac_from_hostname(invalid_hostname_length); + assert!(result.is_err()); + + // Test with None + let result = derive_mgmt_mac_from_hostname(None); + assert!(result.is_err()); + + Ok(()) + } + + #[test] + fn test_read_conf_file() -> Result<()> { + let dir = tempdir()?; + let file_path = dir.path().join("test.conf"); + let mut file = fs::File::create(&file_path)?; + writeln!(file, "key1=value1")?; + writeln!(file, "key2=value2")?; + writeln!(file, "# This is a comment")?; + writeln!(file, "key3 = value3")?; + + let conf_map = read_conf_file(&file_path)?; + + assert_eq!(conf_map.get("key1"), Some(&"value1".to_string())); + assert_eq!(conf_map.get("key2"), Some(&"value2".to_string())); + assert_eq!(conf_map.get("key3"), Some(&"value3".to_string())); + + Ok(()) + } + + #[test] + fn test_read_network_conf() -> Result<()> { + let dir = tempdir()?; + let network_conf_path 
= dir.path().join("network.conf"); + let mut file = fs::File::create(&network_conf_path)?; + writeln!(file, "ipv6_address=2001:db8::1/64")?; + writeln!(file, "ipv6_gateway=2001:db8::fffe")?; + writeln!(file, "ipv4_address=192.0.2.1/24")?; + writeln!(file, "ipv4_gateway=192.0.2.254")?; + writeln!(file, "domain=example.com")?; + writeln!(file, "hostname=guest-001122334455")?; + + let result = read_network_conf(dir.path())?; + + assert_eq!( + result.network_settings, + NetworkSettings { + ipv6_config: Ipv6Config::Fixed(FixedIpv6Config { + address: "2001:db8::1/64".to_string(), + gateway: "2001:db8::fffe".parse().unwrap(), + }), + ipv4_config: Some(Ipv4Config { + address: "192.0.2.1".parse().unwrap(), + prefix_length: 24, + gateway: "192.0.2.254".parse().unwrap(), + }), + domain_name: Some("example.com".to_string()), + } + ); + + assert_eq!(result.hostname, Some("guest-001122334455".to_string())); + + Ok(()) + } + + #[test] + fn test_read_filebeat_conf_existing_file() -> Result<()> { + let dir = tempdir()?; + let filebeat_conf_path = dir.path().join("filebeat.conf"); + let mut file = fs::File::create(&filebeat_conf_path)?; + writeln!(file, "elasticsearch_hosts=host1:9200,host2:9200")?; + writeln!(file, "elasticsearch_tags=tag1,tag2")?; + + let logging = read_filebeat_conf(dir.path())?; + + assert_eq!( + logging.elasticsearch_hosts, + "host1:9200,host2:9200".to_string() + ); + assert_eq!(logging.elasticsearch_tags, Some("tag1,tag2".to_string())); + + Ok(()) + } + + #[test] + fn test_read_filebeat_conf_missing_file() -> Result<()> { + let dir = tempdir()?; + let logging = read_filebeat_conf(dir.path())?; + + assert_eq!( + logging.elasticsearch_hosts, + "elasticsearch-node-0.mercury.dfinity.systems:443 \ + elasticsearch-node-1.mercury.dfinity.systems:443 \ + elasticsearch-node-2.mercury.dfinity.systems:443 \ + elasticsearch-node-3.mercury.dfinity.systems:443" + .to_string() + ); + assert_eq!(logging.elasticsearch_tags, None); + + Ok(()) + } + + #[test] + fn test_read_nns_conf_existing_file() -> Result<()> { + let dir = tempdir()?; + let nns_conf_path = dir.path().join("nns.conf"); + let mut file = fs::File::create(&nns_conf_path)?; + writeln!( + file, + "nns_url=https://nns1.example.com,https://nns2.example.com" + )?; + + let nns_urls = read_nns_conf(dir.path())?; + + assert_eq!( + nns_urls, + vec![ + Url::parse("https://nns1.example.com")?, + Url::parse("https://nns2.example.com")?, + ] + ); + + Ok(()) + } + + #[test] + fn test_read_nns_conf_missing_file() -> Result<()> { + let dir = tempdir()?; + let nns_urls = read_nns_conf(dir.path())?; + + assert_eq!( + nns_urls, + vec![ + Url::parse("https://icp-api.io")?, + Url::parse("https://icp0.io")?, + Url::parse("https://ic0.app")?, + ] + ); + + Ok(()) + } +} diff --git a/rs/ic_os/config_types/src/lib.rs b/rs/ic_os/config_types/src/lib.rs index d743517e69b..d2355887c56 100644 --- a/rs/ic_os/config_types/src/lib.rs +++ b/rs/ic_os/config_types/src/lib.rs @@ -14,6 +14,10 @@ //! - **Removing Fields**: To prevent backwards-compatibility deserialization errors, required fields must not be removed directly: In a first step, they have to be made optional and code that reads the value must be removed/handle missing values. In a second step, after the first step has rolled out to all OSes and there is no risk of a rollback, the field can be removed. Additionally, to avoid reintroducing a previously removed field, add your removed field to the RESERVED_FIELD_NAMES list. //! //! - **Renaming Fields**: Avoid renaming fields unless absolutely necessary. 
If you must rename a field, use `#[serde(rename = "old_name")]`. +//! +//! ## Logging Safety +//! +//! All configuration objects defined in this file are safe to log. They do not contain any secret material. use ic_types::malicious_behaviour::MaliciousBehaviour; use macaddr::MacAddr6; use serde::{Deserialize, Serialize}; From 82b4d4821b1a97684b2038a0c287637aee6e6622 Mon Sep 17 00:00:00 2001 From: Gabor Greif Date: Thu, 5 Dec 2024 09:46:25 +0100 Subject: [PATCH 16/23] chore: enable `best_effort_responses` in `drun` (#2965) --- rs/drun/src/main.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/rs/drun/src/main.rs b/rs/drun/src/main.rs index 6a63ca22a2e..7d2af5662f3 100644 --- a/rs/drun/src/main.rs +++ b/rs/drun/src/main.rs @@ -63,6 +63,10 @@ async fn drun_main() -> Result<(), String> { // For testing enhanced orthogonal persistence in Motoko, // enable Wasm Memory64 and re-configure the main memory capacity. hypervisor_config.embedders_config.feature_flags.wasm64 = FlagStatus::Enabled; + hypervisor_config + .embedders_config + .feature_flags + .best_effort_responses = FlagStatus::Enabled; hypervisor_config.embedders_config.max_wasm_memory_size = MAIN_MEMORY_CAPACITY; hypervisor_config.max_canister_memory_size = hypervisor_config.embedders_config.max_wasm_memory_size From b3ab4bf9a1eb8d851af21449f94a53b0892054c9 Mon Sep 17 00:00:00 2001 From: Rostislav Rumenov Date: Thu, 5 Dec 2024 10:52:07 +0100 Subject: [PATCH 17/23] chore: merge infallible errors, improve expect statements and use {:#?} for anyhow error formatting (#2964) Co-authored-by: Daniel Sharifi <40335219+DSharifi@users.noreply.github.com> --- .../quic_transport/src/connection_manager.rs | 87 +++++++++---------- rs/p2p/quic_transport/src/lib.rs | 2 +- rs/p2p/quic_transport/src/request_handler.rs | 5 ++ 3 files changed, 46 insertions(+), 48 deletions(-) diff --git a/rs/p2p/quic_transport/src/connection_manager.rs b/rs/p2p/quic_transport/src/connection_manager.rs index 4c8a66464c2..28348ed98ce 100644 --- a/rs/p2p/quic_transport/src/connection_manager.rs +++ b/rs/p2p/quic_transport/src/connection_manager.rs @@ -132,16 +132,14 @@ enum ConnectionEstablishError { peer_id: Option, cause: ConnectionError, }, - // The following errors should be infallible. + // The following errors should be infallible/internal. #[error("Failed to establish outbound connection to peer {peer_id:?} due to errors in the parameters being used. {cause:?}")] BadConnectParameters { peer_id: NodeId, cause: ConnectError, }, - #[error("No peer identity available.")] - MissingPeerIdentity, - #[error("Malformed peer identity. 
{0}")] - MalformedPeerIdentity(String), + #[error("Authentication failed: {0}")] + AuthenticationFailed(String), #[error("Incoming connection from {client:?}, which is > than {server:?}")] InvalidIncomingPeerId { client: NodeId, server: NodeId }, } @@ -218,9 +216,8 @@ pub(crate) fn start_connection_manager( .max_concurrent_uni_streams(MAX_CONCURRENT_UNI_STREAMS); let transport_config = Arc::new(transport_config); - let mut server_config = quinn::ServerConfig::with_crypto(Arc::new( - QuicServerConfig::try_from(rustls_server_config).unwrap(), - )); + let quinn_server_config = QuicServerConfig::try_from(rustls_server_config).expect("Conversion from RustTls config to Quinn config must succeed as long as this library and quinn use the same RustTls versions."); + let mut server_config = quinn::ServerConfig::with_crypto(Arc::new(quinn_server_config)); server_config.transport_config(transport_config.clone()); let endpoint = { @@ -261,6 +258,25 @@ impl ConnectionManager { self.node_id < *dst } + /// Conditions under which the node can start outbound connecting attempt + /// - the node is a designated dialer + /// - peer is in the subnet + /// - this node is part of the subnet (can happen when a node is removed from the subnet) + /// - there is no connect attempted + /// - there is no established connection + fn can_i_dial_to(&self, dst: &NodeId) -> bool { + let dialer = self.am_i_dialer(dst); + let peer_in_subnet = self.topology.is_member(dst); + let node_in_subnet = self.topology.is_member(&self.node_id); + let no_active_connection_attempt = !self.outbound_connecting.contains(dst); + let no_active_connection = !self.active_connections.contains(dst); + no_active_connection_attempt + && no_active_connection + && dialer + && node_in_subnet + && peer_in_subnet + } + pub async fn run(mut self, cancellation: CancellationToken) { loop { select! { @@ -268,7 +284,7 @@ impl ConnectionManager { break; }, Some(reconnect) = self.connect_queue.next() => { - self.handle_dial(reconnect.into_inner()) + self.handle_outbound_conn_attemp(reconnect.into_inner()) }, // Ignore the case if the sender is dropped. It is not transport's responsibility to make // sure topology senders are up and running. @@ -277,7 +293,7 @@ impl ConnectionManager { }, incoming = self.endpoint.accept() => { if let Some(incoming) = incoming { - self.handle_inbound(incoming); + self.handle_inbound_conn_attemp(incoming); } else { error!(self.log, "Quic endpoint closed. Stopping transport."); // Endpoint is closed. This indicates NOT graceful shutdown. 
@@ -344,7 +360,7 @@ impl ConnectionManager { self.endpoint.wait_idle().await; } - // Removes connection and sets peer status to disconnected + /// Removes connection and sets peer status to disconnected fn handled_closed_conn(&mut self, peer_id: NodeId) { self.peer_map.write().unwrap().remove(&peer_id); self.connect_queue.insert(peer_id, Duration::from_secs(0)); @@ -366,7 +382,7 @@ impl ConnectionManager { .server_config(subnet_nodes, self.topology.latest_registry_version()) { Ok(rustls_server_config) => { - let quic_server_config = QuicServerConfig::try_from(rustls_server_config).unwrap(); + let quic_server_config = QuicServerConfig::try_from(rustls_server_config).expect("Conversion from RustTls config to Quinn config must succeed as long as this library and quinn use the same RustTls versions."); let mut server_config = quinn::ServerConfig::with_crypto(Arc::new(quic_server_config)); server_config.transport_config(self.transport_config.clone()); @@ -379,16 +395,7 @@ impl ConnectionManager { // Connect/Disconnect from peers according to new topology for (peer_id, _) in self.topology.iter() { - let dialer = self.am_i_dialer(peer_id); - let no_active_connection_attempt = !self.outbound_connecting.contains(peer_id); - let no_active_connection = !self.active_connections.contains(peer_id); - let node_in_subnet = self.topology.is_member(&self.node_id); - // Add to delayqueue for connecting iff - // - Not currently trying to connect - // - No active connection to this peer - // - Our node id is lower -> This node is dialer. - // - This node is part of the subnet. This can happen when a node is removed from the subnet. - if no_active_connection_attempt && no_active_connection && dialer && node_in_subnet { + if self.can_i_dial_to(peer_id) { self.connect_queue.insert(*peer_id, Duration::from_secs(0)); } } @@ -415,25 +422,9 @@ impl ConnectionManager { self.metrics.peer_map_size.set(peer_map.len() as i64); } - fn handle_dial(&mut self, peer_id: NodeId) { - let not_dialer = !self.am_i_dialer(&peer_id); - let peer_not_in_subnet = self.topology.get_addr(&peer_id).is_none(); - let active_connection_attempt = self.outbound_connecting.contains(&peer_id); - let active_connection = self.active_connections.contains(&peer_id); - let node_not_in_subnet = !self.topology.is_member(&self.node_id); - - // Conditions under which we do NOT connect - // - prefer lower node id / dialing ourself - // - peer not in subnet - // - currently trying to connect - // - already connected - // - this node is not part of subnet. This can happen when a node is removed from the subnet. - if not_dialer - || peer_not_in_subnet - || active_connection_attempt - || active_connection - || node_not_in_subnet - { + /// Inserts a task into `outbound_connecting`` that handles an outbound connection attempt. (The function can also be called `handle_outbound`). 
+ fn handle_outbound_conn_attemp(&mut self, peer_id: NodeId) { + if !self.can_i_dial_to(&peer_id) { return; } @@ -442,14 +433,14 @@ impl ConnectionManager { let addr = self .topology .get_addr(&peer_id) - .expect("Just checked this conditions"); + .expect("Just checked this conditions."); let endpoint = self.endpoint.clone(); let rustls_client_config = self .tls_config .client_config(peer_id, self.topology.latest_registry_version()) .expect("The rustls client config must be locally available, otherwise transport can't start."); let transport_config = self.transport_config.clone(); - let quinn_client_config = QuicClientConfig::try_from(rustls_client_config).unwrap(); + let quinn_client_config = QuicClientConfig::try_from(rustls_client_config).expect("Conversion from RustTls config to Quinn config must succeed as long as this library and quinn use the same RustTls versions."); let mut client_config = quinn::ClientConfig::new(Arc::new(quinn_client_config)); client_config.transport_config(transport_config); let conn_fut = async move { @@ -554,7 +545,7 @@ impl ConnectionManager { } /// Inserts a task into 'inbound_connecting' that handles an inbound connection attempt. - fn handle_inbound(&mut self, incoming: Incoming) { + fn handle_inbound_conn_attemp(&mut self, incoming: Incoming) { self.metrics.inbound_connection_total.inc(); let node_id = self.node_id; let conn_fut = async move { @@ -568,17 +559,19 @@ impl ConnectionManager { let rustls_certs = established .peer_identity() - .ok_or(ConnectionEstablishError::MissingPeerIdentity)? + .ok_or(ConnectionEstablishError::AuthenticationFailed( + "missing peer identity".to_string(), + ))? .downcast::>() .unwrap(); let rustls_cert = rustls_certs .first() - .ok_or(ConnectionEstablishError::MalformedPeerIdentity( + .ok_or(ConnectionEstablishError::AuthenticationFailed( "a single cert must be present".to_string(), ))?; let peer_id = node_id_from_certificate_der(rustls_cert.as_ref()) - .map_err(|err| ConnectionEstablishError::MalformedPeerIdentity(err.to_string()))?; + .map_err(|err| ConnectionEstablishError::AuthenticationFailed(err.to_string()))?; // Lower ID is dialer. So we reject if this nodes id is higher. if peer_id > node_id { diff --git a/rs/p2p/quic_transport/src/lib.rs b/rs/p2p/quic_transport/src/lib.rs index 2099719f653..bc1e32f291f 100644 --- a/rs/p2p/quic_transport/src/lib.rs +++ b/rs/p2p/quic_transport/src/lib.rs @@ -225,7 +225,7 @@ impl Transport for QuicTransport { .ok_or(anyhow!("Currently not connected to this peer"))? .clone(); peer.rpc(request).await.inspect_err(|err| { - info!(self.log, "{:?}", err); + info!(self.log, "{:#?}", err); }) } diff --git a/rs/p2p/quic_transport/src/request_handler.rs b/rs/p2p/quic_transport/src/request_handler.rs index f75dc0659e7..61ecb2e3231 100644 --- a/rs/p2p/quic_transport/src/request_handler.rs +++ b/rs/p2p/quic_transport/src/request_handler.rs @@ -33,6 +33,11 @@ use crate::{ const QUIC_METRIC_SCRAPE_INTERVAL: Duration = Duration::from_secs(5); +/// The event loop is responsible for managing a single connection. The event loop will exist if 1 out of 3 conditions happen. +/// 1. The connection is broken +/// 2. The peer closed the connection (e.g. due to topology change, peer thinking the connection is broken) +/// 3. The connection is closed locally (e.g. due to topology change, new incoming connection from the same peer) +/// /// Note: The event loop is cancel-safe. 
pub async fn start_stream_acceptor( log: ReplicaLogger, From c970dcbb1a25647fd95d8132b23277b3819fac34 Mon Sep 17 00:00:00 2001 From: Bas van Dijk Date: Thu, 5 Dec 2024 10:52:23 +0100 Subject: [PATCH 18/23] test(IDX): improve error message in xnet_slo_test (#2984) The `//rs/tests/message_routing/xnet:xnet_slo_3_subnets_test` is currently failing on master when starting canisters. This commit improves the error message we get in this case: ``` 2024-12-05 09:42:53.985 INFO[test:StdErr] Starting canister_idx=0 on subnet_idx=1 failed because of: unexpected result: "rejected" - Some("Error from Canister 5v3p4-iyaaa-aaaaa-qaaaa-cai: Canister called `ic0.trap` with message: failed to decode call arguments: Custom(Fail to decode argument 0 from table0 to record {\n payload_size_bytes : nat64;\n network_topology : vec vec principal;\n canister_to_subnet_rate : nat64;\n}\n\nCaused by:\n Subtyping error: Type mismatch at external/crate_index__candid-0.10.10/src/de.rs:994)\n Canister Backtrace:\nunknown function at index 683\n unknown function at index 50\nunknown function at index 451\n.\n Consider gracefully handling failures from this canister or altering the canister to handle exceptions. See documentation: http://internetcomputer.org/docs/current/references/execution-errors#trapped-explicitly") ``` --- rs/tests/message_routing/common/common.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/rs/tests/message_routing/common/common.rs b/rs/tests/message_routing/common/common.rs index 017aefd8e4e..078d1cbe221 100644 --- a/rs/tests/message_routing/common/common.rs +++ b/rs/tests/message_routing/common/common.rs @@ -34,10 +34,10 @@ pub async fn start_all_canisters( let _: String = canister .update_("start", candid, input) .await - .unwrap_or_else(|_| { + .unwrap_or_else(|e| { panic!( - "Starting canister_idx={} on subnet_idx={}", - canister_idx, subnet_idx + "Starting canister_idx={} on subnet_idx={} failed because of: {}", + canister_idx, subnet_idx, e ) }); }); From df6e86cedeeba1c8f8066457c4f212100ce8ff5b Mon Sep 17 00:00:00 2001 From: Eero Kelly Date: Thu, 5 Dec 2024 18:32:51 +0800 Subject: [PATCH 19/23] fix: Revert from 24.04 build container (#2951) Revert changes from https://github.com/dfinity/ic/pull/1946 and https://github.com/dfinity/ic/pull/2858 until we can dynamically link to an older libraries for broader compatibility in distributed binaries. 
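Returning briefly to the xnet test fix in PATCH 18 above: the entire change is that the `unwrap_or_else` closure no longer discards its error argument, so the panic message carries the underlying rejection. A minimal stand-alone illustration of the pattern follows, with a made-up `start_canister` helper that always fails rather than the real canister client API used by the test.

```rust
// Hypothetical stand-in for an update call that can be rejected;
// not the real canister client API used by the test.
fn start_canister() -> Result<String, String> {
    Err(r#"unexpected result: "rejected" - failed to decode call arguments"#.to_string())
}

fn main() {
    let (canister_idx, subnet_idx) = (0, 1);

    // Before the fix the closure ignored its argument (`|_| ...`), so the panic
    // only said *where* the call failed. Forwarding the error into the message
    // also says *why*, which is what produces the detailed log quoted above.
    let _reply: String = start_canister().unwrap_or_else(|e| {
        panic!(
            "Starting canister_idx={} on subnet_idx={} failed because of: {}",
            canister_idx, subnet_idx, e
        )
    });
}
```

Running the sketch panics by design; the point is only the shape of the message that ends up in the test log.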
--------- Co-authored-by: IDX GitHub Automation Co-authored-by: Nicolas Mattia --- .devcontainer/devcontainer.json | 2 +- .github/workflows-source/ci-main.yml | 2 +- .github/workflows-source/ci-pr-only.yml | 2 +- .github/workflows-source/release-testing.yml | 2 +- .github/workflows-source/schedule-daily.yml | 2 +- .github/workflows-source/schedule-hourly.yml | 2 +- .github/workflows/ci-main.yml | 16 +- .github/workflows/ci-pr-only.yml | 8 +- .../workflows/container-mirror-images.json | 2 +- .github/workflows/release-testing.yml | 12 +- .github/workflows/rosetta-release.yml | 2 +- .github/workflows/schedule-daily.yml | 12 +- .github/workflows/schedule-hourly.yml | 6 +- .github/workflows/schedule-rust-bench.yml | 2 +- .github/workflows/schedule-weekly.yml | 2 +- .github/workflows/system-tests-k8s.yml | 4 +- .pre-commit-config.yaml | 8 - MODULE.bazel | 15 +- bazel/focal.lock.json | 1711 +++++++++++++++++ bazel/focal.yaml | 37 + ci/container/Dockerfile | 26 +- ci/container/TAG | 2 +- ci/container/build-image.sh | 3 +- ci/container/container-run.sh | 7 +- ci/container/files/packages.common | 4 +- .../manager/bazel_rust_dependency_manager.py | 7 +- requirements.txt | 1246 ++++++------ rs/tests/BUILD.bazel | 136 +- testnet/ansible/debug_vars_dump.yml | 2 +- .../ic_p8s_service_discovery_destroy.yml | 2 +- .../ic_p8s_service_discovery_install.yml | 6 +- .../roles/ic_guest/tasks/aux_disk_push.yml | 10 +- .../ansible/roles/ic_guest/tasks/create.yml | 30 +- .../ansible/roles/ic_guest/tasks/destroy.yml | 2 +- .../roles/ic_guest/tasks/disk_pull.yml | 4 +- .../roles/ic_guest/tasks/disk_push.yml | 10 +- .../ansible/roles/ic_guest/tasks/install.yml | 20 +- testnet/ansible/roles/ic_guest/tasks/main.yml | 24 +- .../roles/ic_guest/tasks/media_pull.yml | 2 +- .../roles/ic_guest/tasks/media_push.yml | 10 +- .../ansible/roles/ic_guest/tasks/prepare.yml | 6 +- .../ansible/roles/ic_guest/tasks/shutdown.yml | 4 +- .../ansible/roles/ic_guest/tasks/start.yml | 2 +- testnet/ansible/roles/ic_guest/tasks/stop.yml | 2 +- .../ic_guest/tasks/url_waitfor_connect.yml | 4 +- .../roles/ic_guest_prod/tasks/disk_pull.yml | 4 +- .../roles/ic_guest_prod/tasks/main.yml | 20 +- 47 files changed, 2574 insertions(+), 870 deletions(-) create mode 100755 bazel/focal.lock.json create mode 100644 bazel/focal.yaml diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index b3fc551e6d5..e5f7c85bcb8 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,5 +1,5 @@ { - "image": "ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3", + "image": "ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45", "remoteUser": "ubuntu", "privileged": true, "runArgs": [ diff --git a/.github/workflows-source/ci-main.yml b/.github/workflows-source/ci-main.yml index 0d7cd19b843..186d314b839 100644 --- a/.github/workflows-source/ci-main.yml +++ b/.github/workflows-source/ci-main.yml @@ -32,7 +32,7 @@ env: anchors: image: &image - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 dind-large-setup: &dind-large-setup runs-on: labels: dind-large diff --git a/.github/workflows-source/ci-pr-only.yml b/.github/workflows-source/ci-pr-only.yml index 4f0f439016e..6d13482cbb7 100644 --- a/.github/workflows-source/ci-pr-only.yml +++ b/.github/workflows-source/ci-pr-only.yml 
@@ -17,7 +17,7 @@ env: anchors: image: &image - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 dind-small-setup: &dind-small-setup timeout-minutes: 30 runs-on: diff --git a/.github/workflows-source/release-testing.yml b/.github/workflows-source/release-testing.yml index 205927c98c3..9b7c1c854d7 100644 --- a/.github/workflows-source/release-testing.yml +++ b/.github/workflows-source/release-testing.yml @@ -23,7 +23,7 @@ env: anchors: image: &image - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 dind-large-setup: &dind-large-setup runs-on: group: zh1 diff --git a/.github/workflows-source/schedule-daily.yml b/.github/workflows-source/schedule-daily.yml index 8e529a4c62f..0e7243e05e1 100644 --- a/.github/workflows-source/schedule-daily.yml +++ b/.github/workflows-source/schedule-daily.yml @@ -16,7 +16,7 @@ env: anchors: image: &image - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 dind-large-setup: &dind-large-setup runs-on: group: zh1 diff --git a/.github/workflows-source/schedule-hourly.yml b/.github/workflows-source/schedule-hourly.yml index d9a5c4af6c3..580333253f4 100644 --- a/.github/workflows-source/schedule-hourly.yml +++ b/.github/workflows-source/schedule-hourly.yml @@ -16,7 +16,7 @@ env: anchors: image: &image - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 dind-large-setup: &dind-large-setup runs-on: labels: dind-large diff --git a/.github/workflows/ci-main.yml b/.github/workflows/ci-main.yml index e3ac0d83aa7..1aeec11caac 100644 --- a/.github/workflows/ci-main.yml +++ b/.github/workflows/ci-main.yml @@ -30,7 +30,7 @@ jobs: bazel-test-all: name: Bazel Test All container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp -v /ceph-s3-info:/ceph-s3-info timeout-minutes: 90 @@ -129,7 +129,7 @@ jobs: runs-on: labels: dind-large container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp -v /ceph-s3-info:/ceph-s3-info timeout-minutes: 90 @@ -223,7 +223,7 @@ jobs: runs-on: labels: dind-large container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp -v /ceph-s3-info:/ceph-s3-info timeout-minutes: 90 @@ 
-258,7 +258,7 @@ jobs: runs-on: labels: dind-large container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp -v /ceph-s3-info:/ceph-s3-info timeout-minutes: 90 @@ -293,7 +293,7 @@ jobs: runs-on: labels: dind-small container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 timeout-minutes: 30 steps: - name: Checkout @@ -323,7 +323,7 @@ jobs: build-ic: name: Build IC container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp -v /ceph-s3-info:/ceph-s3-info timeout-minutes: 90 @@ -426,7 +426,7 @@ jobs: cargo-clippy-linux: name: Cargo Clippy Linux container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 timeout-minutes: 30 runs-on: group: ch1 @@ -463,7 +463,7 @@ jobs: cargo-build-release-linux: name: Cargo Build Release Linux container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 timeout-minutes: 30 runs-on: group: ch1 diff --git a/.github/workflows/ci-pr-only.yml b/.github/workflows/ci-pr-only.yml index b5762b2af44..1d9a94645d2 100644 --- a/.github/workflows/ci-pr-only.yml +++ b/.github/workflows/ci-pr-only.yml @@ -20,7 +20,7 @@ jobs: runs-on: labels: dind-large container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME if: ${{ github.event_name != 'merge_group' }} @@ -61,7 +61,7 @@ jobs: runs-on: labels: dind-small container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME if: ${{ github.event_name != 'merge_group' }} @@ -102,7 +102,7 @@ jobs: runs-on: labels: dind-small container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME if: ${{ github.event_name != 'merge_group' }} @@ -150,7 +150,7 @@ jobs: runs-on: labels: dind-large container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME if: contains(github.event.pull_request.labels.*.name, 'CI_COVERAGE') diff --git 
a/.github/workflows/container-mirror-images.json b/.github/workflows/container-mirror-images.json index 7b1e5dbab66..cbe829ea3ed 100644 --- a/.github/workflows/container-mirror-images.json +++ b/.github/workflows/container-mirror-images.json @@ -3,7 +3,7 @@ { "image": "library/ubuntu", "repo": "docker.io", - "sha256": "77d57fd89366f7d16615794a5b53e124d742404e20f035c22032233f1826bd6a" + "sha256": "965fbcae990b0467ed5657caceaec165018ef44a4d2d46c7cdea80a9dff0d1ea" }, { "image": "library/ubuntu", diff --git a/.github/workflows/release-testing.yml b/.github/workflows/release-testing.yml index c84af5077db..f6cdc4c7500 100644 --- a/.github/workflows/release-testing.yml +++ b/.github/workflows/release-testing.yml @@ -28,7 +28,7 @@ jobs: group: zh1 labels: dind-large container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp timeout-minutes: 180 # 3 hours @@ -71,7 +71,7 @@ jobs: group: zh1 labels: dind-large container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp timeout-minutes: 180 # 3 hours @@ -114,7 +114,7 @@ jobs: group: zh1 labels: dind-large container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp timeout-minutes: 90 @@ -157,7 +157,7 @@ jobs: group: zh1 labels: dind-large container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp timeout-minutes: 60 @@ -205,7 +205,7 @@ jobs: group: zh1 labels: dind-large container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp timeout-minutes: 180 # 3 hours @@ -233,7 +233,7 @@ jobs: group: zh1 labels: dind-large container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp timeout-minutes: 180 # 3 hours diff --git a/.github/workflows/rosetta-release.yml b/.github/workflows/rosetta-release.yml index 02bc9f13215..2da864a410d 100644 --- a/.github/workflows/rosetta-release.yml +++ b/.github/workflows/rosetta-release.yml @@ -22,7 +22,7 @@ jobs: runs-on: labels: 
dind-large container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 steps: - name: Checkout uses: actions/checkout@v4 diff --git a/.github/workflows/schedule-daily.yml b/.github/workflows/schedule-daily.yml index 71d716cd4b0..f6af2d9e132 100644 --- a/.github/workflows/schedule-daily.yml +++ b/.github/workflows/schedule-daily.yml @@ -15,7 +15,7 @@ jobs: bazel-test-bare-metal: name: Bazel Test Bare Metal container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp timeout-minutes: 120 @@ -73,7 +73,7 @@ jobs: group: zh1 labels: dind-large container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp timeout-minutes: 720 # 12 hours @@ -115,7 +115,7 @@ jobs: group: zh1 labels: dind-large container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp timeout-minutes: 20 @@ -157,7 +157,7 @@ jobs: group: zh1 labels: dind-large container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp timeout-minutes: 480 @@ -204,7 +204,7 @@ jobs: group: zh1 labels: dind-large container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp timeout-minutes: 60 @@ -256,7 +256,7 @@ jobs: group: zh1 labels: dind-large container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp timeout-minutes: 120 diff --git a/.github/workflows/schedule-hourly.yml b/.github/workflows/schedule-hourly.yml index 400f8910f99..f374fb886c1 100644 --- a/.github/workflows/schedule-hourly.yml +++ b/.github/workflows/schedule-hourly.yml @@ -17,7 +17,7 @@ jobs: runs-on: labels: dind-large container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 
options: >- -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp timeout-minutes: 120 @@ -54,7 +54,7 @@ jobs: bazel-system-test-hourly: name: Bazel System Tests Hourly container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp timeout-minutes: 120 @@ -97,7 +97,7 @@ jobs: runs-on: labels: dind-large container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp timeout-minutes: 120 diff --git a/.github/workflows/schedule-rust-bench.yml b/.github/workflows/schedule-rust-bench.yml index d978aecb400..3b6eb645e14 100644 --- a/.github/workflows/schedule-rust-bench.yml +++ b/.github/workflows/schedule-rust-bench.yml @@ -22,7 +22,7 @@ jobs: # see linux-x86-64 runner group labels: rust-benchmarks container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 # running on bare metal machine using ubuntu user options: --user ubuntu -v /cache:/cache timeout-minutes: 720 # 12 hours diff --git a/.github/workflows/schedule-weekly.yml b/.github/workflows/schedule-weekly.yml index 9c19416ec36..918fe9b4864 100644 --- a/.github/workflows/schedule-weekly.yml +++ b/.github/workflows/schedule-weekly.yml @@ -14,7 +14,7 @@ jobs: runs-on: labels: dind-large container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME -v /cache:/cache diff --git a/.github/workflows/system-tests-k8s.yml b/.github/workflows/system-tests-k8s.yml index c2f79a182b8..fa02d47251f 100644 --- a/.github/workflows/system-tests-k8s.yml +++ b/.github/workflows/system-tests-k8s.yml @@ -48,7 +48,7 @@ jobs: group: ln1 labels: dind-large container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME -e KUBECONFIG --privileged --cgroupns host @@ -103,7 +103,7 @@ jobs: group: ln1 labels: dind-large container: - image: ghcr.io/dfinity/ic-build@sha256:6a548dd4cfee96260b6938fddaa74abc86fc0df5c407d4eef135694b64c288e3 + image: ghcr.io/dfinity/ic-build@sha256:4fd13b47285e783c3a6f35aadd9559d097c0de162a1cf221ead66ab1598d5d45 options: >- -e NODE_NAME -e KUBECONFIG --privileged --cgroupns host diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4e2f3ca9bb5..d9226120925 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -58,11 +58,3 @@ repos: language: system always_run: true verbose: true - -- repo: https://github.com/ansible/ansible-lint.git - rev: v24.5.0 - hooks: - - id: ansible-lint - always_run: false - files: (^|/)testnet/ansible/.+\.(yaml|yml)$ - args: ['-i', 
'testnet/ansible/.ansible-lint-ignore', 'testnet/ansible'] diff --git a/MODULE.bazel b/MODULE.bazel index 74ed610fb57..76613152180 100644 --- a/MODULE.bazel +++ b/MODULE.bazel @@ -205,7 +205,7 @@ use_repo(oci, "minica", "minica_linux_amd64") # used by rosetta image oci.pull( name = "rust_base", - image = "gcr.io/distroless/cc-debian12@sha256:3310655aac0d85eb9d579792387af1ff3eb7a1667823478be58020ab0e0d97a8", + image = "gcr.io/distroless/cc-debian11@sha256:8e94f031353596c3fc9db6a2499bcc82dacc40cb71e0703476f9fad41677efdf", platforms = ["linux/amd64"], ) use_repo(oci, "rust_base", "rust_base_linux_amd64") @@ -213,7 +213,7 @@ use_repo(oci, "rust_base", "rust_base_linux_amd64") # used in various places as base oci.pull( name = "ubuntu_base", - image = "ghcr.io/dfinity/ubuntu@sha256:77d57fd89366f7d16615794a5b53e124d742404e20f035c22032233f1826bd6a", + image = "ghcr.io/dfinity/ubuntu@sha256:965fbcae990b0467ed5657caceaec165018ef44a4d2d46c7cdea80a9dff0d1ea", platforms = ["linux/amd64"], ) use_repo(oci, "ubuntu_base", "ubuntu_base_linux_amd64") @@ -252,6 +252,17 @@ bazel_dep(name = "rules_distroless", version = "0.3.8") apt = use_extension("@rules_distroless//apt:extensions.bzl", "apt") +## Packageset based on an Ubuntu focal snapshot, see manifest file +## for details +## To update, comment out the `lock` field below and run: +## bazel run @focal//:lock +apt.install( + name = "focal", + lock = "//bazel:focal.lock.json", + manifest = "//bazel:focal.yaml", +) +use_repo(apt, "focal") + # Packageset based on an Ubuntu noble snapshot, see manifest file # for details # To update, comment out the `lock` field below and run: diff --git a/bazel/focal.lock.json b/bazel/focal.lock.json new file mode 100755 index 00000000000..84bad015444 --- /dev/null +++ b/bazel/focal.lock.json @@ -0,0 +1,1711 @@ +{ + "packages": [ + { + "arch": "amd64", + "dependencies": [ + { + "key": "openssl_1.1.1f-1ubuntu2.22_amd64", + "name": "openssl", + "version": "1.1.1f-1ubuntu2.22" + }, + { + "key": "libssl1.1_1.1.1f-1ubuntu2.22_amd64", + "name": "libssl1.1", + "version": "1.1.1f-1ubuntu2.22" + }, + { + "key": "libc6_2.31-0ubuntu9.14_amd64", + "name": "libc6", + "version": "2.31-0ubuntu9.14" + }, + { + "key": "libcrypt1_1-4.4.10-10ubuntu4_amd64", + "name": "libcrypt1", + "version": "1:4.4.10-10ubuntu4" + }, + { + "key": "libgcc-s1_10.5.0-1ubuntu1_20.04_amd64", + "name": "libgcc-s1", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "gcc-10-base_10.5.0-1ubuntu1_20.04_amd64", + "name": "gcc-10-base", + "version": "10.5.0-1ubuntu1~20.04" + } + ], + "key": "ca-certificates_20230311ubuntu0.20.04.1_amd64", + "name": "ca-certificates", + "sha256": "ec23973dbb9317eb746d44aa6795556ee591d75eb9f7292a6672e69f215afcb9", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/c/ca-certificates/ca-certificates_20230311ubuntu0.20.04.1_all.deb", + "version": "20230311ubuntu0.20.04.1" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "openssl_1.1.1f-1ubuntu2.22_amd64", + "name": "openssl", + "sha256": "d5b1e5de1627b6b895adbb030ccb8fc16125c5d0ef3604d898da136fc2bd6ad2", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/o/openssl/openssl_1.1.1f-1ubuntu2.22_amd64.deb", + "version": "1.1.1f-1ubuntu2.22" + }, + { + "arch": "amd64", + "dependencies": [ + { + "key": "libc6_2.31-0ubuntu9.14_amd64", + "name": "libc6", + "version": "2.31-0ubuntu9.14" + }, + { + "key": "libcrypt1_1-4.4.10-10ubuntu4_amd64", + "name": "libcrypt1", + "version": "1:4.4.10-10ubuntu4" + }, + { + "key": 
"libgcc-s1_10.5.0-1ubuntu1_20.04_amd64", + "name": "libgcc-s1", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "gcc-10-base_10.5.0-1ubuntu1_20.04_amd64", + "name": "gcc-10-base", + "version": "10.5.0-1ubuntu1~20.04" + } + ], + "key": "libssl1.1_1.1.1f-1ubuntu2.22_amd64", + "name": "libssl1.1", + "sha256": "df9d07d552aab0c7e5b9fbcc568913acd20d50fb8b1e34876fa348b7a0c82d48", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/o/openssl/libssl1.1_1.1.1f-1ubuntu2.22_amd64.deb", + "version": "1.1.1f-1ubuntu2.22" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libc6_2.31-0ubuntu9.14_amd64", + "name": "libc6", + "sha256": "a469164a97599aaef2552512acfd91c8830dc8d5e8053f9c02215ff9cd36673c", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/g/glibc/libc6_2.31-0ubuntu9.14_amd64.deb", + "version": "2.31-0ubuntu9.14" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libcrypt1_1-4.4.10-10ubuntu4_amd64", + "name": "libcrypt1", + "sha256": "231b4dbbe5865775f118cfa61394f1e16fa7102b6953a327e672499a20876d73", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/libx/libxcrypt/libcrypt1_4.4.10-10ubuntu4_amd64.deb", + "version": "1:4.4.10-10ubuntu4" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libgcc-s1_10.5.0-1ubuntu1_20.04_amd64", + "name": "libgcc-s1", + "sha256": "4aa7b9c9f3225df65a750ae0ff5c890fc6108c82a609b3c5abd45d211838bf3c", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/g/gcc-10/libgcc-s1_10.5.0-1ubuntu1~20.04_amd64.deb", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "gcc-10-base_10.5.0-1ubuntu1_20.04_amd64", + "name": "gcc-10-base", + "sha256": "8fac06791057b1bba6178466e160bb3ec2a795297f10e88e34daf762572ed5c0", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/g/gcc-10/gcc-10-base_10.5.0-1ubuntu1~20.04_amd64.deb", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "arch": "amd64", + "dependencies": [ + { + "key": "libselinux1_3.0-1build2_amd64", + "name": "libselinux1", + "version": "3.0-1build2" + }, + { + "key": "libpcre2-8-0_10.34-7ubuntu0.1_amd64", + "name": "libpcre2-8-0", + "version": "10.34-7ubuntu0.1" + }, + { + "key": "libc6_2.31-0ubuntu9.14_amd64", + "name": "libc6", + "version": "2.31-0ubuntu9.14" + }, + { + "key": "libcrypt1_1-4.4.10-10ubuntu4_amd64", + "name": "libcrypt1", + "version": "1:4.4.10-10ubuntu4" + }, + { + "key": "libgcc-s1_10.5.0-1ubuntu1_20.04_amd64", + "name": "libgcc-s1", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "gcc-10-base_10.5.0-1ubuntu1_20.04_amd64", + "name": "gcc-10-base", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "libattr1_1-2.4.48-5_amd64", + "name": "libattr1", + "version": "1:2.4.48-5" + }, + { + "key": "libacl1_2.2.53-6_amd64", + "name": "libacl1", + "version": "2.2.53-6" + } + ], + "key": "coreutils_8.30-3ubuntu2_amd64", + "name": "coreutils", + "sha256": "99aa50af84de1737735f2f51e570d60f5842aa1d4a3129527906e7ffda368853", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/c/coreutils/coreutils_8.30-3ubuntu2_amd64.deb", + "version": "8.30-3ubuntu2" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libselinux1_3.0-1build2_amd64", + "name": "libselinux1", + "sha256": "1b8674b6f9e62fbae768d9ffbd686955d08db889ee4107d15ae02d1ec033cc7b", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/libs/libselinux/libselinux1_3.0-1build2_amd64.deb", + "version": "3.0-1build2" + }, + { + "arch": "amd64", + 
"dependencies": [], + "key": "libpcre2-8-0_10.34-7ubuntu0.1_amd64", + "name": "libpcre2-8-0", + "sha256": "5229f14c06074ae5f2d6dd7cef2c9dff8dd57c6ecf1381ff019fe5d233cc275f", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/p/pcre2/libpcre2-8-0_10.34-7ubuntu0.1_amd64.deb", + "version": "10.34-7ubuntu0.1" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libattr1_1-2.4.48-5_amd64", + "name": "libattr1", + "sha256": "d916bb73d9a160ccaa48d997c823af528cb6b4174c5234c744b40ae5aa85ce98", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/a/attr/libattr1_2.4.48-5_amd64.deb", + "version": "1:2.4.48-5" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libacl1_2.2.53-6_amd64", + "name": "libacl1", + "sha256": "9fa9cc2f8eeccd8d29efcb998111b082432c65de75ca60ad9c333289bb3bb765", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/a/acl/libacl1_2.2.53-6_amd64.deb", + "version": "2.2.53-6" + }, + { + "arch": "amd64", + "dependencies": [ + { + "key": "libsigsegv2_2.12-2_amd64", + "name": "libsigsegv2", + "version": "2.12-2" + }, + { + "key": "libc6_2.31-0ubuntu9.14_amd64", + "name": "libc6", + "version": "2.31-0ubuntu9.14" + }, + { + "key": "libcrypt1_1-4.4.10-10ubuntu4_amd64", + "name": "libcrypt1", + "version": "1:4.4.10-10ubuntu4" + }, + { + "key": "libgcc-s1_10.5.0-1ubuntu1_20.04_amd64", + "name": "libgcc-s1", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "gcc-10-base_10.5.0-1ubuntu1_20.04_amd64", + "name": "gcc-10-base", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "libreadline8_8.0-4_amd64", + "name": "libreadline8", + "version": "8.0-4" + }, + { + "key": "libtinfo6_6.2-0ubuntu2.1_amd64", + "name": "libtinfo6", + "version": "6.2-0ubuntu2.1" + }, + { + "key": "readline-common_8.0-4_amd64", + "name": "readline-common", + "version": "8.0-4" + }, + { + "key": "libmpfr6_4.0.2-1_amd64", + "name": "libmpfr6", + "version": "4.0.2-1" + }, + { + "key": "libgmp10_2-6.2.0-p-dfsg-4ubuntu0.1_amd64", + "name": "libgmp10", + "version": "2:6.2.0+dfsg-4ubuntu0.1" + } + ], + "key": "gawk_1-5.0.1-p-dfsg-1ubuntu0.1_amd64", + "name": "gawk", + "sha256": "5d48f427156f9d65f433a90af21033051102db391b1c1a7a07babd17dced5453", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/g/gawk/gawk_5.0.1+dfsg-1ubuntu0.1_amd64.deb", + "version": "1:5.0.1+dfsg-1ubuntu0.1" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libsigsegv2_2.12-2_amd64", + "name": "libsigsegv2", + "sha256": "58279e0a8af9cc299d7195f4b5dc1922f4b0779c1166d3715b335944102f9f7e", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/libs/libsigsegv/libsigsegv2_2.12-2_amd64.deb", + "version": "2.12-2" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libreadline8_8.0-4_amd64", + "name": "libreadline8", + "sha256": "5c0e982098eeb1b69a1360f4dc20553397d0a41240f3b2fc2812ee3f02274a82", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/r/readline/libreadline8_8.0-4_amd64.deb", + "version": "8.0-4" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libtinfo6_6.2-0ubuntu2.1_amd64", + "name": "libtinfo6", + "sha256": "711a3a901c3a71561565558865699efa9c07a99fdc810ffe086a5636f89c6431", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/n/ncurses/libtinfo6_6.2-0ubuntu2.1_amd64.deb", + "version": "6.2-0ubuntu2.1" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "readline-common_8.0-4_amd64", + "name": "readline-common", + "sha256": 
"38c3ac67e2dab4122a2f948f433c4cb5d5653d82b323f3ff30599797b7adee9f", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/r/readline/readline-common_8.0-4_all.deb", + "version": "8.0-4" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libmpfr6_4.0.2-1_amd64", + "name": "libmpfr6", + "sha256": "d098b67c3e1492a1c21a6a0d9befc88344e59a0d0eb6109384f85d8524a608cb", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/m/mpfr4/libmpfr6_4.0.2-1_amd64.deb", + "version": "4.0.2-1" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libgmp10_2-6.2.0-p-dfsg-4ubuntu0.1_amd64", + "name": "libgmp10", + "sha256": "541dc5050b0cecdecbfe155641588ba08da645b61b11fc4bcb045f2f4773da43", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/g/gmp/libgmp10_6.2.0+dfsg-4ubuntu0.1_amd64.deb", + "version": "2:6.2.0+dfsg-4ubuntu0.1" + }, + { + "arch": "amd64", + "dependencies": [ + { + "key": "libudev1_245.4-4ubuntu3.23_amd64", + "name": "libudev1", + "version": "245.4-4ubuntu3.23" + }, + { + "key": "libc6_2.31-0ubuntu9.14_amd64", + "name": "libc6", + "version": "2.31-0ubuntu9.14" + }, + { + "key": "libcrypt1_1-4.4.10-10ubuntu4_amd64", + "name": "libcrypt1", + "version": "1:4.4.10-10ubuntu4" + }, + { + "key": "libgcc-s1_10.5.0-1ubuntu1_20.04_amd64", + "name": "libgcc-s1", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "gcc-10-base_10.5.0-1ubuntu1_20.04_amd64", + "name": "gcc-10-base", + "version": "10.5.0-1ubuntu1~20.04" + } + ], + "key": "dosfstools_4.1-2_amd64", + "name": "dosfstools", + "sha256": "58492d84924cee267cb3cba0e64e1241accde967748e6a1f31820fa74892ee32", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/d/dosfstools/dosfstools_4.1-2_amd64.deb", + "version": "4.1-2" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libudev1_245.4-4ubuntu3.23_amd64", + "name": "libudev1", + "sha256": "a9521025f12a45a91ed198fa10221e149caaba0f1156e07a641ebf02a5449805", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/s/systemd/libudev1_245.4-4ubuntu3.23_amd64.deb", + "version": "245.4-4ubuntu3.23" + }, + { + "arch": "amd64", + "dependencies": [ + { + "key": "liblzma5_5.2.4-1ubuntu1.1_amd64", + "name": "liblzma5", + "version": "5.2.4-1ubuntu1.1" + }, + { + "key": "libc6_2.31-0ubuntu9.14_amd64", + "name": "libc6", + "version": "2.31-0ubuntu9.14" + }, + { + "key": "libcrypt1_1-4.4.10-10ubuntu4_amd64", + "name": "libcrypt1", + "version": "1:4.4.10-10ubuntu4" + }, + { + "key": "libgcc-s1_10.5.0-1ubuntu1_20.04_amd64", + "name": "libgcc-s1", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "gcc-10-base_10.5.0-1ubuntu1_20.04_amd64", + "name": "gcc-10-base", + "version": "10.5.0-1ubuntu1~20.04" + } + ], + "key": "libunwind8_1.2.1-9ubuntu0.1_amd64", + "name": "libunwind8", + "sha256": "269f62bfb9fc6d7e5bbfb600b8f688e2d94099ff9dc0fddc28351330915212d1", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/libu/libunwind/libunwind8_1.2.1-9ubuntu0.1_amd64.deb", + "version": "1.2.1-9ubuntu0.1" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "liblzma5_5.2.4-1ubuntu1.1_amd64", + "name": "liblzma5", + "sha256": "f545d34c86119802fbae869a09e1077a714e12a01ef6a3ef67fdc745e5db311d", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/x/xz-utils/liblzma5_5.2.4-1ubuntu1.1_amd64.deb", + "version": "5.2.4-1ubuntu1.1" + }, + { + "arch": "amd64", + "dependencies": [ + { + "key": "libc6_2.31-0ubuntu9.14_amd64", + "name": "libc6", + "version": "2.31-0ubuntu9.14" + }, + 
{ + "key": "libcrypt1_1-4.4.10-10ubuntu4_amd64", + "name": "libcrypt1", + "version": "1:4.4.10-10ubuntu4" + }, + { + "key": "libgcc-s1_10.5.0-1ubuntu1_20.04_amd64", + "name": "libgcc-s1", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "gcc-10-base_10.5.0-1ubuntu1_20.04_amd64", + "name": "gcc-10-base", + "version": "10.5.0-1ubuntu1~20.04" + } + ], + "key": "mtools_4.0.24-1_amd64", + "name": "mtools", + "sha256": "69e3dd6afa31643cf57a2475eb4156998cbc3144ec07985a9d65b0e78731743c", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/m/mtools/mtools_4.0.24-1_amd64.deb", + "version": "4.0.24-1" + }, + { + "arch": "amd64", + "dependencies": [ + { + "key": "zlib1g_1-1.2.11.dfsg-2ubuntu1.5_amd64", + "name": "zlib1g", + "version": "1:1.2.11.dfsg-2ubuntu1.5" + }, + { + "key": "libc6_2.31-0ubuntu9.14_amd64", + "name": "libc6", + "version": "2.31-0ubuntu9.14" + }, + { + "key": "libcrypt1_1-4.4.10-10ubuntu4_amd64", + "name": "libcrypt1", + "version": "1:4.4.10-10ubuntu4" + }, + { + "key": "libgcc-s1_10.5.0-1ubuntu1_20.04_amd64", + "name": "libgcc-s1", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "gcc-10-base_10.5.0-1ubuntu1_20.04_amd64", + "name": "gcc-10-base", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "libssl1.1_1.1.1f-1ubuntu2.22_amd64", + "name": "libssl1.1", + "version": "1.1.1f-1ubuntu2.22" + }, + { + "key": "libselinux1_3.0-1build2_amd64", + "name": "libselinux1", + "version": "3.0-1build2" + }, + { + "key": "libpcre2-8-0_10.34-7ubuntu0.1_amd64", + "name": "libpcre2-8-0", + "version": "10.34-7ubuntu0.1" + }, + { + "key": "libgssapi-krb5-2_1.17-6ubuntu4.4_amd64", + "name": "libgssapi-krb5-2", + "version": "1.17-6ubuntu4.4" + }, + { + "key": "libkrb5support0_1.17-6ubuntu4.4_amd64", + "name": "libkrb5support0", + "version": "1.17-6ubuntu4.4" + }, + { + "key": "libkrb5-3_1.17-6ubuntu4.4_amd64", + "name": "libkrb5-3", + "version": "1.17-6ubuntu4.4" + }, + { + "key": "libkeyutils1_1.6-6ubuntu1.1_amd64", + "name": "libkeyutils1", + "version": "1.6-6ubuntu1.1" + }, + { + "key": "libk5crypto3_1.17-6ubuntu4.4_amd64", + "name": "libk5crypto3", + "version": "1.17-6ubuntu4.4" + }, + { + "key": "libcom-err2_1.45.5-2ubuntu1.1_amd64", + "name": "libcom-err2", + "version": "1.45.5-2ubuntu1.1" + }, + { + "key": "libfido2-1_1.3.1-1ubuntu2_amd64", + "name": "libfido2-1", + "version": "1.3.1-1ubuntu2" + }, + { + "key": "libudev1_245.4-4ubuntu3.23_amd64", + "name": "libudev1", + "version": "245.4-4ubuntu3.23" + }, + { + "key": "libcbor0.6_0.6.0-0ubuntu1_amd64", + "name": "libcbor0.6", + "version": "0.6.0-0ubuntu1" + }, + { + "key": "libedit2_3.1-20191231-1_amd64", + "name": "libedit2", + "version": "3.1-20191231-1" + }, + { + "key": "libtinfo6_6.2-0ubuntu2.1_amd64", + "name": "libtinfo6", + "version": "6.2-0ubuntu2.1" + }, + { + "key": "libbsd0_0.10.0-1_amd64", + "name": "libbsd0", + "version": "0.10.0-1" + }, + { + "key": "passwd_1-4.8.1-1ubuntu5.20.04.5_amd64", + "name": "passwd", + "version": "1:4.8.1-1ubuntu5.20.04.5" + }, + { + "key": "libpam-modules_1.3.1-5ubuntu4.7_amd64", + "name": "libpam-modules", + "version": "1.3.1-5ubuntu4.7" + }, + { + "key": "libpam-modules-bin_1.3.1-5ubuntu4.7_amd64", + "name": "libpam-modules-bin", + "version": "1.3.1-5ubuntu4.7" + }, + { + "key": "libpam0g_1.3.1-5ubuntu4.7_amd64", + "name": "libpam0g", + "version": "1.3.1-5ubuntu4.7" + }, + { + "key": "libaudit1_1-2.8.5-2ubuntu6_amd64", + "name": "libaudit1", + "version": "1:2.8.5-2ubuntu6" + }, + { + "key": "libcap-ng0_0.7.9-2.1build1_amd64", + "name": "libcap-ng0", + "version": 
"0.7.9-2.1build1" + }, + { + "key": "libaudit-common_1-2.8.5-2ubuntu6_amd64", + "name": "libaudit-common", + "version": "1:2.8.5-2ubuntu6" + }, + { + "key": "libdb5.3_5.3.28-p-dfsg1-0.6ubuntu2_amd64", + "name": "libdb5.3", + "version": "5.3.28+dfsg1-0.6ubuntu2" + }, + { + "key": "libsemanage1_3.0-1build2_amd64", + "name": "libsemanage1", + "version": "3.0-1build2" + }, + { + "key": "libsepol1_3.0-1ubuntu0.1_amd64", + "name": "libsepol1", + "version": "3.0-1ubuntu0.1" + }, + { + "key": "libbz2-1.0_1.0.8-2_amd64", + "name": "libbz2-1.0", + "version": "1.0.8-2" + }, + { + "key": "libsemanage-common_3.0-1build2_amd64", + "name": "libsemanage-common", + "version": "3.0-1build2" + }, + { + "key": "dpkg_1.19.7ubuntu3.2_amd64", + "name": "dpkg", + "version": "1.19.7ubuntu3.2" + }, + { + "key": "tar_1.30-p-dfsg-7ubuntu0.20.04.4_amd64", + "name": "tar", + "version": "1.30+dfsg-7ubuntu0.20.04.4" + }, + { + "key": "libacl1_2.2.53-6_amd64", + "name": "libacl1", + "version": "2.2.53-6" + }, + { + "key": "libzstd1_1.4.4-p-dfsg-3ubuntu0.1_amd64", + "name": "libzstd1", + "version": "1.4.4+dfsg-3ubuntu0.1" + }, + { + "key": "liblzma5_5.2.4-1ubuntu1.1_amd64", + "name": "liblzma5", + "version": "5.2.4-1ubuntu1.1" + }, + { + "key": "adduser_3.118ubuntu2_amd64", + "name": "adduser", + "version": "3.118ubuntu2" + } + ], + "key": "openssh-client_1-8.2p1-4ubuntu0.11_amd64", + "name": "openssh-client", + "sha256": "4df6dd0ce56ef519f884483b8b993027d54fd7f80baf4c415236fc5180b345c2", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/o/openssh/openssh-client_8.2p1-4ubuntu0.11_amd64.deb", + "version": "1:8.2p1-4ubuntu0.11" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "zlib1g_1-1.2.11.dfsg-2ubuntu1.5_amd64", + "name": "zlib1g", + "sha256": "bf67018f5303466eb468680b637a5d3f3bb17b9d44decf3d82d40b35babcd3e0", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2ubuntu1.5_amd64.deb", + "version": "1:1.2.11.dfsg-2ubuntu1.5" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libgssapi-krb5-2_1.17-6ubuntu4.4_amd64", + "name": "libgssapi-krb5-2", + "sha256": "873956e64e6e8c3dcbe2f62beaf919199a63e026483d1823410f9f79c954b1e2", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/k/krb5/libgssapi-krb5-2_1.17-6ubuntu4.4_amd64.deb", + "version": "1.17-6ubuntu4.4" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libkrb5support0_1.17-6ubuntu4.4_amd64", + "name": "libkrb5support0", + "sha256": "4997d0cbd7b5bcd940a981f5ab231de89febe3ee0ee53c113a0b4bf556cb97e1", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/k/krb5/libkrb5support0_1.17-6ubuntu4.4_amd64.deb", + "version": "1.17-6ubuntu4.4" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libkrb5-3_1.17-6ubuntu4.4_amd64", + "name": "libkrb5-3", + "sha256": "4457de873b2ad6f81311f6464568321751a4a8382526155f8560ec04ccfb9e94", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/k/krb5/libkrb5-3_1.17-6ubuntu4.4_amd64.deb", + "version": "1.17-6ubuntu4.4" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libkeyutils1_1.6-6ubuntu1.1_amd64", + "name": "libkeyutils1", + "sha256": "5a098b2585e549b997a979c38e5bfcc07a5df3efef4753e41ad4dab1122d576c", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/k/keyutils/libkeyutils1_1.6-6ubuntu1.1_amd64.deb", + "version": "1.6-6ubuntu1.1" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libk5crypto3_1.17-6ubuntu4.4_amd64", + "name": "libk5crypto3", 
+ "sha256": "046e292eb3beb82cc83365a4580a2c0448b484e8ce47f6057771cf44ccc9be45", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/k/krb5/libk5crypto3_1.17-6ubuntu4.4_amd64.deb", + "version": "1.17-6ubuntu4.4" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libcom-err2_1.45.5-2ubuntu1.1_amd64", + "name": "libcom-err2", + "sha256": "25ad33e306bf4ed08fbd39bf32db643f189c6ed1ec2acb19553d6a2bf69e590d", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/e/e2fsprogs/libcom-err2_1.45.5-2ubuntu1.1_amd64.deb", + "version": "1.45.5-2ubuntu1.1" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libfido2-1_1.3.1-1ubuntu2_amd64", + "name": "libfido2-1", + "sha256": "3af0d3b8ef2cff01d9a2f04bca11cf16fd91a6f5d5bd2fcf915c7ff24ffacc04", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/libf/libfido2/libfido2-1_1.3.1-1ubuntu2_amd64.deb", + "version": "1.3.1-1ubuntu2" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libcbor0.6_0.6.0-0ubuntu1_amd64", + "name": "libcbor0.6", + "sha256": "bcf2b0dd3b62cc29d28b1a4150f6023c8b57c5dbf3cb4cdb8ee51bcdaf072739", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/libc/libcbor/libcbor0.6_0.6.0-0ubuntu1_amd64.deb", + "version": "0.6.0-0ubuntu1" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libedit2_3.1-20191231-1_amd64", + "name": "libedit2", + "sha256": "51a1190157e2dfe2c26bbdc114d1fc659456def2e78e6e9582809cf92a0a49a4", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/libe/libedit/libedit2_3.1-20191231-1_amd64.deb", + "version": "3.1-20191231-1" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libbsd0_0.10.0-1_amd64", + "name": "libbsd0", + "sha256": "4f668025fe923a372eb7fc368d6769fcfff6809233d48fd20fc072917cd82e60", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/libb/libbsd/libbsd0_0.10.0-1_amd64.deb", + "version": "0.10.0-1" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "passwd_1-4.8.1-1ubuntu5.20.04.5_amd64", + "name": "passwd", + "sha256": "839d12b834d2ec87945020482493b435ac0297eba9dcbd9ce049f1c542790de9", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/s/shadow/passwd_4.8.1-1ubuntu5.20.04.5_amd64.deb", + "version": "1:4.8.1-1ubuntu5.20.04.5" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libpam-modules_1.3.1-5ubuntu4.7_amd64", + "name": "libpam-modules", + "sha256": "aef66cca70cd3b384cc12d385076fc793206be25ffaf7ff52334e14f95d03e0e", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/p/pam/libpam-modules_1.3.1-5ubuntu4.7_amd64.deb", + "version": "1.3.1-5ubuntu4.7" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libpam-modules-bin_1.3.1-5ubuntu4.7_amd64", + "name": "libpam-modules-bin", + "sha256": "644e9b03c46d7b99d6bcfe6105d57eeeb00086273081633f5e89951df4600df3", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/p/pam/libpam-modules-bin_1.3.1-5ubuntu4.7_amd64.deb", + "version": "1.3.1-5ubuntu4.7" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libpam0g_1.3.1-5ubuntu4.7_amd64", + "name": "libpam0g", + "sha256": "afc769e31f70c5ccfd9f184019b382c0e4455b0b6494704ec010b811baeea511", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/p/pam/libpam0g_1.3.1-5ubuntu4.7_amd64.deb", + "version": "1.3.1-5ubuntu4.7" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libaudit1_1-2.8.5-2ubuntu6_amd64", + "name": "libaudit1", + "sha256": 
"fb539f4d848d00e9472444285831e0204b9e92e7ee65dbb72b56e144a6dbedf8", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/a/audit/libaudit1_2.8.5-2ubuntu6_amd64.deb", + "version": "1:2.8.5-2ubuntu6" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libcap-ng0_0.7.9-2.1build1_amd64", + "name": "libcap-ng0", + "sha256": "e8606a60a92aa8054d1620781889a8b709c3db8714e3b03b3987f81c7e168d06", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.1build1_amd64.deb", + "version": "0.7.9-2.1build1" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libaudit-common_1-2.8.5-2ubuntu6_amd64", + "name": "libaudit-common", + "sha256": "9fa4a291df5682f5fee12aacdddd9ed09445c352c80d8a1af36056866e4b4906", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/a/audit/libaudit-common_2.8.5-2ubuntu6_all.deb", + "version": "1:2.8.5-2ubuntu6" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libdb5.3_5.3.28-p-dfsg1-0.6ubuntu2_amd64", + "name": "libdb5.3", + "sha256": "330775026b5e31340387fb58e12e40e241ca34cbbb6c0c28331bd83cb47c3656", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.6ubuntu2_amd64.deb", + "version": "5.3.28+dfsg1-0.6ubuntu2" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libsemanage1_3.0-1build2_amd64", + "name": "libsemanage1", + "sha256": "0d0acf32d855061b30ee8d2a84421e08adbaa781ce8884730ec3c3e3b49bd1f9", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/libs/libsemanage/libsemanage1_3.0-1build2_amd64.deb", + "version": "3.0-1build2" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libsepol1_3.0-1ubuntu0.1_amd64", + "name": "libsepol1", + "sha256": "83811dd753b41ed1bb7cf31fdebcb51dd6e1ad452c4f5ce6a39791ec64152108", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/libs/libsepol/libsepol1_3.0-1ubuntu0.1_amd64.deb", + "version": "3.0-1ubuntu0.1" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libbz2-1.0_1.0.8-2_amd64", + "name": "libbz2-1.0", + "sha256": "f3632ec38402ca0f9c61a6854469f1a0eba9389d3f73827b466033c3d5bbec69", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/b/bzip2/libbz2-1.0_1.0.8-2_amd64.deb", + "version": "1.0.8-2" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libsemanage-common_3.0-1build2_amd64", + "name": "libsemanage-common", + "sha256": "4141f803c811277d2ea56568a676a79f06017b8c5eb57891741808a27c55fffb", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/libs/libsemanage/libsemanage-common_3.0-1build2_all.deb", + "version": "3.0-1build2" + }, + { + "arch": "amd64", + "dependencies": [ + { + "key": "tar_1.30-p-dfsg-7ubuntu0.20.04.4_amd64", + "name": "tar", + "version": "1.30+dfsg-7ubuntu0.20.04.4" + }, + { + "key": "libselinux1_3.0-1build2_amd64", + "name": "libselinux1", + "version": "3.0-1build2" + }, + { + "key": "libpcre2-8-0_10.34-7ubuntu0.1_amd64", + "name": "libpcre2-8-0", + "version": "10.34-7ubuntu0.1" + }, + { + "key": "libc6_2.31-0ubuntu9.14_amd64", + "name": "libc6", + "version": "2.31-0ubuntu9.14" + }, + { + "key": "libcrypt1_1-4.4.10-10ubuntu4_amd64", + "name": "libcrypt1", + "version": "1:4.4.10-10ubuntu4" + }, + { + "key": "libgcc-s1_10.5.0-1ubuntu1_20.04_amd64", + "name": "libgcc-s1", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "gcc-10-base_10.5.0-1ubuntu1_20.04_amd64", + "name": "gcc-10-base", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": 
"libacl1_2.2.53-6_amd64", + "name": "libacl1", + "version": "2.2.53-6" + }, + { + "key": "zlib1g_1-1.2.11.dfsg-2ubuntu1.5_amd64", + "name": "zlib1g", + "version": "1:1.2.11.dfsg-2ubuntu1.5" + }, + { + "key": "libzstd1_1.4.4-p-dfsg-3ubuntu0.1_amd64", + "name": "libzstd1", + "version": "1.4.4+dfsg-3ubuntu0.1" + }, + { + "key": "liblzma5_5.2.4-1ubuntu1.1_amd64", + "name": "liblzma5", + "version": "5.2.4-1ubuntu1.1" + }, + { + "key": "libbz2-1.0_1.0.8-2_amd64", + "name": "libbz2-1.0", + "version": "1.0.8-2" + } + ], + "key": "dpkg_1.19.7ubuntu3.2_amd64", + "name": "dpkg", + "sha256": "6fdb88a04deb4cc57a164a43de7efe2ab36fd31f6b1df9a3aca4b39fc0a4d87f", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/d/dpkg/dpkg_1.19.7ubuntu3.2_amd64.deb", + "version": "1.19.7ubuntu3.2" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "tar_1.30-p-dfsg-7ubuntu0.20.04.4_amd64", + "name": "tar", + "sha256": "7387a202f0c71a5638c9514f854f2efd9809e24fa5212d1c7227b02bd06be5ec", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/t/tar/tar_1.30+dfsg-7ubuntu0.20.04.4_amd64.deb", + "version": "1.30+dfsg-7ubuntu0.20.04.4" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libzstd1_1.4.4-p-dfsg-3ubuntu0.1_amd64", + "name": "libzstd1", + "sha256": "7a4422dadb90510dc90765c308d65e61a3e244ceb3886394335e48cff7559e69", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/libz/libzstd/libzstd1_1.4.4+dfsg-3ubuntu0.1_amd64.deb", + "version": "1.4.4+dfsg-3ubuntu0.1" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "adduser_3.118ubuntu2_amd64", + "name": "adduser", + "sha256": "5f7ea9d1d52a2a9c349468f89d160230e21c8542faed1b1a97c23bce873e17b4", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/a/adduser/adduser_3.118ubuntu2_all.deb", + "version": "3.118ubuntu2" + }, + { + "arch": "amd64", + "dependencies": [ + { + "key": "libpopt0_1.16-14_amd64", + "name": "libpopt0", + "version": "1.16-14" + }, + { + "key": "libc6_2.31-0ubuntu9.14_amd64", + "name": "libc6", + "version": "2.31-0ubuntu9.14" + }, + { + "key": "libcrypt1_1-4.4.10-10ubuntu4_amd64", + "name": "libcrypt1", + "version": "1:4.4.10-10ubuntu4" + }, + { + "key": "libgcc-s1_10.5.0-1ubuntu1_20.04_amd64", + "name": "libgcc-s1", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "gcc-10-base_10.5.0-1ubuntu1_20.04_amd64", + "name": "gcc-10-base", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "libacl1_2.2.53-6_amd64", + "name": "libacl1", + "version": "2.2.53-6" + }, + { + "key": "lsb-base_11.1.0ubuntu2_amd64", + "name": "lsb-base", + "version": "11.1.0ubuntu2" + }, + { + "key": "init-system-helpers_1.57_amd64", + "name": "init-system-helpers", + "version": "1.57" + }, + { + "key": "perl-base_5.30.0-9ubuntu0.5_amd64", + "name": "perl-base", + "version": "5.30.0-9ubuntu0.5" + }, + { + "key": "dpkg_1.19.7ubuntu3.2_amd64", + "name": "dpkg", + "version": "1.19.7ubuntu3.2" + }, + { + "key": "tar_1.30-p-dfsg-7ubuntu0.20.04.4_amd64", + "name": "tar", + "version": "1.30+dfsg-7ubuntu0.20.04.4" + }, + { + "key": "libselinux1_3.0-1build2_amd64", + "name": "libselinux1", + "version": "3.0-1build2" + }, + { + "key": "libpcre2-8-0_10.34-7ubuntu0.1_amd64", + "name": "libpcre2-8-0", + "version": "10.34-7ubuntu0.1" + }, + { + "key": "zlib1g_1-1.2.11.dfsg-2ubuntu1.5_amd64", + "name": "zlib1g", + "version": "1:1.2.11.dfsg-2ubuntu1.5" + }, + { + "key": "libzstd1_1.4.4-p-dfsg-3ubuntu0.1_amd64", + "name": "libzstd1", + "version": "1.4.4+dfsg-3ubuntu0.1" + }, + { + "key": 
"liblzma5_5.2.4-1ubuntu1.1_amd64", + "name": "liblzma5", + "version": "5.2.4-1ubuntu1.1" + }, + { + "key": "libbz2-1.0_1.0.8-2_amd64", + "name": "libbz2-1.0", + "version": "1.0.8-2" + } + ], + "key": "rsync_3.1.3-8ubuntu0.7_amd64", + "name": "rsync", + "sha256": "272c2377df80d037eeaa7af2fcbde7d31c25d807b7e889fc0a90fe8bb83f6263", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/r/rsync/rsync_3.1.3-8ubuntu0.7_amd64.deb", + "version": "3.1.3-8ubuntu0.7" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libpopt0_1.16-14_amd64", + "name": "libpopt0", + "sha256": "8548aa6b6a1f2a7b865a13f8643eb2a1caca9113f82c89f24e75c84d7099eed1", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/p/popt/libpopt0_1.16-14_amd64.deb", + "version": "1.16-14" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "lsb-base_11.1.0ubuntu2_amd64", + "name": "lsb-base", + "sha256": "eb40f6e96189cbde27a9cc4681f7ba7b4d51552d1ad74b876aea6a7ccb83f628", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/l/lsb/lsb-base_11.1.0ubuntu2_all.deb", + "version": "11.1.0ubuntu2" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "init-system-helpers_1.57_amd64", + "name": "init-system-helpers", + "sha256": "5628715888e797fcddf2c5b3c3923b0a76dad78916025a52e19375703dd9586c", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/i/init-system-helpers/init-system-helpers_1.57_all.deb", + "version": "1.57" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "perl-base_5.30.0-9ubuntu0.5_amd64", + "name": "perl-base", + "sha256": "9997a2d7cf8a0ce2270730414c269e04a3ad6457280c21741c9e8f2489b18ba7", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/p/perl/perl-base_5.30.0-9ubuntu0.5_amd64.deb", + "version": "5.30.0-9ubuntu0.5" + }, + { + "arch": "amd64", + "dependencies": [ + { + "key": "zlib1g_1-1.2.11.dfsg-2ubuntu1.5_amd64", + "name": "zlib1g", + "version": "1:1.2.11.dfsg-2ubuntu1.5" + }, + { + "key": "libc6_2.31-0ubuntu9.14_amd64", + "name": "libc6", + "version": "2.31-0ubuntu9.14" + }, + { + "key": "libcrypt1_1-4.4.10-10ubuntu4_amd64", + "name": "libcrypt1", + "version": "1:4.4.10-10ubuntu4" + }, + { + "key": "libgcc-s1_10.5.0-1ubuntu1_20.04_amd64", + "name": "libgcc-s1", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "gcc-10-base_10.5.0-1ubuntu1_20.04_amd64", + "name": "gcc-10-base", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "libstdc-p--p-6_10.5.0-1ubuntu1_20.04_amd64", + "name": "libstdc++6", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "liblzma5_5.2.4-1ubuntu1.1_amd64", + "name": "liblzma5", + "version": "5.2.4-1ubuntu1.1" + }, + { + "key": "liblz4-1_1.9.2-2ubuntu0.20.04.1_amd64", + "name": "liblz4-1", + "version": "1.9.2-2ubuntu0.20.04.1" + } + ], + "key": "zstd_1.4.4-p-dfsg-3_amd64", + "name": "zstd", + "sha256": "fa2c41d144ab414c70fbbf176dcd7adbe77fc1cb0a060fc7eec8a09ec94cd1c7", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/universe/libz/libzstd/zstd_1.4.4+dfsg-3_amd64.deb", + "version": "1.4.4+dfsg-3" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libstdc-p--p-6_10.5.0-1ubuntu1_20.04_amd64", + "name": "libstdc++6", + "sha256": "7f9222342d3551d063bf651319ec397c39278eeeb9ab5950ae0e8c28ef0af431", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/g/gcc-10/libstdc++6_10.5.0-1ubuntu1~20.04_amd64.deb", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "arch": "amd64", + "dependencies": [], + "key": 
"liblz4-1_1.9.2-2ubuntu0.20.04.1_amd64", + "name": "liblz4-1", + "sha256": "a9b706941eb8e2a0012869dbe63c7337fc9629aaf919563f63e92baa2a3a7c18", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/l/lz4/liblz4-1_1.9.2-2ubuntu0.20.04.1_amd64.deb", + "version": "1.9.2-2ubuntu0.20.04.1" + }, + { + "arch": "amd64", + "dependencies": [ + { + "key": "libc6_2.31-0ubuntu9.14_amd64", + "name": "libc6", + "version": "2.31-0ubuntu9.14" + }, + { + "key": "libcrypt1_1-4.4.10-10ubuntu4_amd64", + "name": "libcrypt1", + "version": "1:4.4.10-10ubuntu4" + }, + { + "key": "libgcc-s1_10.5.0-1ubuntu1_20.04_amd64", + "name": "libgcc-s1", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "gcc-10-base_10.5.0-1ubuntu1_20.04_amd64", + "name": "gcc-10-base", + "version": "10.5.0-1ubuntu1~20.04" + } + ], + "key": "gzip_1.10-0ubuntu4.1_amd64", + "name": "gzip", + "sha256": "218a588ffe646e69dcde814634cb85bcaad1486da4fa3060937ab12ef04fed9c", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/g/gzip/gzip_1.10-0ubuntu4.1_amd64.deb", + "version": "1.10-0ubuntu4.1" + }, + { + "arch": "amd64", + "dependencies": [ + { + "key": "libsystemd0_245.4-4ubuntu3.23_amd64", + "name": "libsystemd0", + "version": "245.4-4ubuntu3.23" + }, + { + "key": "liblzma5_5.2.4-1ubuntu1.1_amd64", + "name": "liblzma5", + "version": "5.2.4-1ubuntu1.1" + }, + { + "key": "libc6_2.31-0ubuntu9.14_amd64", + "name": "libc6", + "version": "2.31-0ubuntu9.14" + }, + { + "key": "libcrypt1_1-4.4.10-10ubuntu4_amd64", + "name": "libcrypt1", + "version": "1:4.4.10-10ubuntu4" + }, + { + "key": "libgcc-s1_10.5.0-1ubuntu1_20.04_amd64", + "name": "libgcc-s1", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "gcc-10-base_10.5.0-1ubuntu1_20.04_amd64", + "name": "gcc-10-base", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "liblz4-1_1.9.2-2ubuntu0.20.04.1_amd64", + "name": "liblz4-1", + "version": "1.9.2-2ubuntu0.20.04.1" + }, + { + "key": "libgcrypt20_1.8.5-5ubuntu1.1_amd64", + "name": "libgcrypt20", + "version": "1.8.5-5ubuntu1.1" + }, + { + "key": "libgpg-error0_1.37-1_amd64", + "name": "libgpg-error0", + "version": "1.37-1" + }, + { + "key": "libstdc-p--p-6_10.5.0-1ubuntu1_20.04_amd64", + "name": "libstdc++6", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "libseccomp2_2.5.1-1ubuntu1_20.04.2_amd64", + "name": "libseccomp2", + "version": "2.5.1-1ubuntu1~20.04.2" + }, + { + "key": "libgnutls30_3.6.13-2ubuntu1.10_amd64", + "name": "libgnutls30", + "version": "3.6.13-2ubuntu1.10" + }, + { + "key": "libunistring2_0.9.10-2_amd64", + "name": "libunistring2", + "version": "0.9.10-2" + }, + { + "key": "libtasn1-6_4.16.0-2_amd64", + "name": "libtasn1-6", + "version": "4.16.0-2" + }, + { + "key": "libp11-kit0_0.23.20-1ubuntu0.1_amd64", + "name": "libp11-kit0", + "version": "0.23.20-1ubuntu0.1" + }, + { + "key": "libffi7_3.3-4_amd64", + "name": "libffi7", + "version": "3.3-4" + }, + { + "key": "libnettle7_3.5.1-p-really3.5.1-2ubuntu0.2_amd64", + "name": "libnettle7", + "version": "3.5.1+really3.5.1-2ubuntu0.2" + }, + { + "key": "libidn2-0_2.2.0-2_amd64", + "name": "libidn2-0", + "version": "2.2.0-2" + }, + { + "key": "libhogweed5_3.5.1-p-really3.5.1-2ubuntu0.2_amd64", + "name": "libhogweed5", + "version": "3.5.1+really3.5.1-2ubuntu0.2" + }, + { + "key": "libgmp10_2-6.2.0-p-dfsg-4ubuntu0.1_amd64", + "name": "libgmp10", + "version": "2:6.2.0+dfsg-4ubuntu0.1" + }, + { + "key": "ubuntu-keyring_2020.02.11.4_amd64", + "name": "ubuntu-keyring", + "version": "2020.02.11.4" + }, + { + "key": "libapt-pkg6.0_2.0.10_amd64", + 
"name": "libapt-pkg6.0", + "version": "2.0.10" + }, + { + "key": "zlib1g_1-1.2.11.dfsg-2ubuntu1.5_amd64", + "name": "zlib1g", + "version": "1:1.2.11.dfsg-2ubuntu1.5" + }, + { + "key": "libzstd1_1.4.4-p-dfsg-3ubuntu0.1_amd64", + "name": "libzstd1", + "version": "1.4.4+dfsg-3ubuntu0.1" + }, + { + "key": "libudev1_245.4-4ubuntu3.23_amd64", + "name": "libudev1", + "version": "245.4-4ubuntu3.23" + }, + { + "key": "libbz2-1.0_1.0.8-2_amd64", + "name": "libbz2-1.0", + "version": "1.0.8-2" + }, + { + "key": "adduser_3.118ubuntu2_amd64", + "name": "adduser", + "version": "3.118ubuntu2" + }, + { + "key": "passwd_1-4.8.1-1ubuntu5.20.04.5_amd64", + "name": "passwd", + "version": "1:4.8.1-1ubuntu5.20.04.5" + }, + { + "key": "libpam-modules_1.3.1-5ubuntu4.7_amd64", + "name": "libpam-modules", + "version": "1.3.1-5ubuntu4.7" + }, + { + "key": "libpam-modules-bin_1.3.1-5ubuntu4.7_amd64", + "name": "libpam-modules-bin", + "version": "1.3.1-5ubuntu4.7" + }, + { + "key": "libselinux1_3.0-1build2_amd64", + "name": "libselinux1", + "version": "3.0-1build2" + }, + { + "key": "libpcre2-8-0_10.34-7ubuntu0.1_amd64", + "name": "libpcre2-8-0", + "version": "10.34-7ubuntu0.1" + }, + { + "key": "libpam0g_1.3.1-5ubuntu4.7_amd64", + "name": "libpam0g", + "version": "1.3.1-5ubuntu4.7" + }, + { + "key": "libaudit1_1-2.8.5-2ubuntu6_amd64", + "name": "libaudit1", + "version": "1:2.8.5-2ubuntu6" + }, + { + "key": "libcap-ng0_0.7.9-2.1build1_amd64", + "name": "libcap-ng0", + "version": "0.7.9-2.1build1" + }, + { + "key": "libaudit-common_1-2.8.5-2ubuntu6_amd64", + "name": "libaudit-common", + "version": "1:2.8.5-2ubuntu6" + }, + { + "key": "libdb5.3_5.3.28-p-dfsg1-0.6ubuntu2_amd64", + "name": "libdb5.3", + "version": "5.3.28+dfsg1-0.6ubuntu2" + }, + { + "key": "libsemanage1_3.0-1build2_amd64", + "name": "libsemanage1", + "version": "3.0-1build2" + }, + { + "key": "libsepol1_3.0-1ubuntu0.1_amd64", + "name": "libsepol1", + "version": "3.0-1ubuntu0.1" + }, + { + "key": "libsemanage-common_3.0-1build2_amd64", + "name": "libsemanage-common", + "version": "3.0-1build2" + } + ], + "key": "apt_2.0.10_amd64", + "name": "apt", + "sha256": "642a65e0cd79c4f3202976ae1e70c0339927947e6b5c77abac7abb35d0ca9404", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/a/apt/apt_2.0.10_amd64.deb", + "version": "2.0.10" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libsystemd0_245.4-4ubuntu3.23_amd64", + "name": "libsystemd0", + "sha256": "147da635fb19d7e2a25e3f9f26ee4de719704dd14decb84ed43a97c2454a015b", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/s/systemd/libsystemd0_245.4-4ubuntu3.23_amd64.deb", + "version": "245.4-4ubuntu3.23" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libgcrypt20_1.8.5-5ubuntu1.1_amd64", + "name": "libgcrypt20", + "sha256": "f040f801e2cced9bd82e484c23678cb9b464d04974a9cf4cf25fc9ad09d9e90d", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.5-5ubuntu1.1_amd64.deb", + "version": "1.8.5-5ubuntu1.1" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libgpg-error0_1.37-1_amd64", + "name": "libgpg-error0", + "sha256": "4744163850851f60080a8b0fdf3dd3258d93114bd83f8024414737a2826da7fe", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/libg/libgpg-error/libgpg-error0_1.37-1_amd64.deb", + "version": "1.37-1" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libseccomp2_2.5.1-1ubuntu1_20.04.2_amd64", + "name": "libseccomp2", + "sha256": 
"e2fe929b69bec68e5e00eaecc2e661da0b31d8a5e9df298df43e2ba40983bdc1", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/libs/libseccomp/libseccomp2_2.5.1-1ubuntu1~20.04.2_amd64.deb", + "version": "2.5.1-1ubuntu1~20.04.2" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libgnutls30_3.6.13-2ubuntu1.10_amd64", + "name": "libgnutls30", + "sha256": "0e11ab42085fffeecdb9700288e63e59a883d1c081a37c878e3b807d5f8d6894", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/g/gnutls28/libgnutls30_3.6.13-2ubuntu1.10_amd64.deb", + "version": "3.6.13-2ubuntu1.10" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libunistring2_0.9.10-2_amd64", + "name": "libunistring2", + "sha256": "4ccfbc1e3a1cbc42616bd4fd407e01eb1434996c8500ac2fbccb7a2e1bcb166a", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/libu/libunistring/libunistring2_0.9.10-2_amd64.deb", + "version": "0.9.10-2" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libtasn1-6_4.16.0-2_amd64", + "name": "libtasn1-6", + "sha256": "f4d9cbcc2c915a58557ecf3ea6ffb42321c8a2f444945af57e64d9ce18744329", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2_amd64.deb", + "version": "4.16.0-2" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libp11-kit0_0.23.20-1ubuntu0.1_amd64", + "name": "libp11-kit0", + "sha256": "e6c415cef9c7e829e43cac6cfea7371222005650cf4751eee22fd19710d4fa8e", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/p/p11-kit/libp11-kit0_0.23.20-1ubuntu0.1_amd64.deb", + "version": "0.23.20-1ubuntu0.1" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libffi7_3.3-4_amd64", + "name": "libffi7", + "sha256": "4584aa8fef1bf5086168ce2f7078cd2ebd78fdc4cc0d86d958d795d4e0b0f50d", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/libf/libffi/libffi7_3.3-4_amd64.deb", + "version": "3.3-4" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libnettle7_3.5.1-p-really3.5.1-2ubuntu0.2_amd64", + "name": "libnettle7", + "sha256": "3496aed83407fde71e0dc5988b28e8fd7f07a2f27fcf3e0f214c7cd86667eecd", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/n/nettle/libnettle7_3.5.1+really3.5.1-2ubuntu0.2_amd64.deb", + "version": "3.5.1+really3.5.1-2ubuntu0.2" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libidn2-0_2.2.0-2_amd64", + "name": "libidn2-0", + "sha256": "698abe11d444c7e87c656c2083373d1e0fae04b3a64be0371adb2bb180092537", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/libi/libidn2/libidn2-0_2.2.0-2_amd64.deb", + "version": "2.2.0-2" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libhogweed5_3.5.1-p-really3.5.1-2ubuntu0.2_amd64", + "name": "libhogweed5", + "sha256": "12d76fa6c9149af3da228e3062faaefd7d12e0c6fd3424579627672437a84f14", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/n/nettle/libhogweed5_3.5.1+really3.5.1-2ubuntu0.2_amd64.deb", + "version": "3.5.1+really3.5.1-2ubuntu0.2" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "ubuntu-keyring_2020.02.11.4_amd64", + "name": "ubuntu-keyring", + "sha256": "2684fae16e90a9ff3cb4dc08a8af5bd9973d38763b60e5f458dea0b403a39788", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/u/ubuntu-keyring/ubuntu-keyring_2020.02.11.4_all.deb", + "version": "2020.02.11.4" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libapt-pkg6.0_2.0.10_amd64", + "name": "libapt-pkg6.0", + 
"sha256": "31c9edaf662200d5ae5c3ca891786a5717af247cd31c528f1b6a1644574c3490", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/a/apt/libapt-pkg6.0_2.0.10_amd64.deb", + "version": "2.0.10" + }, + { + "arch": "amd64", + "dependencies": [ + { + "key": "debianutils_4.9.1_amd64", + "name": "debianutils", + "version": "4.9.1" + }, + { + "key": "libc6_2.31-0ubuntu9.14_amd64", + "name": "libc6", + "version": "2.31-0ubuntu9.14" + }, + { + "key": "libcrypt1_1-4.4.10-10ubuntu4_amd64", + "name": "libcrypt1", + "version": "1:4.4.10-10ubuntu4" + }, + { + "key": "libgcc-s1_10.5.0-1ubuntu1_20.04_amd64", + "name": "libgcc-s1", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "gcc-10-base_10.5.0-1ubuntu1_20.04_amd64", + "name": "gcc-10-base", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "base-files_11ubuntu5.8_amd64", + "name": "base-files", + "version": "11ubuntu5.8" + }, + { + "key": "libtinfo6_6.2-0ubuntu2.1_amd64", + "name": "libtinfo6", + "version": "6.2-0ubuntu2.1" + } + ], + "key": "bash_5.0-6ubuntu1.2_amd64", + "name": "bash", + "sha256": "58e03b3be46c54eeb22ede348baf2b8251a6f3e0ce4f0bf0191c516f90f1f283", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/b/bash/bash_5.0-6ubuntu1.2_amd64.deb", + "version": "5.0-6ubuntu1.2" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "debianutils_4.9.1_amd64", + "name": "debianutils", + "sha256": "535571a8898bb69476b1d0ed6893b4700c312bff14fc7687b7f1fdfe18f55a1f", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/d/debianutils/debianutils_4.9.1_amd64.deb", + "version": "4.9.1" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "base-files_11ubuntu5.8_amd64", + "name": "base-files", + "sha256": "f92bb87ad4347c4b9c33c3e4f013a483ac637d65e56d330f429b0be7c1739a74", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/b/base-files/base-files_11ubuntu5.8_amd64.deb", + "version": "11ubuntu5.8" + }, + { + "arch": "amd64", + "dependencies": [ + { + "key": "libperl5.30_5.30.0-9ubuntu0.5_amd64", + "name": "libperl5.30", + "version": "5.30.0-9ubuntu0.5" + }, + { + "key": "perl-modules-5.30_5.30.0-9ubuntu0.5_amd64", + "name": "perl-modules-5.30", + "version": "5.30.0-9ubuntu0.5" + }, + { + "key": "perl-base_5.30.0-9ubuntu0.5_amd64", + "name": "perl-base", + "version": "5.30.0-9ubuntu0.5" + }, + { + "key": "dpkg_1.19.7ubuntu3.2_amd64", + "name": "dpkg", + "version": "1.19.7ubuntu3.2" + }, + { + "key": "tar_1.30-p-dfsg-7ubuntu0.20.04.4_amd64", + "name": "tar", + "version": "1.30+dfsg-7ubuntu0.20.04.4" + }, + { + "key": "libselinux1_3.0-1build2_amd64", + "name": "libselinux1", + "version": "3.0-1build2" + }, + { + "key": "libpcre2-8-0_10.34-7ubuntu0.1_amd64", + "name": "libpcre2-8-0", + "version": "10.34-7ubuntu0.1" + }, + { + "key": "libc6_2.31-0ubuntu9.14_amd64", + "name": "libc6", + "version": "2.31-0ubuntu9.14" + }, + { + "key": "libcrypt1_1-4.4.10-10ubuntu4_amd64", + "name": "libcrypt1", + "version": "1:4.4.10-10ubuntu4" + }, + { + "key": "libgcc-s1_10.5.0-1ubuntu1_20.04_amd64", + "name": "libgcc-s1", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "gcc-10-base_10.5.0-1ubuntu1_20.04_amd64", + "name": "gcc-10-base", + "version": "10.5.0-1ubuntu1~20.04" + }, + { + "key": "libacl1_2.2.53-6_amd64", + "name": "libacl1", + "version": "2.2.53-6" + }, + { + "key": "zlib1g_1-1.2.11.dfsg-2ubuntu1.5_amd64", + "name": "zlib1g", + "version": "1:1.2.11.dfsg-2ubuntu1.5" + }, + { + "key": "libzstd1_1.4.4-p-dfsg-3ubuntu0.1_amd64", + "name": "libzstd1", + "version": 
"1.4.4+dfsg-3ubuntu0.1" + }, + { + "key": "liblzma5_5.2.4-1ubuntu1.1_amd64", + "name": "liblzma5", + "version": "5.2.4-1ubuntu1.1" + }, + { + "key": "libbz2-1.0_1.0.8-2_amd64", + "name": "libbz2-1.0", + "version": "1.0.8-2" + }, + { + "key": "libgdbm6_1.18.1-5_amd64", + "name": "libgdbm6", + "version": "1.18.1-5" + }, + { + "key": "libgdbm-compat4_1.18.1-5_amd64", + "name": "libgdbm-compat4", + "version": "1.18.1-5" + }, + { + "key": "libdb5.3_5.3.28-p-dfsg1-0.6ubuntu2_amd64", + "name": "libdb5.3", + "version": "5.3.28+dfsg1-0.6ubuntu2" + } + ], + "key": "perl_5.30.0-9ubuntu0.5_amd64", + "name": "perl", + "sha256": "e98e1122172fe4d32bf45503c2dedf5d4771a2995a7070f19bb8d31e998c0fb5", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/p/perl/perl_5.30.0-9ubuntu0.5_amd64.deb", + "version": "5.30.0-9ubuntu0.5" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libperl5.30_5.30.0-9ubuntu0.5_amd64", + "name": "libperl5.30", + "sha256": "7546cd8f74d9300823c4470a55abf2e6e97be1961e2033cd927e775b774eba3d", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/p/perl/libperl5.30_5.30.0-9ubuntu0.5_amd64.deb", + "version": "5.30.0-9ubuntu0.5" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "perl-modules-5.30_5.30.0-9ubuntu0.5_amd64", + "name": "perl-modules-5.30", + "sha256": "f2093f468af836518bfe80269a94526c75c9c67582e45adf1ec4ed305fd9ee6e", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/p/perl/perl-modules-5.30_5.30.0-9ubuntu0.5_all.deb", + "version": "5.30.0-9ubuntu0.5" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libgdbm6_1.18.1-5_amd64", + "name": "libgdbm6", + "sha256": "8ac1108585490fc2321df8d27cc565054003d86fab0ab8b8e2a4a2d050a53098", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/g/gdbm/libgdbm6_1.18.1-5_amd64.deb", + "version": "1.18.1-5" + }, + { + "arch": "amd64", + "dependencies": [], + "key": "libgdbm-compat4_1.18.1-5_amd64", + "name": "libgdbm-compat4", + "sha256": "4f93394bda7be86b94fb32ac1bdf1a0b7124b8150962b55512371c3a816a382d", + "url": "https://snapshot.ubuntu.com/ubuntu/20240301T030400Z/pool/main/g/gdbm/libgdbm-compat4_1.18.1-5_amd64.deb", + "version": "1.18.1-5" + } + ], + "version": 1 +} \ No newline at end of file diff --git a/bazel/focal.yaml b/bazel/focal.yaml new file mode 100644 index 00000000000..fa66b3c9613 --- /dev/null +++ b/bazel/focal.yaml @@ -0,0 +1,37 @@ +# Packages used by our ubuntu base, adapted from: +# https://github.com/GoogleContainerTools/rules_distroless/blob/2ce7b477def75579c49bab25266f953f30275c88/examples/ubuntu_snapshot/BUILD.bazel +# +# Anytime this file is changed, the lockfile needs to be regenerated. See WORKSPACE +# for instructions. 
+version: 1 + +# Various channels used to pull packages from +sources: + - channel: focal main + url: https://snapshot.ubuntu.com/ubuntu/20240301T030400Z + - channel: focal universe + url: https://snapshot.ubuntu.com/ubuntu/20240301T030400Z + - channel: focal-security main + url: https://snapshot.ubuntu.com/ubuntu/20240301T030400Z + - channel: focal-updates main + url: https://snapshot.ubuntu.com/ubuntu/20240301T030400Z + +archs: + - "amd64" + +packages: + - "ca-certificates" + - "coreutils" # for chmod + - "gawk" # for build-bootstrap-config-image + - "dosfstools" + - "libssl1.1" + - "libunwind8" + - "mtools" + - "openssh-client" # used to SSH into image + - "rsync" + - "zstd" + - "dpkg" # for apt list --installed + - "gzip" # for tar-ing up ic regsitry store in systests + - "apt" + - "bash" + - "perl" diff --git a/ci/container/Dockerfile b/ci/container/Dockerfile index d555d8f6694..04c9e77b6c4 100644 --- a/ci/container/Dockerfile +++ b/ci/container/Dockerfile @@ -1,6 +1,6 @@ # https://hub.docker.com/_/ubuntu -# noble-20240605 -FROM ubuntu@sha256:2e863c44b718727c860746568e1d54afd13b2fa71b160f5cd9058fc436217b30 +# focal-20240216 +FROM ubuntu@sha256:48c35f3de33487442af224ed4aabac19fd9bfbd91ee90e9471d412706b20ba73 ENV TZ=UTC ARG PACKAGE_FILE=ci/container/files/packages.common @@ -16,12 +16,14 @@ RUN curl -fsSL https://github.com/honeycombio/buildevents/releases/download/v${b RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg && \ echo "deb [arch=amd64 signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null && \ + curl -fsSL "https://download.opensuse.org/repositories/devel:kubic:libcontainers:stable/xUbuntu_20.04/Release.key" | gpg --dearmor | sudo tee /etc/apt/trusted.gpg.d/devel_kubic_libcontainers_stable.gpg > /dev/null && \ + echo 'deb http://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/stable/xUbuntu_20.04/ /' | sudo tee /etc/apt/sources.list.d/devel:kubic:libcontainers:stable.list && \ apt -yq update && \ apt -yqq install --no-install-recommends docker-ce-cli podman containernetworking-plugins buildah zip fuse-overlayfs xtail # install afl & gsutils deps for bazel-fuzzers RUN curl -L "https://apt.llvm.org/llvm-snapshot.gpg.key" | apt-key add - && \ - echo "deb http://apt.llvm.org/noble/ llvm-toolchain-noble-18 main" | tee -a /etc/apt/sources.list.d/llvm.list && \ + echo "deb http://apt.llvm.org/focal/ llvm-toolchain-focal-18 main" | tee -a /etc/apt/sources.list.d/llvm.list && \ echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] http://packages.cloud.google.com/apt cloud-sdk main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list && \ curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key --keyring /usr/share/keyrings/cloud.google.gpg add - && \ apt -yq update && \ @@ -48,14 +50,6 @@ RUN export DEBIAN_FRONTEND=noninteractive && \ cd .. && \ rm -rf e2fsdroid -# Install an older libtinfo for rules_haskell -RUN mkdir libtinfo && \ - cd libtinfo && \ - wget "http://security.ubuntu.com/ubuntu/pool/universe/n/ncurses/libtinfo5_6.3-2ubuntu0.1_amd64.deb" && \ - apt -yqq install ./libtinfo5_6.3-2ubuntu0.1_amd64.deb && \ - cd .. 
&& \ - rm -rf libtinfo - ARG mkcert_version=1.4.4 ARG mkcert_sha=6d31c65b03972c6dc4a14ab429f2928300518b26503f58723e532d1b0a3bbb52 RUN curl -fsSL https://github.com/FiloSottile/mkcert/releases/download/v${mkcert_version}/mkcert-v${mkcert_version}-linux-amd64 -o /usr/local/bin/mkcert && \ @@ -99,9 +93,9 @@ RUN curl -sSL "https://github.com/mikefarah/yq/releases/download/v${YQ_VERSION}/ # Add mold linker ARG MOLD_BIN="/usr/local/bin/mold" -ARG MOLD_VERSION=2.31.0 +ARG MOLD_VERSION=2.4.1 RUN curl -sSL "https://github.com/rui314/mold/releases/download/v${MOLD_VERSION}/mold-${MOLD_VERSION}-$(uname -m)-linux.tar.gz" | tar -C /usr/local --strip-components=1 -xzf - && \ - echo "557c1f19b575e42714771727afa6c4f7fa9b98e41c65f281f7d6cd8f9863a817 ${MOLD_BIN}" | shasum -a 256 -c - && \ + echo "4d34b489a0810e71a937103f38e4b6c951abac36b5b60c58bc07b73efa7139cd ${MOLD_BIN}" | shasum -a 256 -c - && \ ln -sf "${MOLD_BIN}" "$(realpath /usr/bin/ld)" # Add IC SDK (dfx) @@ -128,8 +122,10 @@ RUN . /opt/nvm/nvm.sh && \ nvm install ${dependency_mgmt_default_node_version} && \ nvm install ${dependency_mgmt_oisy_wallet_node_version} -# Create user groups needed for github actions runner -RUN groupadd -g 1001 buildifier && useradd -ms /bin/bash -u 1001 -g 1001 -G ubuntu buildifier && \ +# Create user groups +RUN groupadd -g 1000 ubuntu && useradd -ms /bin/bash -u 1000 -g 1000 ubuntu && \ + # needed for github actions runner \ + groupadd -g 1001 buildifier && useradd -ms /bin/bash -u 1001 -g 1001 buildifier && \ # CI before script requires sudo \ echo "ALL ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers diff --git a/ci/container/TAG b/ci/container/TAG index 6e197c4645e..1b88ba01cf3 100644 --- a/ci/container/TAG +++ b/ci/container/TAG @@ -1 +1 @@ -234270bdfedab79b1aa7a761cad77195e52c801b287b6c3cdf2beb3239e8cc14 +7beeb6e69346fd10ff4421688aec841d072ac4f8c4c80d3b089d64aa280c0894 diff --git a/ci/container/build-image.sh b/ci/container/build-image.sh index 16eab1698a0..401fc422196 100755 --- a/ci/container/build-image.sh +++ b/ci/container/build-image.sh @@ -38,7 +38,8 @@ fi DOCKER_BUILDKIT=1 docker "${ARGS[@]}" build "${BUILD_ARGS[@]}" \ -t ic-build:"$DOCKER_IMG_TAG" \ - -t ghcr.io/dfinity/ic-build:latest \ + -t docker.io/dfinity/ic-build:"$DOCKER_IMG_TAG" \ + -t docker.io/dfinity/ic-build:latest \ -t ghcr.io/dfinity/ic-build:"$DOCKER_IMG_TAG" \ --build-arg RUST_VERSION="$RUST_VERSION" \ -f ci/container/Dockerfile . diff --git a/ci/container/container-run.sh b/ci/container/container-run.sh index b72411357e3..f64854dec08 100755 --- a/ci/container/container-run.sh +++ b/ci/container/container-run.sh @@ -69,12 +69,9 @@ IMAGE="$IMAGE:$IMAGE_TAG" if ! sudo podman "${PODMAN_ARGS[@]}" image exists $IMAGE; then if ! 
sudo podman "${PODMAN_ARGS[@]}" pull $IMAGE; then # fallback to building the image - docker() { - PODMAN_ARGS=(${PODMAN_ARGS}) - sudo podman "${PODMAN_ARGS[@]}" "$@" --network=host - } + docker() { sudo podman "${PODMAN_ARGS[@]}" "$@" --network=host; } export -f docker - PODMAN_ARGS="${PODMAN_ARGS[@]}" "$REPO_ROOT"/ci/container/build-image.sh "${BUILD_ARGS[@]}" + "$REPO_ROOT"/ci/container/build-image.sh "${BUILD_ARGS[@]}" unset -f docker fi fi diff --git a/ci/container/files/packages.common b/ci/container/files/packages.common index 63d24cea136..68954d42409 100644 --- a/ci/container/files/packages.common +++ b/ci/container/files/packages.common @@ -40,19 +40,19 @@ libunwind-dev libusb-1.0-0-dev libsqlite3-dev zlib1g-dev -libclang-18-dev +libclang-10-dev protobuf-compiler llvm liblmdb-dev liblzma-dev # Haskell +libtinfo5 libtinfo-dev libffi-dev libgmp-dev # IC-OS -fdisk cryptsetup-bin dosfstools fakeroot diff --git a/ci/src/dependencies/scanner/manager/bazel_rust_dependency_manager.py b/ci/src/dependencies/scanner/manager/bazel_rust_dependency_manager.py index c848a0ba2e5..02236cddcef 100644 --- a/ci/src/dependencies/scanner/manager/bazel_rust_dependency_manager.py +++ b/ci/src/dependencies/scanner/manager/bazel_rust_dependency_manager.py @@ -110,11 +110,8 @@ def __transitive_bazel_string_to_dependency(bazel_string: str) -> typing.Optiona # ['zstd', 'sys', '2.0.2', 'zstd.1.5.2'] for split_string in parse_result: # 2.0.2 - try: - if isinstance(version.parse(split_string), version.Version): - version_str = split_string - except version.InvalidVersion: - continue + if isinstance(version.parse(split_string), version.Version): + version_str = split_string # split with -2.0.2 # noinspection PyUnboundLocalVariable name = result[0].split(f"-{version_str}", 1)[0] diff --git a/requirements.txt b/requirements.txt index ae03b47294f..f57634be82f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,387 +4,329 @@ # # bazel run //:python-requirements.update # -ansible==10.5.0 \ - --hash=sha256:1d10bddba58f1edd0fe0b8e0387e0fafc519535066bb3c919c33b6ea3ec32a0f \ - --hash=sha256:ba2045031a7d60c203b6e5fe1f8eaddd53ae076f7ada910e636494384135face +ansible==6.6.0 \ + --hash=sha256:3c6812e9af1c243b8a9e5fc6cba618449813765e609caae39a757452e2818dee \ + --hash=sha256:e1b940a8d4f412123ede3c14b25cb99c3c8a4d535fd040aabf8e4fb7b0e4f092 # via -r requirements.in -ansible-core==2.17.5 \ - --hash=sha256:10f165b475cf2bc8d886e532cadb32c52ee6a533649793101d3166bca9bd3ea3 \ - --hash=sha256:ae7f51fd13dc9d57c9bcd43ef23f9c255ca8f18f4b5c0011a4f9b724d92c5a8e +ansible-core==2.13.8 \ + --hash=sha256:451be59d8494bc302ff3a70f4d079d64b069e10b482fe451a58e2e733be7b8c5 \ + --hash=sha256:fef93e996596550c537275e6acc573f383d121d5cee990f0f1d02353618f9af9 # via ansible antlr4-python3-runtime==4.9.3 \ --hash=sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b # via ic-py -anyio==4.6.2.post1 \ - --hash=sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c \ - --hash=sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d - # via httpx -bcrypt==4.2.0 \ - --hash=sha256:096a15d26ed6ce37a14c1ac1e48119660f21b24cba457f160a4b830f3fe6b5cb \ - --hash=sha256:0da52759f7f30e83f1e30a888d9163a81353ef224d82dc58eb5bb52efcabc399 \ - --hash=sha256:1bb429fedbe0249465cdd85a58e8376f31bb315e484f16e68ca4c786dcc04291 \ - --hash=sha256:1d84cf6d877918620b687b8fd1bf7781d11e8a0998f576c7aa939776b512b98d \ - --hash=sha256:1ee38e858bf5d0287c39b7a1fc59eec64bbf880c7d504d3a06a96c16e14058e7 \ - 
--hash=sha256:1ff39b78a52cf03fdf902635e4c81e544714861ba3f0efc56558979dd4f09170 \ - --hash=sha256:27fe0f57bb5573104b5a6de5e4153c60814c711b29364c10a75a54bb6d7ff48d \ - --hash=sha256:3413bd60460f76097ee2e0a493ccebe4a7601918219c02f503984f0a7ee0aebe \ - --hash=sha256:3698393a1b1f1fd5714524193849d0c6d524d33523acca37cd28f02899285060 \ - --hash=sha256:373db9abe198e8e2c70d12b479464e0d5092cc122b20ec504097b5f2297ed184 \ - --hash=sha256:39e1d30c7233cfc54f5c3f2c825156fe044efdd3e0b9d309512cc514a263ec2a \ - --hash=sha256:3bbbfb2734f0e4f37c5136130405332640a1e46e6b23e000eeff2ba8d005da68 \ - --hash=sha256:3d3a6d28cb2305b43feac298774b997e372e56c7c7afd90a12b3dc49b189151c \ - --hash=sha256:5a1e8aa9b28ae28020a3ac4b053117fb51c57a010b9f969603ed885f23841458 \ - --hash=sha256:61ed14326ee023917ecd093ee6ef422a72f3aec6f07e21ea5f10622b735538a9 \ - --hash=sha256:655ea221910bcac76ea08aaa76df427ef8625f92e55a8ee44fbf7753dbabb328 \ - --hash=sha256:762a2c5fb35f89606a9fde5e51392dad0cd1ab7ae64149a8b935fe8d79dd5ed7 \ - --hash=sha256:77800b7147c9dc905db1cba26abe31e504d8247ac73580b4aa179f98e6608f34 \ - --hash=sha256:8ac68872c82f1add6a20bd489870c71b00ebacd2e9134a8aa3f98a0052ab4b0e \ - --hash=sha256:8d7bb9c42801035e61c109c345a28ed7e84426ae4865511eb82e913df18f58c2 \ - --hash=sha256:8f6ede91359e5df88d1f5c1ef47428a4420136f3ce97763e31b86dd8280fbdf5 \ - --hash=sha256:9c1c4ad86351339c5f320ca372dfba6cb6beb25e8efc659bedd918d921956bae \ - --hash=sha256:c02d944ca89d9b1922ceb8a46460dd17df1ba37ab66feac4870f6862a1533c00 \ - --hash=sha256:c52aac18ea1f4a4f65963ea4f9530c306b56ccd0c6f8c8da0c06976e34a6e841 \ - --hash=sha256:cb2a8ec2bc07d3553ccebf0746bbf3d19426d1c6d1adbd4fa48925f66af7b9e8 \ - --hash=sha256:cf69eaf5185fd58f268f805b505ce31f9b9fc2d64b376642164e9244540c1221 \ - --hash=sha256:f4f4acf526fcd1c34e7ce851147deedd4e26e6402369304220250598b26448db +anyio==3.6.2 \ + --hash=sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421 \ + --hash=sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3 + # via httpcore +attrs==22.2.0 \ + --hash=sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836 \ + --hash=sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99 + # via pytest +bcrypt==4.0.1 \ + --hash=sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535 \ + --hash=sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0 \ + --hash=sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410 \ + --hash=sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd \ + --hash=sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665 \ + --hash=sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab \ + --hash=sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71 \ + --hash=sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215 \ + --hash=sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b \ + --hash=sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda \ + --hash=sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9 \ + --hash=sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a \ + --hash=sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344 \ + --hash=sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f \ + --hash=sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d \ + 
--hash=sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c \ + --hash=sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c \ + --hash=sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2 \ + --hash=sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d \ + --hash=sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e \ + --hash=sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3 # via paramiko cbor==1.0.0 \ --hash=sha256:13225a262ddf5615cbd9fd55a76a0d53069d18b07d2e9f19c39e6acb8609bbb6 # via -r requirements.in -cbor2==5.6.5 \ - --hash=sha256:3038523b8fc7de312bb9cdcbbbd599987e64307c4db357cd2030c472a6c7d468 \ - --hash=sha256:34cf5ab0dc310c3d0196caa6ae062dc09f6c242e2544bea01691fe60c0230596 \ - --hash=sha256:37096663a5a1c46a776aea44906cbe5fa3952f29f50f349179c00525d321c862 \ - --hash=sha256:38886c41bebcd7dca57739439455bce759f1e4c551b511f618b8e9c1295b431b \ - --hash=sha256:3d1a18b3a58dcd9b40ab55c726160d4a6b74868f2a35b71f9e726268b46dc6a2 \ - --hash=sha256:4586a4f65546243096e56a3f18f29d60752ee9204722377021b3119a03ed99ff \ - --hash=sha256:47261f54a024839ec649b950013c4de5b5f521afe592a2688eebbe22430df1dc \ - --hash=sha256:54c72a3207bb2d4480c2c39dad12d7971ce0853a99e3f9b8d559ce6eac84f66f \ - --hash=sha256:559dcf0d897260a9e95e7b43556a62253e84550b77147a1ad4d2c389a2a30192 \ - --hash=sha256:5b856fda4c50c5bc73ed3664e64211fa4f015970ed7a15a4d6361bd48462feaf \ - --hash=sha256:5ce13a27ef8fddf643fc17a753fe34aa72b251d03c23da6a560c005dc171085b \ - --hash=sha256:5cff06464b8f4ca6eb9abcba67bda8f8334a058abc01005c8e616728c387ad32 \ - --hash=sha256:61ceb77e6aa25c11c814d4fe8ec9e3bac0094a1f5bd8a2a8c95694596ea01e08 \ - --hash=sha256:66dd25dd919cddb0b36f97f9ccfa51947882f064729e65e6bef17c28535dc459 \ - --hash=sha256:6797b824b26a30794f2b169c0575301ca9b74ae99064e71d16e6ba0c9057de51 \ - --hash=sha256:6e14a1bf6269d25e02ef1d4008e0ce8880aa271d7c6b4c329dba48645764f60e \ - --hash=sha256:73b9647eed1493097db6aad61e03d8f1252080ee041a1755de18000dd2c05f37 \ - --hash=sha256:7488aec919f8408f9987a3a32760bd385d8628b23a35477917aa3923ff6ad45f \ - --hash=sha256:7f6d69f38f7d788b04c09ef2b06747536624b452b3c8b371ab78ad43b0296fab \ - --hash=sha256:824f202b556fc204e2e9a67d6d6d624e150fbd791278ccfee24e68caec578afd \ - --hash=sha256:863e0983989d56d5071270790e7ed8ddbda88c9e5288efdb759aba2efee670bc \ - --hash=sha256:87026fc838370d69f23ed8572939bd71cea2b3f6c8f8bb8283f573374b4d7f33 \ - --hash=sha256:8f747b7a9aaa58881a0c5b4cd4a9b8fb27eca984ed261a769b61de1f6b5bd1e6 \ - --hash=sha256:90bfa36944caccec963e6ab7e01e64e31cc6664535dc06e6295ee3937c999cbb \ - --hash=sha256:93676af02bd9a0b4a62c17c5b20f8e9c37b5019b1a24db70a2ee6cb770423568 \ - --hash=sha256:94885903105eec66d7efb55f4ce9884fdc5a4d51f3bd75b6fedc68c5c251511b \ - --hash=sha256:97a7e409b864fecf68b2ace8978eb5df1738799a333ec3ea2b9597bfcdd6d7d2 \ - --hash=sha256:a34ee99e86b17444ecbe96d54d909dd1a20e2da9f814ae91b8b71cf1ee2a95e4 \ - --hash=sha256:a3ac50485cf67dfaab170a3e7b527630e93cb0a6af8cdaa403054215dff93adf \ - --hash=sha256:a83b76367d1c3e69facbcb8cdf65ed6948678e72f433137b41d27458aa2a40cb \ - --hash=sha256:a88f029522aec5425fc2f941b3df90da7688b6756bd3f0472ab886d21208acbd \ - --hash=sha256:a8947c102cac79d049eadbd5e2ffb8189952890df7cbc3ee262bbc2f95b011a9 \ - --hash=sha256:ae2b49226224e92851c333b91d83292ec62eba53a19c68a79890ce35f1230d70 \ - --hash=sha256:b682820677ee1dbba45f7da11898d2720f92e06be36acec290867d5ebf3d7e09 \ - --hash=sha256:b9d15b638539b68aa5d5eacc56099b4543a38b2d2c896055dccf7e83d24b7955 \ - 
--hash=sha256:e16c4a87fc999b4926f5c8f6c696b0d251b4745bc40f6c5aee51d69b30b15ca2 \ - --hash=sha256:e25c2aebc9db99af7190e2261168cdde8ed3d639ca06868e4f477cf3a228a8e9 \ - --hash=sha256:f0d0a9c5aabd48ecb17acf56004a7542a0b8d8212be52f3102b8218284bd881e \ - --hash=sha256:f2764804ffb6553283fc4afb10a280715905a4cea4d6dc7c90d3e89c4a93bc8d \ - --hash=sha256:f4c7dbcdc59ea7f5a745d3e30ee5e6b6ff5ce7ac244aa3de6786391b10027bb3 \ - --hash=sha256:f91e6d74fa6917df31f8757fdd0e154203b0dd0609ec53eb957016a2b474896a \ - --hash=sha256:fa61a02995f3a996c03884cf1a0b5733f88cbfd7fa0e34944bf678d4227ee712 \ - --hash=sha256:fde21ac1cf29336a31615a2c469a9cb03cf0add3ae480672d4d38cda467d07fc \ - --hash=sha256:fe11c2eb518c882cfbeed456e7a552e544893c17db66fe5d3230dbeaca6b615c +cbor2==5.4.6 \ + --hash=sha256:0b956f19e93ba3180c336282cd1b6665631f2d3a196a9c19b29a833bf979e7a4 \ + --hash=sha256:0bd12c54a48949d11f5ffc2fa27f5df1b4754111f5207453e5fae3512ebb3cab \ + --hash=sha256:0d2b926b024d3a1549b819bc82fdc387062bbd977b0299dd5fa5e0ea3267b98b \ + --hash=sha256:1618d16e310f7ffed141762b0ff5d8bb6b53ad449406115cc465bf04213cefcf \ + --hash=sha256:181ac494091d1f9c5bb373cd85514ce1eb967a8cf3ec298e8dfa8878aa823956 \ + --hash=sha256:1835536e76ea16e88c934aac5e369ba9f93d495b01e5fa2d93f0b4986b89146d \ + --hash=sha256:1c12c0ab78f5bc290b08a79152a8621822415836a86f8f4b50dadba371736fda \ + --hash=sha256:24144822f8d2b0156f4cda9427f071f969c18683ffed39663dc86bc0a75ae4dd \ + --hash=sha256:309fffbb7f561d67f02095d4b9657b73c9220558701c997e9bfcfbca2696e927 \ + --hash=sha256:3316f09a77af85e7772ecfdd693b0f450678a60b1aee641bac319289757e3fa0 \ + --hash=sha256:3545b16f9f0d5f34d4c99052829c3726020a07be34c99c250d0df87418f02954 \ + --hash=sha256:39452c799453f5bf33281ffc0752c620b8bfa0b7c13070b87d370257a1311976 \ + --hash=sha256:3950be57a1698086cf26d8710b4e5a637b65133c5b1f9eec23967d4089d8cfed \ + --hash=sha256:456cdff668a50a52fdb8aa6d0742511e43ed46d6a5b463dba80a5a720fa0d320 \ + --hash=sha256:4b9f3924da0e460a93b3674c7e71020dd6c9e9f17400a34e52a88c0af2dcd2aa \ + --hash=sha256:4bbbdb2e3ef274865dc3f279aae109b5d94f4654aea3c72c479fb37e4a1e7ed7 \ + --hash=sha256:4ce1a2c272ba8523a55ea2f1d66e3464e89fa0e37c9a3d786a919fe64e68dbd7 \ + --hash=sha256:56dfa030cd3d67e5b6701d3067923f2f61536a8ffb1b45be14775d1e866b59ae \ + --hash=sha256:6709d97695205cd08255363b54afa035306d5302b7b5e38308c8ff5a47e60f2a \ + --hash=sha256:6e1b5aee920b6a2f737aa12e2b54de3826b09f885a7ce402db84216343368140 \ + --hash=sha256:6f9c702bee2954fffdfa3de95a5af1a6b1c5f155e39490353d5654d83bb05bb9 \ + --hash=sha256:78304df140b9e13b93bcbb2aecee64c9aaa9f1cadbd45f043b5e7b93cc2f21a2 \ + --hash=sha256:79e048e623846d60d735bb350263e8fdd36cb6195d7f1a2b57eacd573d9c0b33 \ + --hash=sha256:7bbd3470eb685325398023e335be896b74f61b014896604ed45049a7b7b6d8ac \ + --hash=sha256:80ac8ba450c7a41c5afe5f7e503d3092442ed75393e1de162b0bf0d97edf7c7f \ + --hash=sha256:9394ca49ecdf0957924e45d09a4026482d184a465a047f60c4044eb464c43de9 \ + --hash=sha256:94f844d0e232aca061a86dd6ff191e47ba0389ddd34acb784ad9a41594dc99a4 \ + --hash=sha256:96087fa5336ebfc94465c0768cd5de0fcf9af3840d2cf0ce32f5767855f1a293 \ + --hash=sha256:b893500db0fe033e570c3adc956af6eefc57e280026bd2d86fd53da9f1e594d7 \ + --hash=sha256:c285a2cb2c04004bfead93df89d92a0cef1874ad337d0cb5ea53c2c31e97bfdb \ + --hash=sha256:d2984a488f350aee1d54fa9cb8c6a3c1f1f5b268abbc91161e47185de4d829f3 \ + --hash=sha256:d54bd840b4fe34f097b8665fc0692c7dd175349e53976be6c5de4433b970daa4 \ + --hash=sha256:db9eb582fce972f0fa429d8159b7891ff8deccb7affc4995090afc61ce0d328a \ + 
--hash=sha256:e5094562dfe3e5583202b93ef7ca5082c2ba5571accb2c4412d27b7d0ba8a563 \ + --hash=sha256:e73ca40dd3c7210ff776acff9869ddc9ff67bae7c425b58e5715dcf55275163f \ + --hash=sha256:ff95b33e5482313a74648ca3620c9328e9f30ecfa034df040b828e476597d352 # via ic-py -certifi==2024.8.30 \ - --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ - --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 +certifi==2022.12.7 \ + --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ + --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 # via # httpcore # httpx # requests -cffi==1.17.1 \ - --hash=sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8 \ - --hash=sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2 \ - --hash=sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1 \ - --hash=sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15 \ - --hash=sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36 \ - --hash=sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824 \ - --hash=sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8 \ - --hash=sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36 \ - --hash=sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17 \ - --hash=sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf \ - --hash=sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc \ - --hash=sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3 \ - --hash=sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed \ - --hash=sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702 \ - --hash=sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1 \ - --hash=sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8 \ - --hash=sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903 \ - --hash=sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6 \ - --hash=sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d \ - --hash=sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b \ - --hash=sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e \ - --hash=sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be \ - --hash=sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c \ - --hash=sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683 \ - --hash=sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9 \ - --hash=sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c \ - --hash=sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8 \ - --hash=sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1 \ - --hash=sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4 \ - --hash=sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655 \ - --hash=sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67 \ - --hash=sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595 \ - --hash=sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0 \ - --hash=sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65 \ - 
--hash=sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41 \ - --hash=sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6 \ - --hash=sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401 \ - --hash=sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6 \ - --hash=sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3 \ - --hash=sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16 \ - --hash=sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93 \ - --hash=sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e \ - --hash=sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4 \ - --hash=sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964 \ - --hash=sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c \ - --hash=sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576 \ - --hash=sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0 \ - --hash=sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3 \ - --hash=sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662 \ - --hash=sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3 \ - --hash=sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff \ - --hash=sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5 \ - --hash=sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd \ - --hash=sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f \ - --hash=sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5 \ - --hash=sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14 \ - --hash=sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d \ - --hash=sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9 \ - --hash=sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7 \ - --hash=sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382 \ - --hash=sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a \ - --hash=sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e \ - --hash=sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a \ - --hash=sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4 \ - --hash=sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99 \ - --hash=sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87 \ - --hash=sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b +cffi==1.15.1 \ + --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ + --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ + --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ + --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ + --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ + --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ + --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ + --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ + --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ + 
--hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ + --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ + --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ + --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ + --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ + --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ + --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ + --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ + --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ + --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ + --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ + --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ + --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ + --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ + --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ + --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ + --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ + --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ + --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ + --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ + --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ + --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ + --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ + --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ + --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ + --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ + --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ + --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ + --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ + --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ + --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ + --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ + --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ + --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ + --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ + --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ + --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ + --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ + --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ + --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ + --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ + --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ + 
--hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ + --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ + --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ + --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ + --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ + --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ + --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ + --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ + --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ + --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ + --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ + --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ + --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 # via # cryptography # pynacl -charset-normalizer==3.4.0 \ - --hash=sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621 \ - --hash=sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6 \ - --hash=sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8 \ - --hash=sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912 \ - --hash=sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c \ - --hash=sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b \ - --hash=sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d \ - --hash=sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d \ - --hash=sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95 \ - --hash=sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e \ - --hash=sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565 \ - --hash=sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64 \ - --hash=sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab \ - --hash=sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be \ - --hash=sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e \ - --hash=sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907 \ - --hash=sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0 \ - --hash=sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2 \ - --hash=sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62 \ - --hash=sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62 \ - --hash=sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23 \ - --hash=sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc \ - --hash=sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284 \ - --hash=sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca \ - --hash=sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455 \ - --hash=sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858 \ - --hash=sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b \ - --hash=sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594 \ - --hash=sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc \ - 
--hash=sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db \ - --hash=sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b \ - --hash=sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea \ - --hash=sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6 \ - --hash=sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920 \ - --hash=sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749 \ - --hash=sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7 \ - --hash=sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd \ - --hash=sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99 \ - --hash=sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242 \ - --hash=sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee \ - --hash=sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129 \ - --hash=sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2 \ - --hash=sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51 \ - --hash=sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee \ - --hash=sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8 \ - --hash=sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b \ - --hash=sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613 \ - --hash=sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742 \ - --hash=sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe \ - --hash=sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3 \ - --hash=sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5 \ - --hash=sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631 \ - --hash=sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7 \ - --hash=sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15 \ - --hash=sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c \ - --hash=sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea \ - --hash=sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417 \ - --hash=sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250 \ - --hash=sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88 \ - --hash=sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca \ - --hash=sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa \ - --hash=sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99 \ - --hash=sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149 \ - --hash=sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41 \ - --hash=sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574 \ - --hash=sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0 \ - --hash=sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f \ - --hash=sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d \ - --hash=sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654 \ - --hash=sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3 \ - --hash=sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19 \ - 
--hash=sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90 \ - --hash=sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578 \ - --hash=sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9 \ - --hash=sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1 \ - --hash=sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51 \ - --hash=sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719 \ - --hash=sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236 \ - --hash=sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a \ - --hash=sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c \ - --hash=sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade \ - --hash=sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944 \ - --hash=sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc \ - --hash=sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6 \ - --hash=sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6 \ - --hash=sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27 \ - --hash=sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6 \ - --hash=sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2 \ - --hash=sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12 \ - --hash=sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf \ - --hash=sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114 \ - --hash=sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7 \ - --hash=sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf \ - --hash=sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d \ - --hash=sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b \ - --hash=sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed \ - --hash=sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03 \ - --hash=sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4 \ - --hash=sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67 \ - --hash=sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365 \ - --hash=sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a \ - --hash=sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748 \ - --hash=sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b \ - --hash=sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079 \ - --hash=sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482 +charset-normalizer==3.1.0 \ + --hash=sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6 \ + --hash=sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1 \ + --hash=sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e \ + --hash=sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373 \ + --hash=sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62 \ + --hash=sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230 \ + --hash=sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be \ + --hash=sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c \ + 
--hash=sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0 \ + --hash=sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448 \ + --hash=sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f \ + --hash=sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649 \ + --hash=sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d \ + --hash=sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0 \ + --hash=sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706 \ + --hash=sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a \ + --hash=sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59 \ + --hash=sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23 \ + --hash=sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5 \ + --hash=sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb \ + --hash=sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e \ + --hash=sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e \ + --hash=sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c \ + --hash=sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28 \ + --hash=sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d \ + --hash=sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41 \ + --hash=sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974 \ + --hash=sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce \ + --hash=sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f \ + --hash=sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1 \ + --hash=sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d \ + --hash=sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8 \ + --hash=sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017 \ + --hash=sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31 \ + --hash=sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7 \ + --hash=sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8 \ + --hash=sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e \ + --hash=sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14 \ + --hash=sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd \ + --hash=sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d \ + --hash=sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795 \ + --hash=sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b \ + --hash=sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b \ + --hash=sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b \ + --hash=sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203 \ + --hash=sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f \ + --hash=sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19 \ + --hash=sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1 \ + --hash=sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a \ + --hash=sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac \ + 
--hash=sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9 \ + --hash=sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0 \ + --hash=sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137 \ + --hash=sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f \ + --hash=sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6 \ + --hash=sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5 \ + --hash=sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909 \ + --hash=sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f \ + --hash=sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0 \ + --hash=sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324 \ + --hash=sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755 \ + --hash=sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb \ + --hash=sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854 \ + --hash=sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c \ + --hash=sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60 \ + --hash=sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84 \ + --hash=sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0 \ + --hash=sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b \ + --hash=sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1 \ + --hash=sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531 \ + --hash=sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1 \ + --hash=sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11 \ + --hash=sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326 \ + --hash=sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df \ + --hash=sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab # via requests -configargparse==1.7 \ - --hash=sha256:d249da6591465c6c26df64a9f73d2536e743be2f244eb3ebe61114af2f94f86b \ - --hash=sha256:e7067471884de5478c58a511e529f0f9bd1c66bfef1dea90935438d6c23306d1 +configargparse==1.5.3 \ + --hash=sha256:18f6535a2db9f6e02bd5626cc7455eac3e96b9ab3d969d366f9aafd5c5c00fe7 \ + --hash=sha256:1b0b3cbf664ab59dada57123c81eff3d9737e0d11d8cf79e3d6eb10823f1739f # via -r requirements.in -coverage[toml]==7.6.3 \ - --hash=sha256:04f2189716e85ec9192df307f7c255f90e78b6e9863a03223c3b998d24a3c6c6 \ - --hash=sha256:0c6c0f4d53ef603397fc894a895b960ecd7d44c727df42a8d500031716d4e8d2 \ - --hash=sha256:0ca37993206402c6c35dc717f90d4c8f53568a8b80f0bf1a1b2b334f4d488fba \ - --hash=sha256:12f9515d875859faedb4144fd38694a761cd2a61ef9603bf887b13956d0bbfbb \ - --hash=sha256:1990b1f4e2c402beb317840030bb9f1b6a363f86e14e21b4212e618acdfce7f6 \ - --hash=sha256:2341a78ae3a5ed454d524206a3fcb3cec408c2a0c7c2752cd78b606a2ff15af4 \ - --hash=sha256:23bb63ae3f4c645d2d82fa22697364b0046fbafb6261b258a58587441c5f7bd0 \ - --hash=sha256:27bd5f18d8f2879e45724b0ce74f61811639a846ff0e5c0395b7818fae87aec6 \ - --hash=sha256:2dc7d6b380ca76f5e817ac9eef0c3686e7834c8346bef30b041a4ad286449990 \ - --hash=sha256:331b200ad03dbaa44151d74daeb7da2cf382db424ab923574f6ecca7d3b30de3 \ - --hash=sha256:365defc257c687ce3e7d275f39738dcd230777424117a6c76043459db131dd43 \ - --hash=sha256:37be7b5ea3ff5b7c4a9db16074dc94523b5f10dd1f3b362a827af66a55198175 \ - 
--hash=sha256:3c2e6fa98032fec8282f6b27e3f3986c6e05702828380618776ad794e938f53a \ - --hash=sha256:40e8b1983080439d4802d80b951f4a93d991ef3261f69e81095a66f86cf3c3c6 \ - --hash=sha256:43517e1f6b19f610a93d8227e47790722c8bf7422e46b365e0469fc3d3563d97 \ - --hash=sha256:43b32a06c47539fe275106b376658638b418c7cfdfff0e0259fbf877e845f14b \ - --hash=sha256:43d6a66e33b1455b98fc7312b124296dad97a2e191c80320587234a77b1b736e \ - --hash=sha256:4c59d6a4a4633fad297f943c03d0d2569867bd5372eb5684befdff8df8522e39 \ - --hash=sha256:52ac29cc72ee7e25ace7807249638f94c9b6a862c56b1df015d2b2e388e51dbd \ - --hash=sha256:54356a76b67cf8a3085818026bb556545ebb8353951923b88292556dfa9f812d \ - --hash=sha256:583049c63106c0555e3ae3931edab5669668bbef84c15861421b94e121878d3f \ - --hash=sha256:6d99198203f0b9cb0b5d1c0393859555bc26b548223a769baf7e321a627ed4fc \ - --hash=sha256:6da42bbcec130b188169107ecb6ee7bd7b4c849d24c9370a0c884cf728d8e976 \ - --hash=sha256:6e484e479860e00da1f005cd19d1c5d4a813324e5951319ac3f3eefb497cc549 \ - --hash=sha256:70a6756ce66cd6fe8486c775b30889f0dc4cb20c157aa8c35b45fd7868255c5c \ - --hash=sha256:70d24936ca6c15a3bbc91ee9c7fc661132c6f4c9d42a23b31b6686c05073bde5 \ - --hash=sha256:71967c35828c9ff94e8c7d405469a1fb68257f686bca7c1ed85ed34e7c2529c4 \ - --hash=sha256:79644f68a6ff23b251cae1c82b01a0b51bc40c8468ca9585c6c4b1aeee570e0b \ - --hash=sha256:87cd2e29067ea397a47e352efb13f976eb1b03e18c999270bb50589323294c6e \ - --hash=sha256:8d4c6ea0f498c7c79111033a290d060c517853a7bcb2f46516f591dab628ddd3 \ - --hash=sha256:9134032f5aa445ae591c2ba6991d10136a1f533b1d2fa8f8c21126468c5025c6 \ - --hash=sha256:921fbe13492caf6a69528f09d5d7c7d518c8d0e7b9f6701b7719715f29a71e6e \ - --hash=sha256:99670790f21a96665a35849990b1df447993880bb6463a0a1d757897f30da929 \ - --hash=sha256:9975442f2e7a5cfcf87299c26b5a45266ab0696348420049b9b94b2ad3d40234 \ - --hash=sha256:99ded130555c021d99729fabd4ddb91a6f4cc0707df4b1daf912c7850c373b13 \ - --hash=sha256:a3328c3e64ea4ab12b85999eb0779e6139295bbf5485f69d42cf794309e3d007 \ - --hash=sha256:a4fb91d5f72b7e06a14ff4ae5be625a81cd7e5f869d7a54578fc271d08d58ae3 \ - --hash=sha256:aa23ce39661a3e90eea5f99ec59b763b7d655c2cada10729ed920a38bfc2b167 \ - --hash=sha256:aac7501ae73d4a02f4b7ac8fcb9dc55342ca98ffb9ed9f2dfb8a25d53eda0e4d \ - --hash=sha256:ab84a8b698ad5a6c365b08061920138e7a7dd9a04b6feb09ba1bfae68346ce6d \ - --hash=sha256:b4adeb878a374126f1e5cf03b87f66279f479e01af0e9a654cf6d1509af46c40 \ - --hash=sha256:b9853509b4bf57ba7b1f99b9d866c422c9c5248799ab20e652bbb8a184a38181 \ - --hash=sha256:bb7d5fe92bd0dc235f63ebe9f8c6e0884f7360f88f3411bfed1350c872ef2054 \ - --hash=sha256:bca4c8abc50d38f9773c1ec80d43f3768df2e8576807d1656016b9d3eeaa96fd \ - --hash=sha256:c222958f59b0ae091f4535851cbb24eb57fc0baea07ba675af718fb5302dddb2 \ - --hash=sha256:c30e42ea11badb147f0d2e387115b15e2bd8205a5ad70d6ad79cf37f6ac08c91 \ - --hash=sha256:c3a79f56dee9136084cf84a6c7c4341427ef36e05ae6415bf7d787c96ff5eaa3 \ - --hash=sha256:c51ef82302386d686feea1c44dbeef744585da16fcf97deea2a8d6c1556f519b \ - --hash=sha256:c77326300b839c44c3e5a8fe26c15b7e87b2f32dfd2fc9fee1d13604347c9b38 \ - --hash=sha256:d33a785ea8354c480515e781554d3be582a86297e41ccbea627a5c632647f2cd \ - --hash=sha256:d546cfa78844b8b9c1c0533de1851569a13f87449897bbc95d698d1d3cb2a30f \ - --hash=sha256:da29ceabe3025a1e5a5aeeb331c5b1af686daab4ff0fb4f83df18b1180ea83e2 \ - --hash=sha256:df8c05a0f574d480947cba11b947dc41b1265d721c3777881da2fb8d3a1ddfba \ - --hash=sha256:e266af4da2c1a4cbc6135a570c64577fd3e6eb204607eaff99d8e9b710003c6f \ - 
--hash=sha256:e279f3db904e3b55f520f11f983cc8dc8a4ce9b65f11692d4718ed021ec58b83 \ - --hash=sha256:ea52bd218d4ba260399a8ae4bb6b577d82adfc4518b93566ce1fddd4a49d1dce \ - --hash=sha256:ebec65f5068e7df2d49466aab9128510c4867e532e07cb6960075b27658dca38 \ - --hash=sha256:ec1e3b40b82236d100d259854840555469fad4db64f669ab817279eb95cd535c \ - --hash=sha256:ee77c7bef0724165e795b6b7bf9c4c22a9b8468a6bdb9c6b4281293c6b22a90f \ - --hash=sha256:f263b18692f8ed52c8de7f40a0751e79015983dbd77b16906e5b310a39d3ca21 \ - --hash=sha256:f7b26757b22faf88fcf232f5f0e62f6e0fd9e22a8a5d0d5016888cdfe1f6c1c4 \ - --hash=sha256:f7ddb920106bbbbcaf2a274d56f46956bf56ecbde210d88061824a95bdd94e92 +coverage[toml]==7.2.2 \ + --hash=sha256:006ed5582e9cbc8115d2e22d6d2144a0725db542f654d9d4fda86793832f873d \ + --hash=sha256:046936ab032a2810dcaafd39cc4ef6dd295df1a7cbead08fe996d4765fca9fe4 \ + --hash=sha256:0484d9dd1e6f481b24070c87561c8d7151bdd8b044c93ac99faafd01f695c78e \ + --hash=sha256:0ce383d5f56d0729d2dd40e53fe3afeb8f2237244b0975e1427bfb2cf0d32bab \ + --hash=sha256:186e0fc9cf497365036d51d4d2ab76113fb74f729bd25da0975daab2e107fd90 \ + --hash=sha256:2199988e0bc8325d941b209f4fd1c6fa007024b1442c5576f1a32ca2e48941e6 \ + --hash=sha256:299bc75cb2a41e6741b5e470b8c9fb78d931edbd0cd009c58e5c84de57c06731 \ + --hash=sha256:3668291b50b69a0c1ef9f462c7df2c235da3c4073f49543b01e7eb1dee7dd540 \ + --hash=sha256:36dd42da34fe94ed98c39887b86db9d06777b1c8f860520e21126a75507024f2 \ + --hash=sha256:38004671848b5745bb05d4d621526fca30cee164db42a1f185615f39dc997292 \ + --hash=sha256:387fb46cb8e53ba7304d80aadca5dca84a2fbf6fe3faf6951d8cf2d46485d1e5 \ + --hash=sha256:3eb55b7b26389dd4f8ae911ba9bc8c027411163839dea4c8b8be54c4ee9ae10b \ + --hash=sha256:420f94a35e3e00a2b43ad5740f935358e24478354ce41c99407cddd283be00d2 \ + --hash=sha256:4ac0f522c3b6109c4b764ffec71bf04ebc0523e926ca7cbe6c5ac88f84faced0 \ + --hash=sha256:4c752d5264053a7cf2fe81c9e14f8a4fb261370a7bb344c2a011836a96fb3f57 \ + --hash=sha256:4f01911c010122f49a3e9bdc730eccc66f9b72bd410a3a9d3cb8448bb50d65d3 \ + --hash=sha256:4f68ee32d7c4164f1e2c8797535a6d0a3733355f5861e0f667e37df2d4b07140 \ + --hash=sha256:4fa54fb483decc45f94011898727802309a109d89446a3c76387d016057d2c84 \ + --hash=sha256:507e4720791977934bba016101579b8c500fb21c5fa3cd4cf256477331ddd988 \ + --hash=sha256:53d0fd4c17175aded9c633e319360d41a1f3c6e352ba94edcb0fa5167e2bad67 \ + --hash=sha256:55272f33da9a5d7cccd3774aeca7a01e500a614eaea2a77091e9be000ecd401d \ + --hash=sha256:5764e1f7471cb8f64b8cda0554f3d4c4085ae4b417bfeab236799863703e5de2 \ + --hash=sha256:57b77b9099f172804e695a40ebaa374f79e4fb8b92f3e167f66facbf92e8e7f5 \ + --hash=sha256:5afdad4cc4cc199fdf3e18088812edcf8f4c5a3c8e6cb69127513ad4cb7471a9 \ + --hash=sha256:5cc0783844c84af2522e3a99b9b761a979a3ef10fb87fc4048d1ee174e18a7d8 \ + --hash=sha256:5e1df45c23d4230e3d56d04414f9057eba501f78db60d4eeecfcb940501b08fd \ + --hash=sha256:6146910231ece63facfc5984234ad1b06a36cecc9fd0c028e59ac7c9b18c38c6 \ + --hash=sha256:797aad79e7b6182cb49c08cc5d2f7aa7b2128133b0926060d0a8889ac43843be \ + --hash=sha256:7c20b731211261dc9739bbe080c579a1835b0c2d9b274e5fcd903c3a7821cf88 \ + --hash=sha256:817295f06eacdc8623dc4df7d8b49cea65925030d4e1e2a7c7218380c0072c25 \ + --hash=sha256:81f63e0fb74effd5be736cfe07d710307cc0a3ccb8f4741f7f053c057615a137 \ + --hash=sha256:872d6ce1f5be73f05bea4df498c140b9e7ee5418bfa2cc8204e7f9b817caa968 \ + --hash=sha256:8c99cb7c26a3039a8a4ee3ca1efdde471e61b4837108847fb7d5be7789ed8fd9 \ + --hash=sha256:8dbe2647bf58d2c5a6c5bcc685f23b5f371909a5624e9f5cd51436d6a9f6c6ef \ + 
--hash=sha256:8efb48fa743d1c1a65ee8787b5b552681610f06c40a40b7ef94a5b517d885c54 \ + --hash=sha256:92ebc1619650409da324d001b3a36f14f63644c7f0a588e331f3b0f67491f512 \ + --hash=sha256:9d22e94e6dc86de981b1b684b342bec5e331401599ce652900ec59db52940005 \ + --hash=sha256:ba279aae162b20444881fc3ed4e4f934c1cf8620f3dab3b531480cf602c76b7f \ + --hash=sha256:bc4803779f0e4b06a2361f666e76f5c2e3715e8e379889d02251ec911befd149 \ + --hash=sha256:bfe7085783cda55e53510482fa7b5efc761fad1abe4d653b32710eb548ebdd2d \ + --hash=sha256:c448b5c9e3df5448a362208b8d4b9ed85305528313fca1b479f14f9fe0d873b8 \ + --hash=sha256:c90e73bdecb7b0d1cea65a08cb41e9d672ac6d7995603d6465ed4914b98b9ad7 \ + --hash=sha256:d2b96123a453a2d7f3995ddb9f28d01fd112319a7a4d5ca99796a7ff43f02af5 \ + --hash=sha256:d52f0a114b6a58305b11a5cdecd42b2e7f1ec77eb20e2b33969d702feafdd016 \ + --hash=sha256:d530191aa9c66ab4f190be8ac8cc7cfd8f4f3217da379606f3dd4e3d83feba69 \ + --hash=sha256:d683d230b5774816e7d784d7ed8444f2a40e7a450e5720d58af593cb0b94a212 \ + --hash=sha256:db45eec1dfccdadb179b0f9ca616872c6f700d23945ecc8f21bb105d74b1c5fc \ + --hash=sha256:db8c2c5ace167fd25ab5dd732714c51d4633f58bac21fb0ff63b0349f62755a8 \ + --hash=sha256:e2926b8abedf750c2ecf5035c07515770944acf02e1c46ab08f6348d24c5f94d \ + --hash=sha256:e627dee428a176ffb13697a2c4318d3f60b2ccdde3acdc9b3f304206ec130ccd \ + --hash=sha256:efe1c0adad110bf0ad7fb59f833880e489a61e39d699d37249bdf42f80590169 # via pytest-cov -cryptography==43.0.1 \ - --hash=sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494 \ - --hash=sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806 \ - --hash=sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d \ - --hash=sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062 \ - --hash=sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2 \ - --hash=sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4 \ - --hash=sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1 \ - --hash=sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85 \ - --hash=sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84 \ - --hash=sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042 \ - --hash=sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d \ - --hash=sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962 \ - --hash=sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2 \ - --hash=sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa \ - --hash=sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d \ - --hash=sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365 \ - --hash=sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96 \ - --hash=sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47 \ - --hash=sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d \ - --hash=sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d \ - --hash=sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c \ - --hash=sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb \ - --hash=sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277 \ - --hash=sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172 \ - --hash=sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034 \ - 
--hash=sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a \ - --hash=sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289 +cryptography==39.0.2 \ + --hash=sha256:103e8f7155f3ce2ffa0049fe60169878d47a4364b277906386f8de21c9234aa1 \ + --hash=sha256:23df8ca3f24699167daf3e23e51f7ba7334d504af63a94af468f468b975b7dd7 \ + --hash=sha256:2725672bb53bb92dc7b4150d233cd4b8c59615cd8288d495eaa86db00d4e5c06 \ + --hash=sha256:30b1d1bfd00f6fc80d11300a29f1d8ab2b8d9febb6ed4a38a76880ec564fae84 \ + --hash=sha256:35d658536b0a4117c885728d1a7032bdc9a5974722ae298d6c533755a6ee3915 \ + --hash=sha256:50cadb9b2f961757e712a9737ef33d89b8190c3ea34d0fb6675e00edbe35d074 \ + --hash=sha256:5f8c682e736513db7d04349b4f6693690170f95aac449c56f97415c6980edef5 \ + --hash=sha256:6236a9610c912b129610eb1a274bdc1350b5df834d124fa84729ebeaf7da42c3 \ + --hash=sha256:788b3921d763ee35dfdb04248d0e3de11e3ca8eb22e2e48fef880c42e1f3c8f9 \ + --hash=sha256:8bc0008ef798231fac03fe7d26e82d601d15bd16f3afaad1c6113771566570f3 \ + --hash=sha256:8f35c17bd4faed2bc7797d2a66cbb4f986242ce2e30340ab832e5d99ae60e011 \ + --hash=sha256:b49a88ff802e1993b7f749b1eeb31134f03c8d5c956e3c125c75558955cda536 \ + --hash=sha256:bc0521cce2c1d541634b19f3ac661d7a64f9555135e9d8af3980965be717fd4a \ + --hash=sha256:bc5b871e977c8ee5a1bbc42fa8d19bcc08baf0c51cbf1586b0e87a2694dde42f \ + --hash=sha256:c43ac224aabcbf83a947eeb8b17eaf1547bce3767ee2d70093b461f31729a480 \ + --hash=sha256:d15809e0dbdad486f4ad0979753518f47980020b7a34e9fc56e8be4f60702fac \ + --hash=sha256:d7d84a512a59f4412ca8549b01f94be4161c94efc598bf09d027d67826beddc0 \ + --hash=sha256:e029b844c21116564b8b61216befabca4b500e6816fa9f0ba49527653cae2108 \ + --hash=sha256:e8a0772016feeb106efd28d4a328e77dc2edae84dfbac06061319fdb669ff828 \ + --hash=sha256:e944fe07b6f229f4c1a06a7ef906a19652bdd9fd54c761b0ff87e83ae7a30354 \ + --hash=sha256:eb40fe69cfc6f5cdab9a5ebd022131ba21453cf7b8a7fd3631f45bbf52bed612 \ + --hash=sha256:fa507318e427169ade4e9eccef39e9011cdc19534f55ca2f36ec3f388c1f70f3 \ + --hash=sha256:ffd394c7896ed7821a6d13b24657c6a34b6e2650bd84ae063cf11ccffa4f1a97 # via # ansible-core # paramiko # pyjwt -cvss==3.2 \ - --hash=sha256:5e8e0f95e9e6c9dec0c2ef38f14153b2c019cfcd2fd6e75b5ad7da80a110fa8b \ - --hash=sha256:e2c2fbc8943c6df83d23c7ad74bb9c5e584c666a9455b013b598e292e5018469 +cvss==2.5 \ + --hash=sha256:63f648cffb2647498cf28646a7004fe0f48872c9ba7d8653bebd710409f8ba0e \ + --hash=sha256:c1a48f8a6024642b986c51182a79d7aa2c05d1d75c57ad3496b9ab0451a8e89a # via -r requirements.in decorator==5.1.1 \ --hash=sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330 \ @@ -394,43 +336,43 @@ defusedxml==0.7.1 \ --hash=sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69 \ --hash=sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61 # via jira -deprecated==1.2.14 \ - --hash=sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c \ - --hash=sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3 +deprecated==1.2.13 \ + --hash=sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d \ + --hash=sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d # via # fabric # pygithub -docstring-parser==0.16 \ - --hash=sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e \ - --hash=sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637 +docstring-parser==0.15 \ + --hash=sha256:48ddc093e8b1865899956fcc03b03e66bb7240c310fac5af81814580c55bf682 \ + 
--hash=sha256:d1679b86250d269d06a99670924d6bce45adc00b08069dae8c47d98e89b667a9 # via simple-parsing -ecdsa==0.19.0 \ - --hash=sha256:2cea9b88407fdac7bbeca0833b189e4c9c53f2ef1e1eaa29f6224dbc809b707a \ - --hash=sha256:60eaad1199659900dd0af521ed462b793bbdf867432b3948e87416ae4caf6bf8 +ecdsa==0.18.0 \ + --hash=sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49 \ + --hash=sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd # via ic-py fabric==3.2.2 \ --hash=sha256:8783ca42e3b0076f08b26901aac6b9d9b1f19c410074e7accfab902c184ff4a3 \ --hash=sha256:91c47c0be68b14936c88b34da8a1f55e5710fd28397dac5d4ff2e21558113a6f # via -r requirements.in -gitdb==4.0.11 \ - --hash=sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4 \ - --hash=sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b +gitdb==4.0.10 \ + --hash=sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a \ + --hash=sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7 # via gitpython -gitpython==3.1.43 \ - --hash=sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c \ - --hash=sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff +gitpython==3.1.31 \ + --hash=sha256:8ce3bcf69adfdf7c7d503e78fd3b1c492af782d58893b650adb2ac8912ddd573 \ + --hash=sha256:f04893614f6aa713a60cbbe1e6a97403ef633103cdd0ef5eb6efe0deb98dbe8d # via -r requirements.in h11==0.14.0 \ --hash=sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d \ --hash=sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761 # via httpcore -httpcore==1.0.6 \ - --hash=sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f \ - --hash=sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f +httpcore==0.16.3 \ + --hash=sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb \ + --hash=sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0 # via httpx -httpx==0.27.2 \ - --hash=sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0 \ - --hash=sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2 +httpx==0.23.3 \ + --hash=sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9 \ + --hash=sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6 # via ic-py ic-py @ https://github.com/rocklabs-io/ic-py/archive/53c375a1d6c1d09e8d24588142dece550b801cef.zip \ --hash=sha256:a07b08e996d4be6749ba10934cc519daf08f20b8874551960274d8dc2c6260f4 @@ -440,8 +382,8 @@ idna==3.4 \ --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via # anyio - # httpx # requests + # rfc3986 idracredfishsupport==0.0.8 \ --hash=sha256:964b17525f12db358fea93ce2ca491b7661fd8f7567365190da3e6729965bb19 # via -r requirements.in @@ -449,132 +391,118 @@ iniconfig==2.0.0 \ --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 # via pytest -invoke==2.2.0 \ - --hash=sha256:6ea924cc53d4f78e3d98bc436b08069a03077e6f85ad1ddaa8a116d7dad15820 \ - --hash=sha256:ee6cbb101af1a859c7fe84f2a264c059020b0cb7fe3535f9424300ab568f6bd5 +invoke==2.1.2 \ + --hash=sha256:a6cc1f06f75bacd0b1e11488fa3bf3e62f85e31f62e2c0172188613ba5b070e2 \ + --hash=sha256:bfc904df1c9e9fe1a881933de661fe054b8db616ff2c4cf78e00407fe473ba5d # via # -r requirements.in # fabric -jinja2==3.1.4 \ - 
--hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ - --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d +jinja2==3.1.2 \ + --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ + --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via ansible-core -jira==3.8.0 \ - --hash=sha256:12190dc84dad00b8a6c0341f7e8a254b0f38785afdec022bd5941e1184a5a3fb \ - --hash=sha256:63719c529a570aaa01c3373dbb5a104dab70381c5be447f6c27f997302fa335a +jira==3.4.1 \ + --hash=sha256:20e2df51cc686f663034543c4dec65783e9c721de6448b433d659d5c95b01c7b \ + --hash=sha256:9b1286a3eada593bb1c83e12989dc2e7c796867523049fa6a680c3a2757e4d17 # via -r requirements.in junit-xml==1.9 \ --hash=sha256:de16a051990d4e25a3982b2dd9e89d671067548718866416faec14d9de56db9f \ --hash=sha256:ec5ca1a55aefdd76d28fcc0b135251d156c7106fa979686a4b48d62b761b4732 # via -r requirements.in -leb128==1.0.8 \ - --hash=sha256:3a52dca242f93f87a3d766380a93a3fad53ef4044f03018d21705d3b2d9021ee \ - --hash=sha256:76cd271e75ea91aa2fbf7783d60cb7d667b62143d544bcee59159ff258bf4523 +leb128==1.0.5 \ + --hash=sha256:cb16001f0087b499ab51f6b8e3ef8377ba14a0c9990db85316dedf0ad4a54e80 # via ic-py loguru==0.7.2 \ --hash=sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb \ --hash=sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac # via -r requirements.in -markupsafe==3.0.1 \ - --hash=sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396 \ - --hash=sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38 \ - --hash=sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a \ - --hash=sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8 \ - --hash=sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b \ - --hash=sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad \ - --hash=sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a \ - --hash=sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a \ - --hash=sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da \ - --hash=sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6 \ - --hash=sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8 \ - --hash=sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344 \ - --hash=sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a \ - --hash=sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8 \ - --hash=sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5 \ - --hash=sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7 \ - --hash=sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170 \ - --hash=sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132 \ - --hash=sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9 \ - --hash=sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd \ - --hash=sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9 \ - --hash=sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346 \ - --hash=sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc \ - --hash=sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589 \ - --hash=sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5 \ - 
--hash=sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915 \ - --hash=sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295 \ - --hash=sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453 \ - --hash=sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea \ - --hash=sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b \ - --hash=sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d \ - --hash=sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b \ - --hash=sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4 \ - --hash=sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b \ - --hash=sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7 \ - --hash=sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf \ - --hash=sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f \ - --hash=sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91 \ - --hash=sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd \ - --hash=sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50 \ - --hash=sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b \ - --hash=sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583 \ - --hash=sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a \ - --hash=sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984 \ - --hash=sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c \ - --hash=sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c \ - --hash=sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25 \ - --hash=sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa \ - --hash=sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4 \ - --hash=sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3 \ - --hash=sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97 \ - --hash=sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1 \ - --hash=sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd \ - --hash=sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772 \ - --hash=sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a \ - --hash=sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729 \ - --hash=sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca \ - --hash=sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6 \ - --hash=sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635 \ - --hash=sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b \ - --hash=sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f +markupsafe==2.1.2 \ + --hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \ + --hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \ + --hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2 \ + --hash=sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460 \ + --hash=sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7 \ + --hash=sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0 \ + 
--hash=sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1 \ + --hash=sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa \ + --hash=sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03 \ + --hash=sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323 \ + --hash=sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65 \ + --hash=sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013 \ + --hash=sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036 \ + --hash=sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f \ + --hash=sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4 \ + --hash=sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419 \ + --hash=sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2 \ + --hash=sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619 \ + --hash=sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a \ + --hash=sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a \ + --hash=sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd \ + --hash=sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7 \ + --hash=sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666 \ + --hash=sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65 \ + --hash=sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859 \ + --hash=sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625 \ + --hash=sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff \ + --hash=sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156 \ + --hash=sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd \ + --hash=sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba \ + --hash=sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f \ + --hash=sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1 \ + --hash=sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094 \ + --hash=sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a \ + --hash=sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513 \ + --hash=sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed \ + --hash=sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d \ + --hash=sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3 \ + --hash=sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147 \ + --hash=sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c \ + --hash=sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603 \ + --hash=sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601 \ + --hash=sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a \ + --hash=sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1 \ + --hash=sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d \ + --hash=sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3 \ + --hash=sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54 \ + --hash=sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2 \ + 
--hash=sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6 \ + --hash=sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58 # via jinja2 mnemonic==0.20 \ --hash=sha256:7c6fb5639d779388027a77944680aee4870f0fcd09b1e42a5525ee2ce4c625f6 \ --hash=sha256:acd2168872d0379e7a10873bb3e12bf6c91b35de758135c4fbd1015ef18fafc5 # via ic-py -mypy==1.12.0 \ - --hash=sha256:060a07b10e999ac9e7fa249ce2bdcfa9183ca2b70756f3bce9df7a92f78a3c0a \ - --hash=sha256:06de0498798527451ffb60f68db0d368bd2bae2bbfb5237eae616d4330cc87aa \ - --hash=sha256:0eff042d7257f39ba4ca06641d110ca7d2ad98c9c1fb52200fe6b1c865d360ff \ - --hash=sha256:1ebf9e796521f99d61864ed89d1fb2926d9ab6a5fab421e457cd9c7e4dd65aa9 \ - --hash=sha256:20c7c5ce0c1be0b0aea628374e6cf68b420bcc772d85c3c974f675b88e3e6e57 \ - --hash=sha256:233e11b3f73ee1f10efada2e6da0f555b2f3a5316e9d8a4a1224acc10e7181d3 \ - --hash=sha256:2c40658d4fa1ab27cb53d9e2f1066345596af2f8fe4827defc398a09c7c9519b \ - --hash=sha256:2f106db5ccb60681b622ac768455743ee0e6a857724d648c9629a9bd2ac3f721 \ - --hash=sha256:4397081e620dc4dc18e2f124d5e1d2c288194c2c08df6bdb1db31c38cd1fe1ed \ - --hash=sha256:48d3e37dd7d9403e38fa86c46191de72705166d40b8c9f91a3de77350daa0893 \ - --hash=sha256:4ae8959c21abcf9d73aa6c74a313c45c0b5a188752bf37dace564e29f06e9c1b \ - --hash=sha256:4b86de37a0da945f6d48cf110d5206c5ed514b1ca2614d7ad652d4bf099c7de7 \ - --hash=sha256:52b9e1492e47e1790360a43755fa04101a7ac72287b1a53ce817f35899ba0521 \ - --hash=sha256:5bc81701d52cc8767005fdd2a08c19980de9ec61a25dbd2a937dfb1338a826f9 \ - --hash=sha256:5feee5c74eb9749e91b77f60b30771563327329e29218d95bedbe1257e2fe4b0 \ - --hash=sha256:65a22d87e757ccd95cbbf6f7e181e6caa87128255eb2b6be901bb71b26d8a99d \ - --hash=sha256:684a9c508a283f324804fea3f0effeb7858eb03f85c4402a967d187f64562469 \ - --hash=sha256:6b5df6c8a8224f6b86746bda716bbe4dbe0ce89fd67b1fa4661e11bfe38e8ec8 \ - --hash=sha256:6cabe4cda2fa5eca7ac94854c6c37039324baaa428ecbf4de4567279e9810f9e \ - --hash=sha256:77278e8c6ffe2abfba6db4125de55f1024de9a323be13d20e4f73b8ed3402bd1 \ - --hash=sha256:8462655b6694feb1c99e433ea905d46c478041a8b8f0c33f1dab00ae881b2164 \ - --hash=sha256:923ea66d282d8af9e0f9c21ffc6653643abb95b658c3a8a32dca1eff09c06475 \ - --hash=sha256:9b9ce1ad8daeb049c0b55fdb753d7414260bad8952645367e70ac91aec90e07e \ - --hash=sha256:a64ee25f05fc2d3d8474985c58042b6759100a475f8237da1f4faf7fcd7e6309 \ - --hash=sha256:bfe012b50e1491d439172c43ccb50db66d23fab714d500b57ed52526a1020bb7 \ - --hash=sha256:c72861b7139a4f738344faa0e150834467521a3fba42dc98264e5aa9507dd601 \ - --hash=sha256:dcfb754dea911039ac12434d1950d69a2f05acd4d56f7935ed402be09fad145e \ - --hash=sha256:dee78a8b9746c30c1e617ccb1307b351ded57f0de0d287ca6276378d770006c0 \ - --hash=sha256:e478601cc3e3fa9d6734d255a59c7a2e5c2934da4378f3dd1e3411ea8a248642 \ - --hash=sha256:eafc1b7319b40ddabdc3db8d7d48e76cfc65bbeeafaa525a4e0fa6b76175467f \ - --hash=sha256:faca7ab947c9f457a08dcb8d9a8664fd438080e002b0fa3e41b0535335edcf7f \ - --hash=sha256:fd313226af375d52e1e36c383f39bf3836e1f192801116b31b090dfcd3ec5266 +multimethod==1.9.1 \ + --hash=sha256:1589bf52ca294667fd15527ea830127c763f5bfc38562e3642591ffd0fd9d56f \ + --hash=sha256:52f8f1f2b9d5a4c7adfdcc114dbeeebe3245a4420801e8807e26522a79fb6bc2 + # via waiter +mypy==1.1.1 \ + --hash=sha256:0a28a76785bf57655a8ea5eb0540a15b0e781c807b5aa798bd463779988fa1d5 \ + --hash=sha256:19ba15f9627a5723e522d007fe708007bae52b93faab00f95d72f03e1afa9598 \ + --hash=sha256:21b437be1c02712a605591e1ed1d858aba681757a1e55fe678a15c2244cd68a5 \ + 
--hash=sha256:26cdd6a22b9b40b2fd71881a8a4f34b4d7914c679f154f43385ca878a8297389 \ + --hash=sha256:2888ce4fe5aae5a673386fa232473014056967f3904f5abfcf6367b5af1f612a \ + --hash=sha256:2b0c373d071593deefbcdd87ec8db91ea13bd8f1328d44947e88beae21e8d5e9 \ + --hash=sha256:315ac73cc1cce4771c27d426b7ea558fb4e2836f89cb0296cbe056894e3a1f78 \ + --hash=sha256:39c7119335be05630611ee798cc982623b9e8f0cff04a0b48dfc26100e0b97af \ + --hash=sha256:4b398d8b1f4fba0e3c6463e02f8ad3346f71956b92287af22c9b12c3ec965a9f \ + --hash=sha256:4e4e8b362cdf99ba00c2b218036002bdcdf1e0de085cdb296a49df03fb31dfc4 \ + --hash=sha256:59bbd71e5c58eed2e992ce6523180e03c221dcd92b52f0e792f291d67b15a71c \ + --hash=sha256:5b5f81b40d94c785f288948c16e1f2da37203c6006546c5d947aab6f90aefef2 \ + --hash=sha256:5cb14ff9919b7df3538590fc4d4c49a0f84392237cbf5f7a816b4161c061829e \ + --hash=sha256:61bf08362e93b6b12fad3eab68c4ea903a077b87c90ac06c11e3d7a09b56b9c1 \ + --hash=sha256:64cc3afb3e9e71a79d06e3ed24bb508a6d66f782aff7e56f628bf35ba2e0ba51 \ + --hash=sha256:69b35d1dcb5707382810765ed34da9db47e7f95b3528334a3c999b0c90fe523f \ + --hash=sha256:9401e33814cec6aec8c03a9548e9385e0e228fc1b8b0a37b9ea21038e64cdd8a \ + --hash=sha256:a380c041db500e1410bb5b16b3c1c35e61e773a5c3517926b81dfdab7582be54 \ + --hash=sha256:ae9ceae0f5b9059f33dbc62dea087e942c0ccab4b7a003719cb70f9b8abfa32f \ + --hash=sha256:b7c7b708fe9a871a96626d61912e3f4ddd365bf7f39128362bc50cbd74a634d5 \ + --hash=sha256:c1c10fa12df1232c936830839e2e935d090fc9ee315744ac33b8a32216b93707 \ + --hash=sha256:ce61663faf7a8e5ec6f456857bfbcec2901fbdb3ad958b778403f63b9e606a1b \ + --hash=sha256:d64c28e03ce40d5303450f547e07418c64c241669ab20610f273c9e6290b4b0b \ + --hash=sha256:d809f88734f44a0d44959d795b1e6f64b2bbe0ea4d9cc4776aa588bb4229fc1c \ + --hash=sha256:dbb19c9f662e41e474e0cff502b7064a7edc6764f5262b6cd91d698163196799 \ + --hash=sha256:ef6a01e563ec6a4940784c574d33f6ac1943864634517984471642908b30b6f7 # via -r requirements.in mypy-extensions==1.0.0 \ --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ @@ -591,116 +519,38 @@ oauthlib==3.2.2 \ --hash=sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca \ --hash=sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918 # via requests-oauthlib -packaging==24.1 \ - --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ - --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 +packaging==21.3 \ + --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ + --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 # via # -r requirements.in # ansible-core # jira # pytest -paramiko==3.5.0 \ - --hash=sha256:1fedf06b085359051cd7d0d270cebe19e755a8a921cc2ddbfa647fb0cd7d68f9 \ - --hash=sha256:ad11e540da4f55cedda52931f1a3f812a8238a7af7f62a60de538cd80bb28124 +paramiko==3.0.0 \ + --hash=sha256:6bef55b882c9d130f8015b9a26f4bd93f710e90fe7478b9dcc810304e79b3cd8 \ + --hash=sha256:fedc9b1dd43bc1d45f67f1ceca10bc336605427a46dcdf8dec6bfea3edf57965 # via # -r requirements.in # fabric -parse==1.20.2 \ - --hash=sha256:967095588cb802add9177d0c0b6133b5ba33b1ea9007ca800e526f42a85af558 \ - --hash=sha256:b41d604d16503c79d81af5165155c0b20f6c8d6c559efa66b4b695c3e5a0a0ce +parse==1.19.0 \ + --hash=sha256:9ff82852bcb65d139813e2a5197627a94966245c897796760a3a2a8eb66f020b # via -r requirements.in -pillow==11.0.0 \ - --hash=sha256:00177a63030d612148e659b55ba99527803288cea7c75fb05766ab7981a8c1b7 \ - 
--hash=sha256:006bcdd307cc47ba43e924099a038cbf9591062e6c50e570819743f5607404f5 \ - --hash=sha256:084a07ef0821cfe4858fe86652fffac8e187b6ae677e9906e192aafcc1b69903 \ - --hash=sha256:0ae08bd8ffc41aebf578c2af2f9d8749d91f448b3bfd41d7d9ff573d74f2a6b2 \ - --hash=sha256:0e038b0745997c7dcaae350d35859c9715c71e92ffb7e0f4a8e8a16732150f38 \ - --hash=sha256:1187739620f2b365de756ce086fdb3604573337cc28a0d3ac4a01ab6b2d2a6d2 \ - --hash=sha256:16095692a253047fe3ec028e951fa4221a1f3ed3d80c397e83541a3037ff67c9 \ - --hash=sha256:1a61b54f87ab5786b8479f81c4b11f4d61702830354520837f8cc791ebba0f5f \ - --hash=sha256:1c1d72714f429a521d8d2d018badc42414c3077eb187a59579f28e4270b4b0fc \ - --hash=sha256:1e2688958a840c822279fda0086fec1fdab2f95bf2b717b66871c4ad9859d7e8 \ - --hash=sha256:20ec184af98a121fb2da42642dea8a29ec80fc3efbaefb86d8fdd2606619045d \ - --hash=sha256:21a0d3b115009ebb8ac3d2ebec5c2982cc693da935f4ab7bb5c8ebe2f47d36f2 \ - --hash=sha256:224aaa38177597bb179f3ec87eeefcce8e4f85e608025e9cfac60de237ba6316 \ - --hash=sha256:2679d2258b7f1192b378e2893a8a0a0ca472234d4c2c0e6bdd3380e8dfa21b6a \ - --hash=sha256:27a7860107500d813fcd203b4ea19b04babe79448268403172782754870dac25 \ - --hash=sha256:290f2cc809f9da7d6d622550bbf4c1e57518212da51b6a30fe8e0a270a5b78bd \ - --hash=sha256:2e46773dc9f35a1dd28bd6981332fd7f27bec001a918a72a79b4133cf5291dba \ - --hash=sha256:3107c66e43bda25359d5ef446f59c497de2b5ed4c7fdba0894f8d6cf3822dafc \ - --hash=sha256:375b8dd15a1f5d2feafff536d47e22f69625c1aa92f12b339ec0b2ca40263273 \ - --hash=sha256:45c566eb10b8967d71bf1ab8e4a525e5a93519e29ea071459ce517f6b903d7fa \ - --hash=sha256:499c3a1b0d6fc8213519e193796eb1a86a1be4b1877d678b30f83fd979811d1a \ - --hash=sha256:4ad70c4214f67d7466bea6a08061eba35c01b1b89eaa098040a35272a8efb22b \ - --hash=sha256:4b60c9520f7207aaf2e1d94de026682fc227806c6e1f55bba7606d1c94dd623a \ - --hash=sha256:5178952973e588b3f1360868847334e9e3bf49d19e169bbbdfaf8398002419ae \ - --hash=sha256:52a2d8323a465f84faaba5236567d212c3668f2ab53e1c74c15583cf507a0291 \ - --hash=sha256:598b4e238f13276e0008299bd2482003f48158e2b11826862b1eb2ad7c768b97 \ - --hash=sha256:5bd2d3bdb846d757055910f0a59792d33b555800813c3b39ada1829c372ccb06 \ - --hash=sha256:5c39ed17edea3bc69c743a8dd3e9853b7509625c2462532e62baa0732163a904 \ - --hash=sha256:5d203af30149ae339ad1b4f710d9844ed8796e97fda23ffbc4cc472968a47d0b \ - --hash=sha256:5ddbfd761ee00c12ee1be86c9c0683ecf5bb14c9772ddbd782085779a63dd55b \ - --hash=sha256:607bbe123c74e272e381a8d1957083a9463401f7bd01287f50521ecb05a313f8 \ - --hash=sha256:61b887f9ddba63ddf62fd02a3ba7add935d053b6dd7d58998c630e6dbade8527 \ - --hash=sha256:6619654954dc4936fcff82db8eb6401d3159ec6be81e33c6000dfd76ae189947 \ - --hash=sha256:674629ff60030d144b7bca2b8330225a9b11c482ed408813924619c6f302fdbb \ - --hash=sha256:6ec0d5af64f2e3d64a165f490d96368bb5dea8b8f9ad04487f9ab60dc4bb6003 \ - --hash=sha256:6f4dba50cfa56f910241eb7f883c20f1e7b1d8f7d91c750cd0b318bad443f4d5 \ - --hash=sha256:70fbbdacd1d271b77b7721fe3cdd2d537bbbd75d29e6300c672ec6bb38d9672f \ - --hash=sha256:72bacbaf24ac003fea9bff9837d1eedb6088758d41e100c1552930151f677739 \ - --hash=sha256:7326a1787e3c7b0429659e0a944725e1b03eeaa10edd945a86dead1913383944 \ - --hash=sha256:73853108f56df97baf2bb8b522f3578221e56f646ba345a372c78326710d3830 \ - --hash=sha256:73e3a0200cdda995c7e43dd47436c1548f87a30bb27fb871f352a22ab8dcf45f \ - --hash=sha256:75acbbeb05b86bc53cbe7b7e6fe00fbcf82ad7c684b3ad82e3d711da9ba287d3 \ - --hash=sha256:8069c5179902dcdce0be9bfc8235347fdbac249d23bd90514b7a47a72d9fecf4 \ - 
--hash=sha256:846e193e103b41e984ac921b335df59195356ce3f71dcfd155aa79c603873b84 \ - --hash=sha256:8594f42df584e5b4bb9281799698403f7af489fba84c34d53d1c4bfb71b7c4e7 \ - --hash=sha256:86510e3f5eca0ab87429dd77fafc04693195eec7fd6a137c389c3eeb4cfb77c6 \ - --hash=sha256:8853a3bf12afddfdf15f57c4b02d7ded92c7a75a5d7331d19f4f9572a89c17e6 \ - --hash=sha256:88a58d8ac0cc0e7f3a014509f0455248a76629ca9b604eca7dc5927cc593c5e9 \ - --hash=sha256:8ba470552b48e5835f1d23ecb936bb7f71d206f9dfeee64245f30c3270b994de \ - --hash=sha256:8c676b587da5673d3c75bd67dd2a8cdfeb282ca38a30f37950511766b26858c4 \ - --hash=sha256:8ec4a89295cd6cd4d1058a5e6aec6bf51e0eaaf9714774e1bfac7cfc9051db47 \ - --hash=sha256:94f3e1780abb45062287b4614a5bc0874519c86a777d4a7ad34978e86428b8dd \ - --hash=sha256:9a0f748eaa434a41fccf8e1ee7a3eed68af1b690e75328fd7a60af123c193b50 \ - --hash=sha256:a5629742881bcbc1f42e840af185fd4d83a5edeb96475a575f4da50d6ede337c \ - --hash=sha256:a65149d8ada1055029fcb665452b2814fe7d7082fcb0c5bed6db851cb69b2086 \ - --hash=sha256:b3c5ac4bed7519088103d9450a1107f76308ecf91d6dabc8a33a2fcfb18d0fba \ - --hash=sha256:b4fd7bd29610a83a8c9b564d457cf5bd92b4e11e79a4ee4716a63c959699b306 \ - --hash=sha256:bcd1fb5bb7b07f64c15618c89efcc2cfa3e95f0e3bcdbaf4642509de1942a699 \ - --hash=sha256:c12b5ae868897c7338519c03049a806af85b9b8c237b7d675b8c5e089e4a618e \ - --hash=sha256:c26845094b1af3c91852745ae78e3ea47abf3dbcd1cf962f16b9a5fbe3ee8488 \ - --hash=sha256:c6a660307ca9d4867caa8d9ca2c2658ab685de83792d1876274991adec7b93fa \ - --hash=sha256:c809a70e43c7977c4a42aefd62f0131823ebf7dd73556fa5d5950f5b354087e2 \ - --hash=sha256:c8b2351c85d855293a299038e1f89db92a2f35e8d2f783489c6f0b2b5f3fe8a3 \ - --hash=sha256:cb929ca942d0ec4fac404cbf520ee6cac37bf35be479b970c4ffadf2b6a1cad9 \ - --hash=sha256:d2c0a187a92a1cb5ef2c8ed5412dd8d4334272617f532d4ad4de31e0495bd923 \ - --hash=sha256:d69bfd8ec3219ae71bcde1f942b728903cad25fafe3100ba2258b973bd2bc1b2 \ - --hash=sha256:daffdf51ee5db69a82dd127eabecce20729e21f7a3680cf7cbb23f0829189790 \ - --hash=sha256:e58876c91f97b0952eb766123bfef372792ab3f4e3e1f1a2267834c2ab131734 \ - --hash=sha256:eda2616eb2313cbb3eebbe51f19362eb434b18e3bb599466a1ffa76a033fb916 \ - --hash=sha256:ee217c198f2e41f184f3869f3e485557296d505b5195c513b2bfe0062dc537f1 \ - --hash=sha256:f02541ef64077f22bf4924f225c0fd1248c168f86e4b7abdedd87d6ebaceab0f \ - --hash=sha256:f1b82c27e89fffc6da125d5eb0ca6e68017faf5efc078128cfaa42cf5cb38798 \ - --hash=sha256:fba162b8872d30fea8c52b258a542c5dfd7b235fb5cb352240c8d63b414013eb \ - --hash=sha256:fbbcb7b57dc9c794843e3d1258c0fbf0f48656d46ffe9e09b63bbd6e8cd5d0a2 \ - --hash=sha256:fcb4621042ac4b7865c179bb972ed0da0218a076dc1820ffc48b1d74c1e37fe9 - # via jira -pluggy==1.5.0 \ - --hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \ - --hash=sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669 +pluggy==1.0.0 \ + --hash=sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159 \ + --hash=sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3 # via pytest -pycparser==2.22 \ - --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ - --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc +pycparser==2.21 \ + --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ + --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 # via cffi -pygithub==2.4.0 \ - --hash=sha256:6601e22627e87bac192f1e2e39c6e6f69a43152cfb8f307cee575879320b3051 \ - 
--hash=sha256:81935aa4bdc939fba98fee1cb47422c09157c56a27966476ff92775602b9ee24 +pygithub==2.3.0 \ + --hash=sha256:0148d7347a1cdeed99af905077010aef81a4dad988b0ba51d4108bf66b443f7e \ + --hash=sha256:65b499728be3ce7b0cd2cd760da3b32f0f4d7bc55e5e0677617f90f6564e793e # via -r requirements.in -pyjwt[crypto]==2.9.0 \ - --hash=sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850 \ - --hash=sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c +pyjwt[crypto]==2.6.0 \ + --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ + --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 # via pygithub pynacl==1.5.0 \ --hash=sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858 \ @@ -716,15 +566,19 @@ pynacl==1.5.0 \ # via # paramiko # pygithub -pytest==8.3.3 \ - --hash=sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181 \ - --hash=sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2 +pyparsing==3.0.9 \ + --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ + --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc + # via packaging +pytest==7.2.2 \ + --hash=sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e \ + --hash=sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4 # via # -r requirements.in # pytest-cov -pytest-cov==5.0.0 \ - --hash=sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652 \ - --hash=sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857 +pytest-cov==4.0.0 \ + --hash=sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b \ + --hash=sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470 # via -r requirements.in pyyaml==6.0.2 \ --hash=sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff \ @@ -783,9 +637,9 @@ pyyaml==6.0.2 \ # via # -r requirements.in # ansible-core -requests==2.32.3 \ - --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ - --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 +requests==2.28.2 \ + --hash=sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa \ + --hash=sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf # via # -r requirements.in # idracredfishsupport @@ -793,21 +647,25 @@ requests==2.32.3 \ # pygithub # requests-oauthlib # requests-toolbelt -requests-oauthlib==2.0.0 \ - --hash=sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36 \ - --hash=sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9 +requests-oauthlib==1.3.1 \ + --hash=sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5 \ + --hash=sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a # via jira -requests-toolbelt==1.0.0 \ - --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \ - --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 +requests-toolbelt==0.10.1 \ + --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ + --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d # via jira -resolvelib==1.0.1 \ - --hash=sha256:04ce76cbd63fded2078ce224785da6ecd42b9564b1390793f64ddecbe997b309 \ - --hash=sha256:d2da45d1a8dfee81bdd591647783e340ef3bcb104b54c383f70d422ef5cc7dbf +resolvelib==0.8.1 \ + 
--hash=sha256:c6ea56732e9fb6fca1b2acc2ccc68a0b6b8c566d8f3e78e0443310ede61dbd37 \ + --hash=sha256:d9b7907f055c3b3a2cfc56c914ffd940122915826ff5fb5b1de0c99778f4de98 # via ansible-core -simple-parsing==0.1.6 \ - --hash=sha256:2a6e74b061fb754cc441559e8dcea9d108286d9e0ffaa9cca4eea6bbe85372e1 \ - --hash=sha256:dad192e9633515a5627e343106636590a39a5ce85f6c47ced43507044ed98956 +rfc3986[idna2008]==1.5.0 \ + --hash=sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835 \ + --hash=sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97 + # via httpx +simple-parsing==0.1.4 \ + --hash=sha256:1fb042499d8872090ed3aad1f60f4d53026d2062942196a600933bc83787328e \ + --hash=sha256:312300327d8f0beb61b6419500423bd335be8b83da66f733e9c67c6c18341ae8 # via -r requirements.in six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ @@ -816,19 +674,20 @@ six==1.16.0 \ # ecdsa # junit-xml # nested-lookup -smmap==5.0.1 \ - --hash=sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62 \ - --hash=sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da +smmap==5.0.0 \ + --hash=sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94 \ + --hash=sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936 # via gitdb -sniffio==1.3.1 \ - --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \ - --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc +sniffio==1.3.0 \ + --hash=sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101 \ + --hash=sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384 # via # anyio + # httpcore # httpx -tqdm==4.66.5 \ - --hash=sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd \ - --hash=sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad +tqdm==4.66.1 \ + --hash=sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386 \ + --hash=sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7 # via -r requirements.in typing-extensions==4.12.2 \ --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ @@ -839,88 +698,93 @@ typing-extensions==4.12.2 \ # mypy # pygithub # simple-parsing -urllib3==2.2.3 \ - --hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \ - --hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9 +urllib3==1.26.14 \ + --hash=sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72 \ + --hash=sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1 # via # pygithub # requests uuid==1.30 \ --hash=sha256:1f87cc004ac5120466f36c5beae48b4c48cc411968eed0eaecd3da82aa96193f # via -r requirements.in -waiter==1.5 \ - --hash=sha256:5828a6370aa1b4c7e35cd5c00d9463007b91c96b38ded76ec84e47e5e0bb0117 \ - --hash=sha256:97767067a18af26bd344f75cffdcd6c5bcccdf1c7b5588278855a8339a4570ca +waiter==1.3 \ + --hash=sha256:76e5ea32c557b534268b157f68d8c3344ae073b9f09cf960bdc55159e507d3ff \ + --hash=sha256:bf172186611744bf8f0d3837c82884a0a68e8ee3c319d96f26ba530aa515c730 # via ic-py -wrapt==1.16.0 \ - --hash=sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc \ - --hash=sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81 \ - --hash=sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09 \ - --hash=sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e \ - 
--hash=sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca \ - --hash=sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0 \ - --hash=sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb \ - --hash=sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487 \ - --hash=sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40 \ - --hash=sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c \ - --hash=sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060 \ - --hash=sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202 \ - --hash=sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41 \ - --hash=sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9 \ - --hash=sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b \ - --hash=sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664 \ - --hash=sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d \ - --hash=sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362 \ - --hash=sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00 \ - --hash=sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc \ - --hash=sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1 \ - --hash=sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267 \ - --hash=sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956 \ - --hash=sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966 \ - --hash=sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1 \ - --hash=sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228 \ - --hash=sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72 \ - --hash=sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d \ - --hash=sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292 \ - --hash=sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0 \ - --hash=sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0 \ - --hash=sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36 \ - --hash=sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c \ - --hash=sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5 \ - --hash=sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f \ - --hash=sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73 \ - --hash=sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b \ - --hash=sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2 \ - --hash=sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593 \ - --hash=sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39 \ - --hash=sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389 \ - --hash=sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf \ - --hash=sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf \ - --hash=sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89 \ - --hash=sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c \ - --hash=sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c \ - 
--hash=sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f \ - --hash=sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440 \ - --hash=sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465 \ - --hash=sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136 \ - --hash=sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b \ - --hash=sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8 \ - --hash=sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3 \ - --hash=sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8 \ - --hash=sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6 \ - --hash=sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e \ - --hash=sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f \ - --hash=sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c \ - --hash=sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e \ - --hash=sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8 \ - --hash=sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2 \ - --hash=sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020 \ - --hash=sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35 \ - --hash=sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d \ - --hash=sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3 \ - --hash=sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537 \ - --hash=sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809 \ - --hash=sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d \ - --hash=sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a \ - --hash=sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4 +wrapt==1.15.0 \ + --hash=sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0 \ + --hash=sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420 \ + --hash=sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a \ + --hash=sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c \ + --hash=sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079 \ + --hash=sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923 \ + --hash=sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f \ + --hash=sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1 \ + --hash=sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8 \ + --hash=sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86 \ + --hash=sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0 \ + --hash=sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364 \ + --hash=sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e \ + --hash=sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c \ + --hash=sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e \ + --hash=sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c \ + --hash=sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727 \ + --hash=sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff \ + 
--hash=sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e \ + --hash=sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29 \ + --hash=sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7 \ + --hash=sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72 \ + --hash=sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475 \ + --hash=sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a \ + --hash=sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317 \ + --hash=sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2 \ + --hash=sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd \ + --hash=sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640 \ + --hash=sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98 \ + --hash=sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248 \ + --hash=sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e \ + --hash=sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d \ + --hash=sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec \ + --hash=sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1 \ + --hash=sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e \ + --hash=sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9 \ + --hash=sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92 \ + --hash=sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb \ + --hash=sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094 \ + --hash=sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46 \ + --hash=sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29 \ + --hash=sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd \ + --hash=sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705 \ + --hash=sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8 \ + --hash=sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975 \ + --hash=sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb \ + --hash=sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e \ + --hash=sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b \ + --hash=sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418 \ + --hash=sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019 \ + --hash=sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1 \ + --hash=sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba \ + --hash=sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6 \ + --hash=sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2 \ + --hash=sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3 \ + --hash=sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7 \ + --hash=sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752 \ + --hash=sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416 \ + --hash=sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f \ + --hash=sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1 \ + 
--hash=sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc \ + --hash=sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145 \ + --hash=sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee \ + --hash=sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a \ + --hash=sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7 \ + --hash=sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b \ + --hash=sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653 \ + --hash=sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0 \ + --hash=sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90 \ + --hash=sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29 \ + --hash=sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6 \ + --hash=sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034 \ + --hash=sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09 \ + --hash=sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559 \ + --hash=sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639 # via deprecated diff --git a/rs/tests/BUILD.bazel b/rs/tests/BUILD.bazel index eda4a434fb1..4ab9e4c1a5c 100644 --- a/rs/tests/BUILD.bazel +++ b/rs/tests/BUILD.bazel @@ -37,6 +37,106 @@ rust_library( # Packages we install into the image; see base image definition and its # 'manifest = ...' field for more information PACKAGES = [ + "@focal//ca-certificates/amd64", + "@focal//bash/amd64", + "@focal//coreutils/amd64", + "@focal//gawk/amd64", + "@focal//dosfstools/amd64", + "@focal//libssl1.1/amd64", + "@focal//libunwind8/amd64", + "@focal//mtools/amd64", + "@focal//openssh-client/amd64", + "@focal//rsync/amd64", + "@focal//gzip/amd64", + "@focal//zstd/amd64", +] + +tar( + name = "sh", + mtree = [ + # needed as dpkg assumes sh is installed in a typical debian installation. + "./bin/sh type=link link=/bin/bash", + ], +) + +tar( + name = "mkfsvfat", + mtree = [ + # symlink instead of updating the PATH + "./bin/mkfs.vfat type=link link=/sbin/mkfs.vfat", + ], +) + +tar( + name = "awk", + mtree = [ + # we install the GNU variant but call 'awk' so we symlink + "./bin/awk type=link link=/usr/bin/gawk", + ], +) + +# Creates /var/lib/dpkg/status with installed package information. 
+dpkg_status( + name = "dpkg_status", + controls = [ + "%s:control" % package + for package in PACKAGES + ], +) + +passwd( + name = "passwd", + entries = [ + dict( + gecos = ["root"], + gid = 0, + home = "/root", + shell = "/usr/bin/bash", + uid = 0, + username = "root", + ), + ], +) + +oci_image( + name = "ubuntu_test_runtime_image_src", + architecture = "amd64", + os = "linux", + target_compatible_with = [ + "@platforms//os:linux", + ], + tars = [ + ":dpkg_status", + ":passwd", # needed because we ssh in + + # symlinks + ":awk", + ":mkfsvfat", + ":sh", + ] + PACKAGES, +) + +oci_tar( + name = "ubuntu_test_runtime.tar", + image = ":ubuntu_test_runtime_image_src", + repo_tags = ["ubuntu_test_runtime:image"], +) + +uvm_config_image( + name = "colocate_uvm_config_image", + srcs = [ + ":ubuntu_test_runtime.tar", + "//rs/tests:activate-systest-uvm-config", + ], + remap_paths = { + "activate-systest-uvm-config": "activate", + }, + tags = ["manual"], # this target will be built if required as a dependency of another target +) + +# Packages we install into the image; see base image definition and its +# 'manifest = ...' field for more information +PACKAGES_NOBLE = [ "@noble//bash/amd64", "@noble//ca-certificates/amd64", "@noble//coreutils/amd64", @@ -53,7 +153,7 @@ PACKAGES = [ ] tar( - name = "sh", + name = "sh_noble", mtree = [ # needed as dpkg assumes sh is installed in a typical debian installation. "./usr/bin/sh type=link link=/bin/bash", @@ -61,7 +161,7 @@ tar( ) tar( - name = "mkfsvfat", + name = "mkfsvfat_noble", mtree = [ # symlink instead of updating the PATH "./usr/bin/mkfs.vfat type=link link=/sbin/mkfs.vfat", @@ -69,7 +169,7 @@ tar( ) tar( - name = "awk", + name = "awk_noble", mtree = [ # we install the GNU variant but call 'awk' so we symlink "./usr/bin/awk type=link link=/usr/bin/gawk", @@ -78,10 +178,10 @@ tar( # Creates /var/lib/dpkg/status with installed package information. 
dpkg_status( - name = "dpkg_status", + name = "dpkg_status_noble", controls = [ "%s:control" % package - for package in PACKAGES + for package in PACKAGES_NOBLE ], target_compatible_with = [ "@platforms//os:linux", @@ -89,7 +189,7 @@ dpkg_status( ) passwd( - name = "passwd", + name = "passwd_noble", entries = [ dict( gecos = ["root"], @@ -103,33 +203,33 @@ passwd( ) oci_image( - name = "ubuntu_test_runtime_image_src", + name = "ubuntu_noble_test_runtime_image_src", architecture = "amd64", os = "linux", target_compatible_with = [ "@platforms//os:linux", ], tars = [ - ":dpkg_status", - ":passwd", # needed because we ssh in + ":dpkg_status_noble", + ":passwd_noble", # needed because we ssh in # symlinks - ":awk", - ":mkfsvfat", - ":sh", - ] + PACKAGES, + ":awk_noble", + ":mkfsvfat_noble", + ":sh_noble", + ] + PACKAGES_NOBLE, ) oci_tar( - name = "ubuntu_test_runtime.tar", - image = ":ubuntu_test_runtime_image_src", - repo_tags = ["ubuntu_test_runtime:image"], + name = "ubuntu_noble_test_runtime.tar", + image = ":ubuntu_noble_test_runtime_image_src", + repo_tags = ["ubuntu_noble_test_runtime:image"], ) uvm_config_image( - name = "colocate_uvm_config_image", + name = "colocate_uvm_noble_config_image", srcs = [ - ":ubuntu_test_runtime.tar", + ":ubuntu_noble_test_runtime.tar", "//rs/tests:activate-systest-uvm-config", ], remap_paths = { diff --git a/testnet/ansible/debug_vars_dump.yml b/testnet/ansible/debug_vars_dump.yml index bf53ba97fd0..22c99a4b077 100644 --- a/testnet/ansible/debug_vars_dump.yml +++ b/testnet/ansible/debug_vars_dump.yml @@ -4,5 +4,5 @@ tasks: - name: Display all variables/facts known for a host - ansible.builtin.debug: + debug: var: hostvars[inventory_hostname] diff --git a/testnet/ansible/ic_p8s_service_discovery_destroy.yml b/testnet/ansible/ic_p8s_service_discovery_destroy.yml index 8e346945d52..0dc3c8bebdc 100644 --- a/testnet/ansible/ic_p8s_service_discovery_destroy.yml +++ b/testnet/ansible/ic_p8s_service_discovery_destroy.yml @@ -8,7 +8,7 @@ - name: Remove all service discovery scraping targets vars: ic: "{{ ic_deployment_name }}" - ansible.builtin.uri: + uri: url: "{{ multiservice_discovery_server }}/{{ ic }}" method: DELETE return_content: true diff --git a/testnet/ansible/ic_p8s_service_discovery_install.yml b/testnet/ansible/ic_p8s_service_discovery_install.yml index f47f7cab9b4..148d56a2006 100644 --- a/testnet/ansible/ic_p8s_service_discovery_install.yml +++ b/testnet/ansible/ic_p8s_service_discovery_install.yml @@ -11,14 +11,14 @@ tasks: - name: Check action - ansible.builtin.fail: + fail: msg: "\"'ic_git_revision' not given.\"" when: ic_git_revision is not defined - name: Generate scrape configurations for [host_node_exporter, node_exporter, orchestrator, replica] vars: ic: "{{ ic_deployment_name }}" - ansible.builtin.uri: + uri: url: "{{ multiservice_discovery_server }}/" method: POST body_format: json @@ -35,7 +35,7 @@ - name: Generate scrape configurations for [boundary_nodes] vars: ic: "{{ ic_deployment_name }}" - ansible.builtin.uri: + uri: url: "{{ multiservice_discovery_server }}/add_boundary_node" method: POST body_format: json diff --git a/testnet/ansible/roles/ic_guest/tasks/aux_disk_push.yml b/testnet/ansible/roles/ic_guest/tasks/aux_disk_push.yml index e5bfc937de2..0cc52141be7 100644 --- a/testnet/ansible/roles/ic_guest/tasks/aux_disk_push.yml +++ b/testnet/ansible/roles/ic_guest/tasks/aux_disk_push.yml @@ -1,13 +1,13 @@ --- - name: Remove existing disk-img.tar.zst - ansible.builtin.file: + file: state: absent path: "{{ ic_disk_path 
}}/disk-img.tar.zst" run_once: true tags: [ic_guest] - name: Archive disk.img - ansible.builtin.command: + command: cmd: | tar --directory={{ ic_disk_path }} --create \ --sparse \ @@ -22,14 +22,14 @@ tags: [ic_guest] - name: Synchronize disk-img.tar.zst - ansible.builtin.copy: - mode: preserve + synchronize: + mode: push src: "{{ ic_disk_path }}/disk-img.tar.zst" dest: /var/local/ic/disk/{{ ic_deployment_name }}/{{ ic_git_revision }}/aux/disk-img.tar.zst tags: [ic_guest] - name: Unarchive file disk-img.tar.zst - ansible.builtin.unarchive: + unarchive: src: /var/local/ic/disk/{{ ic_deployment_name }}/{{ ic_git_revision }}/aux_disk-img.tar.zst dest: /var/local/ic/disk owner: root diff --git a/testnet/ansible/roles/ic_guest/tasks/create.yml b/testnet/ansible/roles/ic_guest/tasks/create.yml index 84efc6466e2..247713d9844 100644 --- a/testnet/ansible/roles/ic_guest/tasks/create.yml +++ b/testnet/ansible/roles/ic_guest/tasks/create.yml @@ -5,7 +5,7 @@ boundary_nodes: "{{ hostvars[inventory_hostname].groups.boundary | intersect(hostvars[inventory_hostname].ic_guests) | default([]) }}" aux_nodes: "{{ hostvars[inventory_hostname].groups.aux | intersect(hostvars[inventory_hostname].ic_guests) | default([]) }}" replica_nodes: "{{ hostvars[inventory_hostname].ic_guests | default([]) | difference(api_nodes) | difference(aux_nodes) | difference(boundary_nodes) }}" - ansible.builtin.shell: | + shell: | set -eExuo pipefail MEDIA_DIR="/var/local/ic/media/{{ ic_deployment_name }}/{{ ic_git_revision }}" @@ -26,7 +26,7 @@ aux_nodes: "{{ hostvars[inventory_hostname].groups.aux | intersect(hostvars[inventory_hostname].ic_guests) | default([]) }}" replica_nodes: "{{ hostvars[inventory_hostname].ic_guests | default([]) | difference(api_nodes) | difference(aux_nodes) | difference(boundary_nodes) }}" ic_disk_size_bytes: "{{ ic_disk_gb | int * (1024 | pow(3)) | int }}" - ansible.builtin.shell: | + shell: | set -eExuo pipefail # Deploy replica images @@ -48,7 +48,7 @@ - name: Copy file media.img for boundary node VMs vars: boundary_nodes: "{{ hostvars[inventory_hostname].groups.boundary | intersect(hostvars[inventory_hostname].ic_guests) | default([]) }}" - ansible.builtin.shell: | + shell: | set -eExuo pipefail MEDIA_DIR="/var/local/ic/media/{{ ic_deployment_name }}/{{ ic_git_revision }}/boundary" @@ -66,7 +66,7 @@ - name: Copy file media.img for api node VMs vars: api_nodes: "{{ hostvars[inventory_hostname].groups.api | intersect(hostvars[inventory_hostname].ic_guests) | default([]) }}" - ansible.builtin.shell: | + shell: | set -eExuo pipefail MEDIA_DIR="/var/local/ic/media/{{ ic_deployment_name }}/{{ ic_git_revision }}/api" @@ -85,7 +85,7 @@ vars: api_nodes: "{{ hostvars[inventory_hostname].groups.api | intersect(hostvars[inventory_hostname].ic_guests) | default([]) }}" ic_disk_size_bytes: "{{ ic_disk_gb | int * (1024 | pow(3)) | int }}" - ansible.builtin.shell: | + shell: | set -eExuo pipefail # Deploy replica images @@ -109,7 +109,7 @@ api_nodes: "{{ hostvars[inventory_hostname].groups.api | intersect(hostvars[inventory_hostname].ic_guests) | default([]) }}" boundary_nodes: "{{ hostvars[inventory_hostname].groups.boundary | intersect(hostvars[inventory_hostname].ic_guests) | default([]) }}" ic_disk_size_bytes: "{{ ic_disk_gb | int * (1024 | pow(3)) | int }}" - ansible.builtin.shell: | + shell: | set -eExuo pipefail # Deploy replica images @@ -132,7 +132,7 @@ vars: aux_nodes: "{{ hostvars[inventory_hostname].groups.aux | intersect(hostvars[inventory_hostname].ic_guests) | default([]) }}" ic_disk_size_bytes: "{{ 
ic_disk_gb | int * (1024 | pow(3)) | int }}" - ansible.builtin.shell: | + shell: | set -eExuo pipefail IMAGE_DIR="/var/local/ic/disk/{{ ic_deployment_name }}/{{ ic_git_revision }}/aux" @@ -159,7 +159,7 @@ ic_memory_kib: "{{ ic_memory_gb | int * (1024 | pow(2)) }}" mac_address: "{{ hostvars[item].mac_address }}" ipv6_interface: "{{ hostvars[item].ipv6_interface | default('vlan66') }}" - ansible.builtin.template: + template: src: ../ic_guest/templates/guest.xml dest: /var/local/ic/disk/{{ ic_deployment_name }}/{{ item }}.xml owner: root @@ -179,7 +179,7 @@ mac_address: "{{ hostvars[item].mac_address }}" ipv4_interface: "{{ hostvars[item].ipv4_interface | default('vlan66') }}" ipv6_interface: "{{ hostvars[item].ipv6_interface | default('vlan66') }}" - ansible.builtin.template: + template: src: ../ic_guest/templates/generic.xml dest: /var/local/ic/disk/{{ ic_deployment_name }}/{{ item }}.xml owner: root @@ -191,7 +191,7 @@ tags: [ic_guest] - name: Define (create) a guest - ansible.builtin.shell: | + shell: | set -eExuo pipefail virsh define "/var/local/ic/disk/{{ ic_deployment_name }}/{{ item }}.xml" virsh dominfo "{{ item }}" @@ -203,19 +203,19 @@ tags: [ic_guest] - name: Check if dfinity-hsm-agent service exist - ansible.builtin.command: service dfinity-hsm-agent status + command: service dfinity-hsm-agent status register: dfinity_hsm_agent_status failed_when: not(dfinity_hsm_agent_status.rc == 4 or dfinity_hsm_agent_status.rc == 3 or dfinity_hsm_agent_status.rc == 0) - name: Stop the dfinity-hsm-agent.service - ansible.builtin.systemd: + systemd: state: stopped name: dfinity-hsm-agent when: dfinity_hsm_agent_status.rc == 0 tags: [ic_guest] - name: Ensure potentially conflicting kernel modules are not loaded - ansible.builtin.shell: | + shell: | if lsmod | grep -q vhost_vsock; then exit 0; fi # the required kernel module is already loaded get_depends() { lsmod | awk -vmod="$1" '($1==mod){gsub(",", "\n", $4); print $4}' ; } recursive_get_depends() { while read line ; do echo $line; get_depends $line ; done | grep . 
; } @@ -228,12 +228,12 @@ tags: [ic_guest] - name: Load the vhost_vsock kernel module - ansible.builtin.command: modprobe vhost_vsock + command: modprobe vhost_vsock ignore_errors: true tags: [ic_guest] - name: Start the dfinity-hsm-agent.service - ansible.builtin.systemd: + systemd: state: started name: dfinity-hsm-agent when: dfinity_hsm_agent_status.rc == 0 diff --git a/testnet/ansible/roles/ic_guest/tasks/destroy.yml b/testnet/ansible/roles/ic_guest/tasks/destroy.yml index 5cff20c4bdf..c9ead43d674 100644 --- a/testnet/ansible/roles/ic_guest/tasks/destroy.yml +++ b/testnet/ansible/roles/ic_guest/tasks/destroy.yml @@ -1,6 +1,6 @@ --- - name: Destroy guests - ansible.builtin.shell: | + shell: | set -euo pipefail # Destroy (stop) all existing VMs from this deployment running on this node diff --git a/testnet/ansible/roles/ic_guest/tasks/disk_pull.yml b/testnet/ansible/roles/ic_guest/tasks/disk_pull.yml index 9a85c8fa1ae..a622d86b8bc 100644 --- a/testnet/ansible/roles/ic_guest/tasks/disk_pull.yml +++ b/testnet/ansible/roles/ic_guest/tasks/disk_pull.yml @@ -4,7 +4,7 @@ aux_nodes: "{{ hostvars[inventory_hostname].groups.aux | intersect(hostvars[inventory_hostname].ic_guests) | default([]) }}" boundary_nodes: "{{ hostvars[inventory_hostname].groups.boundary | intersect(hostvars[inventory_hostname].ic_guests) | default([]) }}" replica_nodes: "{{ hostvars[inventory_hostname].ic_guests | default([]) | difference(aux_nodes) | difference(boundary_nodes) }}" - ansible.builtin.shell: | + shell: | set -eExuo pipefail # check if disk image is available on the new or the old location for the given hash PROXY_CONNECT_TIMEOUT=5 @@ -97,7 +97,7 @@ aux_nodes: "{{ hostvars[inventory_hostname].groups.aux | intersect(hostvars[inventory_hostname].ic_guests) | default([]) }}" boundary_nodes: "{{ hostvars[inventory_hostname].groups.boundary | intersect(hostvars[inventory_hostname].ic_guests) | default([]) }}" replica_nodes: "{{ hostvars[inventory_hostname].ic_guests | default([]) | difference(aux_nodes) | difference(boundary_nodes) }}" - ansible.builtin.shell: | + shell: | # if the unpacked file does not exist or if the archive is newer than the unpacked file set -eExuo pipefail diff --git a/testnet/ansible/roles/ic_guest/tasks/disk_push.yml b/testnet/ansible/roles/ic_guest/tasks/disk_push.yml index 7185917f9a7..00a7303733f 100644 --- a/testnet/ansible/roles/ic_guest/tasks/disk_push.yml +++ b/testnet/ansible/roles/ic_guest/tasks/disk_push.yml @@ -1,13 +1,13 @@ --- - name: Remove existing disk-img.tar.zst - ansible.builtin.file: + file: state: absent path: "{{ ic_disk_path }}/disk-img.tar.zst" run_once: true tags: [ic_guest] - name: Archive disk.img - ansible.builtin.command: + command: cmd: | tar --directory={{ ic_disk_path }} --create \ --sparse \ @@ -22,14 +22,14 @@ tags: [ic_guest] - name: Synchronize disk-img.tar.zst - ansible.builtin.copy: - mode: preserve + synchronize: + mode: push src: "{{ ic_disk_path }}/disk-img.tar.zst" dest: /var/local/ic/disk/{{ ic_deployment_name }}/{{ ic_git_revision }}/disk-img.tar.zst tags: [ic_guest] - name: Unarchive file disk-img.tar.zst - ansible.builtin.unarchive: + unarchive: src: /var/local/ic/disk/{{ ic_deployment_name }}/{{ ic_git_revision }}/disk-img.tar.zst dest: /var/local/ic/disk owner: root diff --git a/testnet/ansible/roles/ic_guest/tasks/install.yml b/testnet/ansible/roles/ic_guest/tasks/install.yml index bc7eab03342..66e65c1e8f8 100644 --- a/testnet/ansible/roles/ic_guest/tasks/install.yml +++ b/testnet/ansible/roles/ic_guest/tasks/install.yml @@ -1,6 +1,6 @@ --- 
- name: Wait for replica to listen on all NNS nodes on port 8080 - ansible.builtin.include_tasks: + include_tasks: file: url_waitfor_connect.yml loop: "{{ groups['nns'] }}" loop_control: @@ -8,21 +8,21 @@ tags: [ic_guest] - name: Check if the initial neuron config exists - ansible.builtin.stat: + stat: path: "{{ inventory_file | dirname }}/initial-neurons.csv" register: stat_result become: false delegate_to: localhost run_once: true -- ansible.builtin.debug: +- debug: msg: Using the initial neuron configuration from {{ inventory_file | dirname }}/initial-neurons.csv delegate_to: localhost run_once: true when: stat_result.stat.exists - name: Set initial_neurons to path to csv file it is exists and is not set already - ansible.builtin.set_fact: + set_fact: initial_neurons: "{{ stat_result.stat.path }}" delegate_to: localhost run_once: true @@ -30,15 +30,14 @@ when: stat_result.stat.exists and initial_neurons is undefined - name: Print value of initial_neurons - ansible.builtin.debug: + debug: var: initial_neurons - cmd: "" delegate_to: localhost run_once: true become: false - name: Get Custom NNS canisters - ansible.builtin.shell: | + shell: | set -exuo pipefail echo "Media path: {{ ic_media_path }}" echo "Adding canisters from '{{ custom_canister_dir }}'" @@ -54,13 +53,12 @@ tags: [ic_guest] when: custom_canister_dir is defined -- ansible.builtin.debug: +- debug: var: nns_canisters.stdout_lines - cmd: "" when: custom_canister_dir is defined - name: Install NNS canisters - ansible.builtin.shell: | + shell: | set -exuo pipefail NODE0_NAME={{ groups['nns'][0] }} @@ -128,7 +126,7 @@ tags: [ic_guest] - name: Wait for all nodes to listen on port 8080 - ansible.builtin.include_tasks: + include_tasks: file: url_waitfor_connect.yml when: - "'subnet_unassigned' not in hostvars[item].group_names and 'api' not in hostvars[item].group_names diff --git a/testnet/ansible/roles/ic_guest/tasks/main.yml b/testnet/ansible/roles/ic_guest/tasks/main.yml index 4ba9e1a06bc..503d4ee23e9 100644 --- a/testnet/ansible/roles/ic_guest/tasks/main.yml +++ b/testnet/ansible/roles/ic_guest/tasks/main.yml @@ -1,60 +1,60 @@ --- - name: Import tasks prepare - ansible.builtin.import_tasks: prepare.yml + import_tasks: prepare.yml when: ic_state == "create" tags: [ic_guest] - name: Import tasks disk_pull - ansible.builtin.import_tasks: disk_pull.yml + import_tasks: disk_pull.yml when: ic_state == "create" and (ic_disk_path | length == 0) tags: [ic_guest] - name: Import tasks disk_push - ansible.builtin.import_tasks: disk_push.yml + import_tasks: disk_push.yml when: ic_state == "create" and (ic_disk_path | length > 0) tags: [ic_guest] - name: Import tasks disk_push - ansible.builtin.import_tasks: aux_disk_push.yml + import_tasks: aux_disk_push.yml when: ic_state == "create" and ic_disk_path tags: [ic_guest] - name: Import tasks media_pull - ansible.builtin.import_tasks: media_pull.yml + import_tasks: media_pull.yml when: ic_state == "create" and (ic_media_path | length == 0) tags: [ic_guest] - name: Import tasks media_push - ansible.builtin.import_tasks: media_push.yml + import_tasks: media_push.yml when: ic_state == "create" and (ic_media_path | length > 0) tags: [ic_guest] - name: Import tasks - ansible.builtin.import_tasks: create.yml + import_tasks: create.yml when: ic_state == "create" tags: [ic_guest] - name: Import tasks start - ansible.builtin.import_tasks: start.yml + import_tasks: start.yml when: ic_state == "start" tags: [ic_guest] - name: Import tasks shutdown - ansible.builtin.import_tasks: shutdown.yml + 
import_tasks: shutdown.yml when: ic_state == "shutdown" tags: [ic_guest] - name: Import tasks install - ansible.builtin.import_tasks: install.yml + import_tasks: install.yml when: ic_state == "install" tags: [ic_guest] - name: Import tasks stop - ansible.builtin.import_tasks: stop.yml + import_tasks: stop.yml when: ic_state == "stop" tags: [ic_guest] - name: Import tasks destroy - ansible.builtin.import_tasks: destroy.yml + import_tasks: destroy.yml when: ic_state == "destroy" tags: [ic_guest] diff --git a/testnet/ansible/roles/ic_guest/tasks/media_pull.yml b/testnet/ansible/roles/ic_guest/tasks/media_pull.yml index b2c7374e158..e01442448e6 100644 --- a/testnet/ansible/roles/ic_guest/tasks/media_pull.yml +++ b/testnet/ansible/roles/ic_guest/tasks/media_pull.yml @@ -1,5 +1,5 @@ --- - name: Print information - ansible.builtin.debug: + debug: msg: 'CI/CD pipeline for media.img unavailable, use push mode: -e ic_media_path="/home/user/media/"' tags: [ic_guest] diff --git a/testnet/ansible/roles/ic_guest/tasks/media_push.yml b/testnet/ansible/roles/ic_guest/tasks/media_push.yml index 33d10aff849..12c6bb883f1 100644 --- a/testnet/ansible/roles/ic_guest/tasks/media_push.yml +++ b/testnet/ansible/roles/ic_guest/tasks/media_push.yml @@ -5,7 +5,7 @@ aux_nodes: "{{ hostvars[inventory_hostname].groups.aux | intersect(hostvars[inventory_hostname].ic_guests) | default([]) }}" boundary_nodes: "{{ hostvars[inventory_hostname].groups.boundary | intersect(hostvars[inventory_hostname].ic_guests) | default([]) }}" replica_nodes: "{{ hostvars[inventory_hostname].ic_guests | default([]) | difference(api_nodes) | difference(aux_nodes) | difference(boundary_nodes) }}" - ansible.builtin.copy: + copy: # copy the media image built for the node, or fallback to the generic 'media.img' src: "{{ lookup('first_found', ['{{ ic_media_path }}/{{ item }}.img', '{{ ic_media_path }}/media.img']) }}" dest: /var/local/ic/media/{{ ic_deployment_name }}/{{ ic_git_revision }}/{{ item }}.img @@ -16,7 +16,7 @@ tags: [ic_guest] - name: Copy new api node VM media.img to remote - ansible.builtin.copy: + copy: # copy the media image built for the new api node src: "{{ api_media_path }}/{{ item }}.img" dest: /var/local/ic/media/{{ ic_deployment_name }}/{{ ic_git_revision }}/api/ @@ -27,7 +27,7 @@ tags: [api_node_vm] - name: Copy single api node VM media.img to remote - ansible.builtin.copy: + copy: # copy the media image built for the new api node src: "{{ api_media_path }}/{{ item }}.img" dest: /var/lib/libvirt/media/{{ item }}.img @@ -40,7 +40,7 @@ tags: [api_node_vm] - name: Copy new boundary node VM media.img to remote - ansible.builtin.copy: + copy: # copy the media image built for the new boundary node src: "{{ bn_media_path }}/{{ item }}.img" dest: /var/local/ic/media/{{ ic_deployment_name }}/{{ ic_git_revision }}/boundary/ @@ -51,7 +51,7 @@ tags: [boundary_node_vm] - name: Copy single boundary node VM media.img to remote - ansible.builtin.copy: + copy: # copy the media image built for the new boundary node src: "{{ bn_media_path }}/{{ item }}.img" dest: /var/lib/libvirt/media/{{ item }}.img diff --git a/testnet/ansible/roles/ic_guest/tasks/prepare.yml b/testnet/ansible/roles/ic_guest/tasks/prepare.yml index 903ef351d8c..21c1135d9ca 100644 --- a/testnet/ansible/roles/ic_guest/tasks/prepare.yml +++ b/testnet/ansible/roles/ic_guest/tasks/prepare.yml @@ -1,6 +1,6 @@ --- - name: Create directories - ansible.builtin.shell: | + shell: | set -eExuo pipefail mkdir -v -p /var/local/ic/{disk,media}/{{ ic_deployment_name }}/{{ ic_git_revision 
}}/ @@ -18,13 +18,13 @@ tags: [ic_guest] - name: Install GNU parallel - ansible.builtin.apt: + apt: name: parallel state: present tags: [ic_guest] - name: Install zstd - ansible.builtin.apt: + apt: name: zstd state: present tags: [ic_guest] diff --git a/testnet/ansible/roles/ic_guest/tasks/shutdown.yml b/testnet/ansible/roles/ic_guest/tasks/shutdown.yml index c7f656b4f53..8afc1461126 100644 --- a/testnet/ansible/roles/ic_guest/tasks/shutdown.yml +++ b/testnet/ansible/roles/ic_guest/tasks/shutdown.yml @@ -1,6 +1,6 @@ --- # Prompt for USB key press. ASCII art is hard to escape, so base64 it is -- ansible.builtin.debug: +- debug: msg: - "{{ '8J+aqPCflJHwn5qo8J+UkfCfmqjwn5SR8J+aqPCflJHwn5qo8J+UkfCfmqjwn5SR8J+aqPCflJHwn5qo8J+UkfCfmqjwn5SR8J+aqPCflJHwn5qo8J+UkfCfmqjwn5SR8J+aqPCflJHwn5qo8J+UkfCfmqjwn5SR8J+aqPCflJHwn5qo8J+UkfCfmqjwn5SR8J+aqPCflJHwn5qo' | b64decode }}" @@ -15,7 +15,7 @@ tags: [ic_guest] - name: Shutdown guest - ansible.builtin.shell: | + shell: | set -Exu virsh shutdown "{{ item }}" # virsh shutdown command is asynchronous, so the next command waits upto 70sec for the VM to shutdown diff --git a/testnet/ansible/roles/ic_guest/tasks/start.yml b/testnet/ansible/roles/ic_guest/tasks/start.yml index 91b59018ca0..45fc3e8890a 100644 --- a/testnet/ansible/roles/ic_guest/tasks/start.yml +++ b/testnet/ansible/roles/ic_guest/tasks/start.yml @@ -1,6 +1,6 @@ --- - name: Start guest - ansible.builtin.shell: | + shell: | set -eExuo pipefail virsh start "{{ item }}" || true virsh autostart "{{ item }}" || true diff --git a/testnet/ansible/roles/ic_guest/tasks/stop.yml b/testnet/ansible/roles/ic_guest/tasks/stop.yml index 3c50986090b..5cbbb1a9c85 100644 --- a/testnet/ansible/roles/ic_guest/tasks/stop.yml +++ b/testnet/ansible/roles/ic_guest/tasks/stop.yml @@ -1,6 +1,6 @@ --- - name: Stop guest - ansible.builtin.shell: | + shell: | set -eExu virsh stop "{{ item }}" virsh autostart --disable "{{ item }}" diff --git a/testnet/ansible/roles/ic_guest/tasks/url_waitfor_connect.yml b/testnet/ansible/roles/ic_guest/tasks/url_waitfor_connect.yml index 30efe85c647..b835eb729f4 100644 --- a/testnet/ansible/roles/ic_guest/tasks/url_waitfor_connect.yml +++ b/testnet/ansible/roles/ic_guest/tasks/url_waitfor_connect.yml @@ -1,11 +1,11 @@ --- - name: URL to be checked - ansible.builtin.debug: + debug: msg: "{{ hostvars[item].guest_hostname }}: http://[{{ hostvars[item].ipv6_address }}]:8080/api/v2/status" tags: [ic_guest] - name: Wait for listener to respond on port 8080 - ansible.builtin.uri: + uri: url: http://[{{ hostvars[item].ipv6_address }}]:8080/api/v2/status register: _result until: _result.status == 200 diff --git a/testnet/ansible/roles/ic_guest_prod/tasks/disk_pull.yml b/testnet/ansible/roles/ic_guest_prod/tasks/disk_pull.yml index 64dae2e8ea9..cf91fbd24b2 100644 --- a/testnet/ansible/roles/ic_guest_prod/tasks/disk_pull.yml +++ b/testnet/ansible/roles/ic_guest_prod/tasks/disk_pull.yml @@ -1,6 +1,6 @@ --- - name: Download disk images - ansible.builtin.shell: | + shell: | set -eExuo pipefail CDN_URL="https://download.dfinity.systems" @@ -36,7 +36,7 @@ tags: [ic_guest] - name: Unarchive file disk-img.tar.zst - ansible.builtin.shell: | + shell: | set -eExuo pipefail # unarchive the disk diff --git a/testnet/ansible/roles/ic_guest_prod/tasks/main.yml b/testnet/ansible/roles/ic_guest_prod/tasks/main.yml index d66b20b7091..877306bb882 100644 --- a/testnet/ansible/roles/ic_guest_prod/tasks/main.yml +++ b/testnet/ansible/roles/ic_guest_prod/tasks/main.yml @@ -1,62 +1,62 @@ --- - name: Import tasks prepare - 
ansible.builtin.import_tasks: + import_tasks: # Import local tasks/prepare.yml if exists, otherwise use ic_guest/tasks/prepare.yml file: "{{ lookup('first_found', ['tasks/prepare.yml', '../ic_guest/tasks/prepare.yml']) }}" when: ic_state == "create" tags: [ic_guest] - name: Import tasks disk_pull - ansible.builtin.import_tasks: + import_tasks: file: "{{ lookup('first_found', ['tasks/disk_pull.yml', '../ic_guest/tasks/disk_pull.yml']) }}" when: ic_state == "create" and (ic_disk_path | length == 0) tags: [ic_guest] - name: Import tasks disk_push - ansible.builtin.import_tasks: + import_tasks: file: "{{ lookup('first_found', ['tasks/disk_push.yml', '../ic_guest/tasks/disk_push.yml']) }}" when: ic_state == "create" and (ic_disk_path | length > 0) tags: [ic_guest] - name: Import tasks media_pull - ansible.builtin.import_tasks: + import_tasks: file: "{{ lookup('first_found', ['tasks/media_pull.yml', '../ic_guest/tasks/media_pull.yml']) }}" when: ic_state == "create" and (ic_media_path | length == 0) tags: [ic_guest] - name: Import tasks media_push - ansible.builtin.import_tasks: + import_tasks: file: "{{ lookup('first_found', ['tasks/media_push.yml', '../ic_guest/tasks/media_push.yml']) }}" when: ic_state == "create" and (ic_media_path | length > 0) tags: [ic_guest] - name: Import tasks - ansible.builtin.import_tasks: + import_tasks: file: "{{ lookup('first_found', ['tasks/create.yml', '../ic_guest/tasks/create.yml']) }}" when: ic_state == "create" tags: [ic_guest] - name: Import tasks start - ansible.builtin.import_tasks: + import_tasks: file: "{{ lookup('first_found', ['tasks/start.yml', '../ic_guest/tasks/start.yml']) }}" when: ic_state == "start" tags: - ic_guest - name: Import tasks install - ansible.builtin.import_tasks: + import_tasks: file: "{{ lookup('first_found', ['tasks/install.yml', '../ic_guest/tasks/install.yml']) }}" when: ic_state == "install" tags: [ic_guest] - name: Import tasks stop - ansible.builtin.import_tasks: + import_tasks: file: "{{ lookup('first_found', ['tasks/stop.yml', '../ic_guest/tasks/stop.yml']) }}" when: ic_state == "stop" tags: [ic_guest] - name: Import tasks destroy - ansible.builtin.import_tasks: + import_tasks: file: "{{ lookup('first_found', ['tasks/destroy.yml', '../ic_guest/tasks/destroy.yml']) }}" when: ic_state == "destroy" tags: [ic_guest] From 1b65617d7f30b386e880f908949a4f56b524d566 Mon Sep 17 00:00:00 2001 From: Alin Sinpalean <58422065+alin-at-dfinity@users.noreply.github.com> Date: Thu, 5 Dec 2024 11:21:45 +0100 Subject: [PATCH 20/23] fix: Fix XNet end-to-end tests (#2985) Encode the arguments to the start method as the newly defined StartArgs struct instead of a tuple. --- rs/tests/message_routing/common/common.rs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/rs/tests/message_routing/common/common.rs b/rs/tests/message_routing/common/common.rs index 078d1cbe221..dc72b6836f5 100644 --- a/rs/tests/message_routing/common/common.rs +++ b/rs/tests/message_routing/common/common.rs @@ -7,6 +7,7 @@ use ic_system_test_driver::driver::test_env::TestEnv; use ic_system_test_driver::driver::test_env_api::get_dependency_path; use slog::info; use std::{convert::TryFrom, env}; +use xnet_test::StartArgs; /// Concurrently calls `start` on all canisters in `canisters` with the /// given parameters. 
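For background on the call-site change below: Candid encodes a method's arguments as a sequence of values, so the old bare 3-tuple was sent as three arguments, while the canister's `start` method now takes a single `StartArgs` record and therefore needs the value wrapped in a one-element tuple. A minimal, illustrative sketch only — the field types of `StartArgs` shown here are assumptions for illustration, and the real definition lives in the `xnet_test` crate:

use candid::{CandidType, Encode};

#[derive(CandidType)]
struct StartArgs {
    network_topology: Vec<Vec<Vec<u8>>>, // hypothetical representation of the topology
    canister_to_subnet_rate: u64,
    payload_size_bytes: u64,
}

fn encode_single_record_arg(args: StartArgs) -> Vec<u8> {
    // Encoding the struct yields one record argument, matching `update_("start", candid, (input,))`;
    // `Encode!(&a, &b, &c)` would instead produce three separate arguments.
    Encode!(&args).expect("failed to encode StartArgs")
}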
@@ -29,10 +30,14 @@ pub async fn start_all_canisters( .enumerate() .flat_map(|(x, v)| v.iter().enumerate().map(move |(y, v)| (x, y, v))) { - let input = (&topology, canister_to_subnet_rate, payload_size_bytes); + let input = StartArgs { + network_topology: topology.clone(), + canister_to_subnet_rate, + payload_size_bytes, + }; futures.push(async move { let _: String = canister - .update_("start", candid, input) + .update_("start", candid, (input,)) .await .unwrap_or_else(|e| { panic!( From 1f88ae12ac756a91901ec49b3506400ae55d6829 Mon Sep 17 00:00:00 2001 From: oggy-dfin <89794951+oggy-dfin@users.noreply.github.com> Date: Thu, 5 Dec 2024 12:57:09 +0100 Subject: [PATCH 21/23] feat(IC-1579): Spawn neuron(s) TLA instrumentation (#2944) This adds the TLA instrumentation for spawn_neuron/spawn_neurons. Also extends the instrumentation framework to deal with non-async methods (as `spawn_neuron` isn't async), fixes a bug in the instrumentation (where a failed check would panic, leading to the starvation of the channel listener), and adds state diffs to more easily debug divergences from the model. --- rs/nns/governance/src/governance.rs | 20 +- rs/nns/governance/src/governance/tla/mod.rs | 55 ++++-- .../src/governance/tla/spawn_neuron.rs | 35 ++++ .../src/governance/tla/spawn_neurons.rs | 57 ++++++ rs/nns/governance/tests/governance.rs | 2 + rs/nns/governance/tla/Claim_Neuron.tla | 23 ++- .../governance/tla/Claim_Neuron_Apalache.tla | 6 +- .../governance/tla/Merge_Neurons_Apalache.tla | 6 +- rs/nns/governance/tla/Spawn_Neuron.tla | 107 ++++++++++ .../governance/tla/Spawn_Neuron_Apalache.tla | 64 ++++++ rs/nns/governance/tla/Spawn_Neurons.tla | 187 ++++++++++++++++++ .../governance/tla/Spawn_Neurons_Apalache.tla | 63 ++++++ .../governance/tla/Split_Neuron_Apalache.tla | 20 +- .../tla_instrumentation/src/checker.rs | 2 + .../tla_instrumentation/src/tla_state.rs | 28 ++- .../tla_instrumentation/src/tla_value.rs | 105 +++++++++- .../tests/multiple_calls.rs | 6 +- .../tla_instrumentation/tests/structs.rs | 6 +- .../src/lib.rs | 81 ++++---- 19 files changed, 792 insertions(+), 81 deletions(-) create mode 100644 rs/nns/governance/src/governance/tla/spawn_neuron.rs create mode 100644 rs/nns/governance/src/governance/tla/spawn_neurons.rs create mode 100644 rs/nns/governance/tla/Spawn_Neuron.tla create mode 100644 rs/nns/governance/tla/Spawn_Neuron_Apalache.tla create mode 100644 rs/nns/governance/tla/Spawn_Neurons.tla create mode 100644 rs/nns/governance/tla/Spawn_Neurons_Apalache.tla diff --git a/rs/nns/governance/src/governance.rs b/rs/nns/governance/src/governance.rs index c4b1c7645d2..52f8fa866db 100644 --- a/rs/nns/governance/src/governance.rs +++ b/rs/nns/governance/src/governance.rs @@ -138,10 +138,13 @@ pub mod tla_macros; #[cfg(feature = "tla")] pub mod tla; +#[cfg(feature = "tla")] +use std::collections::BTreeSet; #[cfg(feature = "tla")] pub use tla::{ tla_update_method, InstrumentationState, ToTla, CLAIM_NEURON_DESC, MERGE_NEURONS_DESC, - SPLIT_NEURON_DESC, TLA_INSTRUMENTATION_STATE, TLA_TRACES_LKEY, TLA_TRACES_MUTEX, + SPAWN_NEURONS_DESC, SPAWN_NEURON_DESC, SPLIT_NEURON_DESC, TLA_INSTRUMENTATION_STATE, + TLA_TRACES_LKEY, TLA_TRACES_MUTEX, }; // 70 KB (for executing NNS functions that are not canister upgrades) @@ -3100,6 +3103,7 @@ impl Governance { /// - The parent neuron is not spawning itself. /// - The maturity to move to the new neuron must be such that, with every maturity modulation, at least /// NetworkEconomics::neuron_minimum_spawn_stake_e8s are created when the maturity is spawn. 
+    #[cfg_attr(feature = "tla", tla_update_method(SPAWN_NEURON_DESC.clone()))]
     pub fn spawn_neuron(
         &mut self,
         id: &NeuronId,
@@ -6689,6 +6693,7 @@ impl Governance {
     /// This means that programming in this method needs to be extra-defensive on the handling of results so that
     /// we're sure not to trap after we've acquired the global lock and made an async call, as otherwise the global
     /// lock will be permanently held and no spawning will occur until a upgrade to fix it is made.
+    #[cfg_attr(feature = "tla", tla_update_method(SPAWN_NEURONS_DESC.clone()))]
     pub async fn maybe_spawn_neurons(&mut self) {
         if !self.can_spawn_neurons() {
             return;
@@ -6720,6 +6725,12 @@ impl Governance {
             .neuron_store
             .list_ready_to_spawn_neuron_ids(now_seconds);
 
+        // We can't alias ready_to_spawn_ids in the loop below, but the TLA model needs access to it,
+        // so we clone it here.
+        #[cfg(feature = "tla")]
+        let mut _tla_ready_to_spawn_ids: BTreeSet<u64> =
+            ready_to_spawn_ids.iter().map(|nid| nid.id).collect();
+
         for neuron_id in ready_to_spawn_ids {
             // Actually mint the neuron's ICP.
             let in_flight_command = NeuronInFlightCommand {
@@ -6774,6 +6785,11 @@ impl Governance {
                 })
                 .unwrap();
 
+            tla_log_locals! {
+                neuron_id: neuron_id.id,
+                ready_to_spawn_ids: _tla_ready_to_spawn_ids
+            };
+
             // Do the transfer, this is a minting transfer, from the governance canister's
             // (which is also the minting canister) main account into the neuron's
             // subaccount.
@@ -6819,6 +6835,8 @@ impl Governance {
                     continue;
                 }
             }
+            #[cfg(feature = "tla")]
+            _tla_ready_to_spawn_ids.remove(&neuron_id.id);
         }
 
         // Release the global spawning lock
diff --git a/rs/nns/governance/src/governance/tla/mod.rs b/rs/nns/governance/src/governance/tla/mod.rs
index 89ea93fbe64..7c2d4b9131d 100644
--- a/rs/nns/governance/src/governance/tla/mod.rs
+++ b/rs/nns/governance/src/governance/tla/mod.rs
@@ -26,10 +26,14 @@ pub use store::{TLA_INSTRUMENTATION_STATE, TLA_TRACES_LKEY, TLA_TRACES_MUTEX};
 
 mod claim_neuron;
 mod merge_neurons;
+mod spawn_neuron;
+mod spawn_neurons;
 mod split_neuron;
 
 pub use claim_neuron::CLAIM_NEURON_DESC;
 pub use merge_neurons::MERGE_NEURONS_DESC;
+pub use spawn_neuron::SPAWN_NEURON_DESC;
+pub use spawn_neurons::SPAWN_NEURONS_DESC;
 pub use split_neuron::SPLIT_NEURON_DESC;
 
 fn neuron_global(gov: &Governance) -> TlaValue {
@@ -104,6 +108,20 @@ pub fn get_tla_globals(gov: &Governance) -> GlobalState {
             .to_tla_value(),
     );
     state.add("transaction_fee", gov.transaction_fee().to_tla_value());
+    state.add(
+        "spawning_neurons",
+        gov.heap_data
+            .spawning_neurons
+            .unwrap_or(false)
+            .to_tla_value(),
+    );
+    state.add(
+        "cached_maturity_basis_points",
+        gov.heap_data
+            .cached_daily_maturity_modulation_basis_points
+            .unwrap_or(0)
+            .to_tla_value(),
+    );
     state
 }
 
@@ -167,6 +185,11 @@ fn post_process_trace(trace: &mut Vec<ResolvedStatePair>) {
             .0
             .remove("min_stake")
             .expect("Didn't record the min stake");
+        state
+            .0
+            .0
+            .remove("cached_maturity_basis_points")
+            .expect("Didn't record the cached maturity basis points");
         if !state.0 .0.contains_key("governance_to_ledger") {
             state.0 .0.insert(
                 "governance_to_ledger".to_string(),
@@ -249,7 +272,7 @@ pub fn check_traces() {
             );
         }
         println!(
-            "Total of {} state pairs to be checked with Apalache; will retain {}",
+            "Total of {} state pairs to be checked with Apalache; will retain at most {}",
             total_pairs, STATE_PAIR_COUNT_LIMIT
         )
     }
@@ -291,16 +314,20 @@ pub fn check_traces() {
     }
 
     // A poor man's parallel_map; process up to MAX_THREADS state pairs in parallel. Use mpsc channels
-    // to signal threads becoming available.
+    // to signal threads becoming available. Additionally, use the channels to signal any errors while
+    // performing the Apalache checks.
     const MAX_THREADS: usize = 20;
     let mut running_threads = 0;
-    let (thread_freed_tx, thread_freed_rx) = mpsc::channel::<()>();
+    let (thread_freed_tx, thread_freed_rx) = mpsc::channel::<bool>();
 
     for (i, (update, constants, pair)) in all_pairs.iter().enumerate() {
         println!("Checking state pair #{}", i + 1);
         if running_threads >= MAX_THREADS {
-            thread_freed_rx
+            if thread_freed_rx
                 .recv()
-                .expect("Error while waiting for the thread completion signal");
+                .expect("Error while waiting for the thread completion signal")
+            {
+                panic!("An Apalache thread signalled an error")
+            }
             running_threads -= 1;
         }
@@ -314,7 +341,7 @@ pub fn check_traces() {
         running_threads += 1;
 
         let _handle = thread::spawn(move || {
-            let _ = check_tla_code_link(
+            let res = check_tla_code_link(
                 &apalache,
                 PredicateDescription {
                     tla_module,
@@ -329,20 +356,26 @@ pub fn check_traces() {
                 println!("If you are confident that your change is correct, please contact the #formal-models Slack channel and describe the problem.");
                 println!("You can edit nns/governance/feature_flags.bzl to disable TLA checks in the CI and get on with your business.");
                 println!("-------------------");
-                println!("Error occured while checking the state pair:\n{:#?}\nwith constants:\n{:#?}", e.pair, e.constants);
+                println!("Error occurred in TLA model {:?} and state pair:\n{:#?}\nwith constants:\n{:#?}", e.model, e.pair, e.constants);
+                let diff = e.pair.diff();
+                if !diff.is_empty() {
+                    println!("Diff between states: {:#?}", diff);
+                }
                 println!("Apalache returned:\n{:#?}", e.apalache_error);
-                panic!("Apalache check failed")
             });
             thread_freed_rx
-                .send(())
+                .send(res.is_err())
                 .expect("Couldn't send the thread completion signal");
         });
     }
 
     while running_threads > 0 {
-        thread_freed_rx
+        if thread_freed_rx
             .recv()
-            .expect("Error while waiting for the thread completion signal");
+            .expect("Error while waiting for the thread completion signal")
+        {
+            panic!("An Apalache thread signalled an error")
+        }
         running_threads -= 1;
     }
 }
diff --git a/rs/nns/governance/src/governance/tla/spawn_neuron.rs b/rs/nns/governance/src/governance/tla/spawn_neuron.rs
new file mode 100644
index 00000000000..d079839759b
--- /dev/null
+++ b/rs/nns/governance/src/governance/tla/spawn_neuron.rs
@@ -0,0 +1,35 @@
+use lazy_static::lazy_static;
+use tla_instrumentation::{Label, ResolvedStatePair, TlaConstantAssignment, Update, VarAssignment};
+
+use super::common::governance_account_id;
+use super::{extract_common_constants, post_process_trace};
+
+lazy_static!
{
+    pub static ref SPAWN_NEURON_DESC: Update = {
+        const PID: &str = "Spawn_Neuron";
+        let default_locals = VarAssignment::new();
+
+        Update {
+            default_start_locals: default_locals.clone(),
+            default_end_locals: default_locals,
+            start_label: Label::new("SpawnNeuronStart"),
+            end_label: Label::new("SpawnNeuronStart"),
+            process_id: PID.to_string(),
+            canister_name: "governance".to_string(),
+            post_process: |trace| {
+                let constants = extract_spawn_neuron_constants(PID, trace);
+                post_process_trace(trace);
+                constants
+            },
+        }
+    };
+}
+
+fn extract_spawn_neuron_constants(pid: &str, trace: &[ResolvedStatePair]) -> TlaConstantAssignment {
+    TlaConstantAssignment {
+        constants: extract_common_constants(pid, trace)
+            .into_iter()
+            .chain([("Minting_Account_Id".to_string(), governance_account_id())])
+            .collect(),
+    }
+}
diff --git a/rs/nns/governance/src/governance/tla/spawn_neurons.rs b/rs/nns/governance/src/governance/tla/spawn_neurons.rs
new file mode 100644
index 00000000000..ec50f6198cd
--- /dev/null
+++ b/rs/nns/governance/src/governance/tla/spawn_neurons.rs
@@ -0,0 +1,57 @@
+use lazy_static::lazy_static;
+use std::collections::BTreeSet;
+use tla_instrumentation::{
+    Label, ResolvedStatePair, TlaConstantAssignment, ToTla, Update, VarAssignment,
+};
+
+use super::common::governance_account_id;
+use super::{extract_common_constants, post_process_trace};
+
+lazy_static! {
+    pub static ref SPAWN_NEURONS_DESC: Update = {
+        const PID: &str = "Spawn_Neurons";
+        let default_locals = VarAssignment::new()
+            .add("neuron_id", 0_u64.to_tla_value())
+            .add("ready_to_spawn_ids", BTreeSet::<u64>::new().to_tla_value());
+        Update {
+            default_start_locals: default_locals.clone(),
+            default_end_locals: default_locals,
+            start_label: Label::new("SpawnNeurons_Start"),
+            end_label: Label::new("SpawnNeurons_Start"),
+            process_id: PID.to_string(),
+            canister_name: "governance".to_string(),
+            post_process: |trace| {
+                let constants = extract_spawn_neurons_constants(PID, trace);
+                post_process_trace(trace);
+                constants
+            },
+        }
+    };
+}
+
+fn extract_spawn_neurons_constants(
+    pid: &str,
+    trace: &[ResolvedStatePair],
+) -> TlaConstantAssignment {
+    let maturity_modulation = (
+        "MATURITY_BASIS_POINTS".to_string(),
+        trace
+            .first()
+            .map(|pair| {
+                pair.start
+                    .get("cached_maturity_basis_points")
+                    .expect("cached_maturity_basis_points not recorded")
+                    .clone()
+            })
+            .unwrap_or(0_i32.to_tla_value()),
+    );
+    TlaConstantAssignment {
+        constants: extract_common_constants(pid, trace)
+            .into_iter()
+            .chain([
+                ("Minting_Account_Id".to_string(), governance_account_id()),
+                maturity_modulation,
+            ])
+            .collect(),
+    }
+}
diff --git a/rs/nns/governance/tests/governance.rs b/rs/nns/governance/tests/governance.rs
index d10fe64cc6f..6b91f55aad3 100644
--- a/rs/nns/governance/tests/governance.rs
+++ b/rs/nns/governance/tests/governance.rs
@@ -5750,6 +5750,7 @@ fn run_periodic_tasks_often_enough_to_update_maturity_modulation(gov: &mut Gover
 /// * The spawned neuron always has neuron_type: None, even if the parent's
 ///   neuron_type is NeuronType::Seed.
#[test] +#[cfg_attr(feature = "tla", with_tla_trace_check)] fn test_neuron_spawn() { let from = *TEST_NEURON_1_OWNER_PRINCIPAL; // Compute the subaccount to which the transfer would have been made @@ -5938,6 +5939,7 @@ fn test_neuron_spawn() { } #[test] +#[cfg_attr(feature = "tla", with_tla_trace_check)] fn test_neuron_spawn_with_subaccount() { let from = *TEST_NEURON_1_OWNER_PRINCIPAL; // Compute the subaccount to which the transfer would have been made diff --git a/rs/nns/governance/tla/Claim_Neuron.tla b/rs/nns/governance/tla/Claim_Neuron.tla index 17ddc4142a7..cd38eebed88 100644 --- a/rs/nns/governance/tla/Claim_Neuron.tla +++ b/rs/nns/governance/tla/Claim_Neuron.tla @@ -40,6 +40,7 @@ variables \* The queue of messages sent from the governance canister to the ledger canister governance_to_ledger = <<>>; ledger_to_governance = {}; + spawning_neurons = FALSE; macro cn_reset_local_vars() { account := DUMMY_ACCOUNT; @@ -101,12 +102,12 @@ process ( Claim_Neuron \in Claim_Neuron_Process_Ids ) } *) -\* BEGIN TRANSLATION (chksum(pcal) = "2eb57c75" /\ chksum(tla) = "9f05b2c5") -VARIABLES neuron, neuron_id_by_account, locks, governance_to_ledger, - ledger_to_governance, pc, account, neuron_id +\* BEGIN TRANSLATION (chksum(pcal) = "a7e1f417" /\ chksum(tla) = "bed42424") +VARIABLES pc, neuron, neuron_id_by_account, locks, governance_to_ledger, + ledger_to_governance, spawning_neurons, account, neuron_id -vars == << neuron, neuron_id_by_account, locks, governance_to_ledger, - ledger_to_governance, pc, account, neuron_id >> +vars == << pc, neuron, neuron_id_by_account, locks, governance_to_ledger, + ledger_to_governance, spawning_neurons, account, neuron_id >> ProcSet == (Claim_Neuron_Process_Ids) @@ -116,6 +117,7 @@ Init == (* Global variables *) /\ locks = {} /\ governance_to_ledger = <<>> /\ ledger_to_governance = {} + /\ spawning_neurons = FALSE (* Process Claim_Neuron *) /\ account = [self \in Claim_Neuron_Process_Ids |-> DUMMY_ACCOUNT] /\ neuron_id = [self \in Claim_Neuron_Process_Ids |-> 0] @@ -127,14 +129,14 @@ ClaimNeuron1(self) == /\ pc[self] = "ClaimNeuron1" \/ /\ \E aid \in Governance_Account_Ids \ DOMAIN(neuron_id_by_account): /\ account' = [account EXCEPT ![self] = aid] /\ neuron_id' = [neuron_id EXCEPT ![self] = FRESH_NEURON_ID(DOMAIN(neuron))] - /\ Assert(neuron_id'[self] \notin locks, - "Failure of assertion at line 72, column 13.") + /\ Assert(neuron_id'[self] \notin locks, + "Failure of assertion at line 73, column 13.") /\ locks' = (locks \union {neuron_id'[self]}) /\ neuron_id_by_account' = (account'[self] :> neuron_id'[self] @@ neuron_id_by_account) /\ neuron' = (neuron_id'[self] :> [ cached_stake |-> 0, account |-> account'[self], fees |-> 0, maturity |-> 0 ] @@ neuron) /\ governance_to_ledger' = Append(governance_to_ledger, request(self, account_balance(account'[self]))) /\ pc' = [pc EXCEPT ![self] = "WaitForBalanceQuery"] - /\ UNCHANGED ledger_to_governance + /\ UNCHANGED << ledger_to_governance, spawning_neurons >> WaitForBalanceQuery(self) == /\ pc[self] = "WaitForBalanceQuery" /\ \E answer \in { resp \in ledger_to_governance : resp.caller = self }: @@ -152,7 +154,8 @@ WaitForBalanceQuery(self) == /\ pc[self] = "WaitForBalanceQuery" /\ account' = [account EXCEPT ![self] = DUMMY_ACCOUNT] /\ neuron_id' = [neuron_id EXCEPT ![self] = 0] /\ pc' = [pc EXCEPT ![self] = "Done"] - /\ UNCHANGED governance_to_ledger + /\ UNCHANGED << governance_to_ledger, + spawning_neurons >> Claim_Neuron(self) == ClaimNeuron1(self) \/ WaitForBalanceQuery(self) @@ -167,6 +170,6 @@ Spec == Init /\ 
[][Next]_vars Termination == <>(\A self \in ProcSet: pc[self] = "Done") -\* END TRANSLATION +\* END TRANSLATION ==== diff --git a/rs/nns/governance/tla/Claim_Neuron_Apalache.tla b/rs/nns/governance/tla/Claim_Neuron_Apalache.tla index 57a0126ebfb..3e4c474dfea 100644 --- a/rs/nns/governance/tla/Claim_Neuron_Apalache.tla +++ b/rs/nns/governance/tla/Claim_Neuron_Apalache.tla @@ -52,7 +52,11 @@ VARIABLES \* @type: $proc -> Int; neuron_id, \* @type: $proc -> $account; - account + account, + \* Not used by this model, but it's a global variable used by spawn_neurons, so + \* it's the easiest to just add it to all the other models + \* @type: Bool; + spawning_neurons \* @type: Set($neuronId) => $neuronId; FRESH_NEURON_ID(existing_neurons) == CHOOSE nid \in (Neuron_Ids \ existing_neurons): TRUE diff --git a/rs/nns/governance/tla/Merge_Neurons_Apalache.tla b/rs/nns/governance/tla/Merge_Neurons_Apalache.tla index 4e029383977..cf51b4f4a4c 100644 --- a/rs/nns/governance/tla/Merge_Neurons_Apalache.tla +++ b/rs/nns/governance/tla/Merge_Neurons_Apalache.tla @@ -38,7 +38,11 @@ VARIABLES \* @type: $proc -> Int; fees_amount, \* @type: $proc -> Int; - amount_to_target + amount_to_target, + \* Not used by this model, but it's a global variable used by spawn_neurons, so + \* it's the easiest to just add it to all the other models + \* @type: Bool; + spawning_neurons \* @type: Set($neuronId) => $neuronId; FRESH_NEURON_ID(existing_neurons) == CHOOSE nid \in (Neuron_Ids \ existing_neurons): TRUE diff --git a/rs/nns/governance/tla/Spawn_Neuron.tla b/rs/nns/governance/tla/Spawn_Neuron.tla new file mode 100644 index 00000000000..2ff261899a4 --- /dev/null +++ b/rs/nns/governance/tla/Spawn_Neuron.tla @@ -0,0 +1,107 @@ +---- MODULE Spawn_Neuron ---- +EXTENDS TLC, Sequences, Naturals, FiniteSets, Variants + +CONSTANTS + \* @type: Set($proc); + Spawn_Neuron_Process_Ids, + \* @type: Set($account); + Governance_Account_Ids, + \* @type: $account; + Minting_Account_Id + +\* Constants from the actual code +CONSTANTS + \* Minimum stake a neuron can have + \* @type: Int; + MIN_STAKE, + \* The transfer fee charged by the ledger canister + \* @type: Int; + TRANSACTION_FEE + +CONSTANT + FRESH_NEURON_ID(_) + +(* --algorithm Governance_Ledger_Spawn_Neuron { + +variables + neuron \in [{} -> {}]; + neuron_id_by_account \in [{} -> {}]; + locks = {}; + governance_to_ledger = <<>>; + ledger_to_governance = {}; + +\* Since spawn_neuron always executes in a single message handler, there's no real need +\* to support multiple procesees (i.e., we could've had `Spasn_Neuron = Spawn_Neuron_Process_Id` +\* here instead of choosing from the set of `Spawn_Neuron_Process_Ids`). But there's no significant +\* upside to that over using a singleton Spawn_Neuron_Process_Ids set, and the TLA-code link tooling +\* expects a process to take a `self` parameter, which is only the case if we use a set of process ids. +\* So we just do the easier thing here. +process (Spawn_Neuron \in Spawn_Neuron_Process_Ids) + { + SpawnNeuronStart: + while(TRUE) { + \* A few checks are skipped here: + \* 1. that the heap can grow + \* 2. that the caller controls the parent neuron + \* 3. That the child controller is valid + \* As these can fail in the implementation, and we are checking that the implementation is aligned + \* with the model, the model also needs to "fail" the spawn operation due to these checks failing. + \* We model this by non-deterministically making Spawn_Neuron into a no-op. 
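+        \* Concretely, the `either` branch below is the no-op (some check failed) case, while the
+        \* `or` branch performs the actual spawn.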
+ either { + skip; + } or { + with(parent_neuron_id \in DOMAIN(neuron) \ locks; + child_account_id \in Governance_Account_Ids \ DOMAIN neuron_id_by_account; + maturity_to_spawn \in MIN_STAKE..neuron[parent_neuron_id].maturity; + child_neuron_id = FRESH_NEURON_ID(DOMAIN(neuron)); + ) { + + \* The code takes a lock on the child neuron, but releases it in the same message handler, + \* effectively only checking that the lock isn't already taken. + await child_neuron_id \notin locks; + + neuron_id_by_account := child_account_id :> child_neuron_id @@ neuron_id_by_account; + neuron := child_neuron_id :> [ cached_stake |-> 0, account |-> child_account_id, fees |-> 0, maturity |-> maturity_to_spawn ] + @@ [ neuron EXCEPT ![parent_neuron_id].maturity = @ - maturity_to_spawn ]; + }; + }; + } + } + +} *) +\* BEGIN TRANSLATION (chksum(pcal) = "8e28ba76" /\ chksum(tla) = "e2a72833") +VARIABLES neuron, neuron_id_by_account, locks, governance_to_ledger, + ledger_to_governance + +vars == << neuron, neuron_id_by_account, locks, governance_to_ledger, + ledger_to_governance >> + +ProcSet == (Spawn_Neuron_Process_Ids) + +Init == (* Global variables *) + /\ neuron \in [{} -> {}] + /\ neuron_id_by_account \in [{} -> {}] + /\ locks = {} + /\ governance_to_ledger = <<>> + /\ ledger_to_governance = {} + +Spawn_Neuron(self) == /\ \/ /\ TRUE + /\ UNCHANGED <> + \/ /\ \E parent_neuron_id \in DOMAIN(neuron) \ locks: + \E child_account_id \in Governance_Account_Ids \ DOMAIN neuron_id_by_account: + \E maturity_to_spawn \in MIN_STAKE..neuron[parent_neuron_id].maturity: + LET child_neuron_id == FRESH_NEURON_ID(DOMAIN(neuron)) IN + /\ child_neuron_id \notin locks + /\ neuron_id_by_account' = (child_account_id :> child_neuron_id @@ neuron_id_by_account) + /\ neuron' = ( child_neuron_id :> [ cached_stake |-> 0, account |-> child_account_id, fees |-> 0, maturity |-> maturity_to_spawn ] + @@ [ neuron EXCEPT ![parent_neuron_id].maturity = @ - maturity_to_spawn ]) + /\ UNCHANGED << locks, governance_to_ledger, + ledger_to_governance >> + +Next == (\E self \in Spawn_Neuron_Process_Ids: Spawn_Neuron(self)) + +Spec == Init /\ [][Next]_vars + +\* END TRANSLATION + +==== diff --git a/rs/nns/governance/tla/Spawn_Neuron_Apalache.tla b/rs/nns/governance/tla/Spawn_Neuron_Apalache.tla new file mode 100644 index 00000000000..b966ea31979 --- /dev/null +++ b/rs/nns/governance/tla/Spawn_Neuron_Apalache.tla @@ -0,0 +1,64 @@ +---- MODULE Spawn_Neuron_Apalache ---- + +EXTENDS TLC, Variants + +(* +@typeAlias: proc = Str; +@typeAlias: account = Str; +@typeAlias: neuronId = Int; +@typeAlias: methodCall = Transfer({ from: $account, to: $account, amount: Int, fee: Int}) | AccountBalance({ account_id: $account }); +@typeAlias: methodResponse = Fail(UNIT) | TransferOk(UNIT) | BalanceQueryOk(Int); +*) +_type_alias_dummy == TRUE + +\* This marker is necessary for the code link tooling to insert the constants +\* CODE_LINK_INSERT_CONSTANTS + +(* +CONSTANTS + \* @type: Set($account); + Governance_Account_Ids, + \* @type: $account; + Minting_Account_Id, + \* @type: Set($neuronId); + Neuron_Ids + +CONSTANTS + \* @type: Set($proc); + Spawn_Neuron_Process_Ids + +CONSTANTS + \* Minimum stake a neuron can have + \* @type: Int; + MIN_STAKE, + \* The transfer fee charged by the ledger canister + \* @type: Int; + TRANSACTION_FEE +*) + +VARIABLES + \* @type: $neuronId -> {cached_stake: Int, account : $account, maturity: Int, fees: Int}; + neuron, + \* @type: $account -> $neuronId; + neuron_id_by_account, + \* @type: Set($neuronId); + locks, + \* @type: Seq({caller : 
$proc, method_and_args: $methodCall }); + governance_to_ledger, + \* @type: Set({caller: $proc, response: $methodResponse }); + ledger_to_governance, + \* @type: $proc -> Str; + pc, + \* Not used by this model, but it's a global variable used by spawn_neurons, so + \* it's the easiest to just add it to all the other models + \* @type: Bool; + spawning_neurons + +\* @type: Set($neuronId) => $neuronId; +FRESH_NEURON_ID(existing_neurons) == CHOOSE nid \in (Neuron_Ids \ existing_neurons): TRUE + +MOD == INSTANCE Spawn_Neuron + +Next == [MOD!Next]_MOD!vars + +==== diff --git a/rs/nns/governance/tla/Spawn_Neurons.tla b/rs/nns/governance/tla/Spawn_Neurons.tla new file mode 100644 index 00000000000..737e398fa5e --- /dev/null +++ b/rs/nns/governance/tla/Spawn_Neurons.tla @@ -0,0 +1,187 @@ +---- MODULE Spawn_Neurons ---- +EXTENDS TLC, Sequences, Naturals, FiniteSets, Variants + +CONSTANTS + \* @type: Set($proc); + Spawn_Neurons_Process_Ids, + \* @type: Set($account); + Governance_Account_Ids, + \* @type: $account; + Minting_Account_Id + +\* Constants from the actual code +CONSTANTS + \* Minimum stake a neuron can have + \* @type: Int; + MIN_STAKE, + \* The transfer fee charged by the ledger canister + \* @type: Int; + TRANSACTION_FEE, + \* @type: Int; + MATURITY_BASIS_POINTS + +BASIS_POINTS_PER_UNITY == 10000 + +request(caller, request_args) == [caller |-> caller, method_and_args |-> request_args] +transfer(from, to, amount, fee) == Variant("Transfer", [from |-> from, to |-> to, amount |-> amount, fee |-> fee]) + +(*--algorithm Governance_Ledger_Spawn_Neurons { + +variables + neuron \in [{} -> {}]; + neuron_id_by_account \in [{} -> {}]; + locks = {}; + governance_to_ledger = <<>>; + ledger_to_governance = {}; + spawning_neurons = TRUE; + +\* The Rust code of spawn_neurons (called in the timer) uses a for loop with an await inside. +\* This is awkward to model in PlusCal while preserving the 1-1 mapping of TLA transitions +\* to code message handlers (a while loop would require some extra labels). +\* A 1-1 mapping, as far as I can tell, requires some code duplication. +\* We extract the duplicated part into a macro, and place the labels appropriately. +\* In some cases we'll want to update the locks twice in the same message handler, once +\* in the macro, and once before invoking the macro. To work around PlusCal not being +\* able to do that, we'll pass the new value of locks as a parameter to the macro. 
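+\* For a concrete sense of the arithmetic in the macro below: with BASIS_POINTS_PER_UNITY = 10000 and,
+\* say, MATURITY_BASIS_POINTS = 250 (a hypothetical +2.5% modulation), a neuron with maturity 10000
+\* is spawned with a cached stake of (10000 * (10000 + 250)) \div 10000 = 10250.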
+macro loop_iteration(new_locks) { + with(nid \in ready_to_spawn_ids \ locks; + \* We need to manually disambiguate the precedence between * and \div here + neuron_stake = (neuron[nid].maturity * (BASIS_POINTS_PER_UNITY + MATURITY_BASIS_POINTS)) \div BASIS_POINTS_PER_UNITY; + account = neuron[nid].account; + ) { + neuron_id := nid; + locks := new_locks \union {neuron_id}; + neuron := [ neuron EXCEPT + ![neuron_id].maturity = 0, + ![neuron_id].cached_stake = neuron_stake + ]; + governance_to_ledger := Append(governance_to_ledger, + request(self, transfer(Minting_Account_Id, account, neuron_stake, 0))); + goto WaitForTransfer; + }; +} + +process (Spawn_Neurons \in Spawn_Neurons_Process_Ids) + variables + neuron_id = 0; + ready_to_spawn_ids = {}; + { + + SpawnNeurons_Start: + await ~spawning_neurons; + + \* TODO: probably need to model the spawning state + ready_to_spawn_ids := {nid \in DOMAIN(neuron) : neuron[nid].maturity > 0}; + await ready_to_spawn_ids # {}; + spawning_neurons := TRUE; + loop_iteration(locks); + WaitForTransfer: + with(answer \in { resp \in ledger_to_governance: resp.caller = self }; + \* Work around PlusCal not being able to assing to the same variable twice in the same block + new_locks = IF answer.response # Variant("Fail", UNIT) + THEN locks \ {neuron_id} + ELSE locks; + ) { + ledger_to_governance := ledger_to_governance \ {answer}; + + ready_to_spawn_ids := ready_to_spawn_ids \ {neuron_id}; + if(ready_to_spawn_ids = {}) { + spawning_neurons := FALSE; + locks := new_locks; + neuron_id := 0; + goto SpawnNeurons_Start; + } else { + loop_iteration(new_locks); + }; + }; + } + +} +*) +\* BEGIN TRANSLATION (chksum(pcal) = "21b445f4" /\ chksum(tla) = "5862c78c") +VARIABLES pc, neuron, neuron_id_by_account, locks, governance_to_ledger, + ledger_to_governance, spawning_neurons, neuron_id, + ready_to_spawn_ids + +vars == << pc, neuron, neuron_id_by_account, locks, governance_to_ledger, + ledger_to_governance, spawning_neurons, neuron_id, + ready_to_spawn_ids >> + +ProcSet == (Spawn_Neurons_Process_Ids) + +Init == (* Global variables *) + /\ neuron \in [{} -> {}] + /\ neuron_id_by_account \in [{} -> {}] + /\ locks = {} + /\ governance_to_ledger = <<>> + /\ ledger_to_governance = {} + /\ spawning_neurons = TRUE + (* Process Spawn_Neurons *) + /\ neuron_id = [self \in Spawn_Neurons_Process_Ids |-> 0] + /\ ready_to_spawn_ids = [self \in Spawn_Neurons_Process_Ids |-> {}] + /\ pc = [self \in ProcSet |-> "SpawnNeurons_Start"] + +SpawnNeurons_Start(self) == /\ pc[self] = "SpawnNeurons_Start" + /\ ~spawning_neurons + /\ ready_to_spawn_ids' = [ready_to_spawn_ids EXCEPT ![self] = {nid \in DOMAIN(neuron) : neuron[nid].maturity > 0}] + /\ ready_to_spawn_ids'[self] # {} + /\ spawning_neurons' = TRUE + /\ \E nid \in ready_to_spawn_ids'[self] \ locks: + LET neuron_stake == (neuron[nid].maturity * (BASIS_POINTS_PER_UNITY + MATURITY_BASIS_POINTS)) \div BASIS_POINTS_PER_UNITY IN + LET account == neuron[nid].account IN + /\ neuron_id' = [neuron_id EXCEPT ![self] = nid] + /\ locks' = (locks \union {neuron_id'[self]}) + /\ neuron' = [ neuron EXCEPT + ![neuron_id'[self]].maturity = 0, + ![neuron_id'[self]].cached_stake = neuron_stake + ] + /\ governance_to_ledger' = Append(governance_to_ledger, + request(self, transfer(Minting_Account_Id, account, neuron_stake, 0))) + /\ pc' = [pc EXCEPT ![self] = "WaitForTransfer"] + /\ UNCHANGED << neuron_id_by_account, + ledger_to_governance >> + +WaitForTransfer(self) == /\ pc[self] = "WaitForTransfer" + /\ \E answer \in { resp \in ledger_to_governance: 
resp.caller = self }: + LET new_locks == IF answer.response # Variant("Fail", UNIT) + THEN locks \ {neuron_id[self]} + ELSE locks IN + /\ ledger_to_governance' = ledger_to_governance \ {answer} + /\ ready_to_spawn_ids' = [ready_to_spawn_ids EXCEPT ![self] = ready_to_spawn_ids[self] \ {neuron_id[self]}] + /\ IF ready_to_spawn_ids'[self] = {} + THEN /\ spawning_neurons' = FALSE + /\ locks' = new_locks + /\ neuron_id' = [neuron_id EXCEPT ![self] = 0] + /\ pc' = [pc EXCEPT ![self] = "SpawnNeurons_Start"] + /\ UNCHANGED << neuron, + governance_to_ledger >> + ELSE /\ \E nid \in ready_to_spawn_ids'[self] \ locks: + LET neuron_stake == (neuron[nid].maturity * (BASIS_POINTS_PER_UNITY + MATURITY_BASIS_POINTS)) \div BASIS_POINTS_PER_UNITY IN + LET account == neuron[nid].account IN + /\ neuron_id' = [neuron_id EXCEPT ![self] = nid] + /\ locks' = (new_locks \union {neuron_id'[self]}) + /\ neuron' = [ neuron EXCEPT + ![neuron_id'[self]].maturity = 0, + ![neuron_id'[self]].cached_stake = neuron_stake + ] + /\ governance_to_ledger' = Append(governance_to_ledger, + request(self, transfer(Minting_Account_Id, account, neuron_stake, 0))) + /\ pc' = [pc EXCEPT ![self] = "WaitForTransfer"] + /\ UNCHANGED spawning_neurons + /\ UNCHANGED neuron_id_by_account + +Spawn_Neurons(self) == SpawnNeurons_Start(self) \/ WaitForTransfer(self) + +(* Allow infinite stuttering to prevent deadlock on termination. *) +Terminating == /\ \A self \in ProcSet: pc[self] = "Done" + /\ UNCHANGED vars + +Next == (\E self \in Spawn_Neurons_Process_Ids: Spawn_Neurons(self)) + \/ Terminating + +Spec == Init /\ [][Next]_vars + +Termination == <>(\A self \in ProcSet: pc[self] = "Done") + +\* END TRANSLATION +==== diff --git a/rs/nns/governance/tla/Spawn_Neurons_Apalache.tla b/rs/nns/governance/tla/Spawn_Neurons_Apalache.tla new file mode 100644 index 00000000000..cf01c6171ac --- /dev/null +++ b/rs/nns/governance/tla/Spawn_Neurons_Apalache.tla @@ -0,0 +1,63 @@ +---- MODULE Spawn_Neurons_Apalache ---- + +EXTENDS TLC, Variants + +(* +@typeAlias: proc = Str; +@typeAlias: account = Str; +@typeAlias: neuronId = Int; +@typeAlias: methodCall = Transfer({ from: $account, to: $account, amount: Int, fee: Int}) | AccountBalance({ account_id: $account }); +@typeAlias: methodResponse = Fail(UNIT) | TransferOk(UNIT) | BalanceQueryOk(Int); +*) +_type_alias_dummy == TRUE + +\* This marker is necessary for the code link tooling to insert the constants +\* CODE_LINK_INSERT_CONSTANTS + +(* +CONSTANTS + \* @type: Set($account); + Governance_Account_Ids, + \* @type: $account; + Minting_Account_Id, + \* @type: Set($neuronId); + Neuron_Ids + +CONSTANTS + \* @type: Set($proc); + Spawn_Neurons_Process_Ids + +CONSTANTS + \* Minimum stake a neuron can have + \* @type: Int; + MIN_STAKE, + \* The transfer fee charged by the ledger canister + \* @type: Int; + TRANSACTION_FEE +*) + +VARIABLES + \* @type: $neuronId -> {cached_stake: Int, account : $account, maturity: Int, fees: Int}; + neuron, + \* @type: $account -> $neuronId; + neuron_id_by_account, + \* @type: Set($neuronId); + locks, + \* @type: Seq({caller : $proc, method_and_args: $methodCall }); + governance_to_ledger, + \* @type: Set({caller: $proc, response: $methodResponse }); + ledger_to_governance, + \* @type: $proc -> Str; + pc, + \* @type: Bool; + spawning_neurons, + \* @type: $proc -> $neuronId; + neuron_id, + \* @type: $proc -> Set($neuronId); + ready_to_spawn_ids + +MOD == INSTANCE Spawn_Neurons + +Next == [MOD!Next]_MOD!vars + +==== diff --git a/rs/nns/governance/tla/Split_Neuron_Apalache.tla 
b/rs/nns/governance/tla/Split_Neuron_Apalache.tla index 65cd78256f2..e401cbd5523 100644 --- a/rs/nns/governance/tla/Split_Neuron_Apalache.tla +++ b/rs/nns/governance/tla/Split_Neuron_Apalache.tla @@ -1,6 +1,6 @@ This module adds Apalache type annotations and constant instantiations to the TLA model of split_neuron. -We can't add the annotations to Split_Neuron.tla (which contains the actual transition predicate) +We can't add the annotations to Split_Neuron.tla (which contains the actual transition predicate) because we use PlusCal. PlusCal autogenerates the TLA+ code, which means that every change to the PlusCal code would overwrite the annotations on the VARIABLES. @@ -24,21 +24,21 @@ _type_alias_dummy == TRUE \* CODE_LINK_INSERT_CONSTANTS (* -CONSTANTS +CONSTANTS \* @type: Set($account); - Account_Ids, + Account_Ids, \* @type: Set($account); - Governance_Account_Ids, + Governance_Account_Ids, \* @type: $account; Minting_Account_Id, \* @type: Set($neuronId); Neuron_Ids -CONSTANTS +CONSTANTS \* @type: Set($proc); Split_Neuron_Process_Ids -CONSTANTS +CONSTANTS \* Minimum stake a neuron can have \* @type: Int; MIN_STAKE, @@ -67,7 +67,11 @@ VARIABLES \* @type: $proc -> Int; sn_child_neuron_id, \* @type: $proc -> $account; - sn_child_account_id + sn_child_account_id, + \* Not used by this model, but it's a global variable used by spawn_neurons, so + \* it's the easiest to just add it to all the other models + \* @type: Bool; + spawning_neurons \* @type: Set($neuronId) => $neuronId; FRESH_NEURON_ID(existing_neurons) == CHOOSE nid \in (Neuron_Ids \ existing_neurons): TRUE @@ -76,4 +80,4 @@ MOD == INSTANCE Split_Neuron Next == [MOD!Next]_MOD!vars -==== \ No newline at end of file +==== diff --git a/rs/tla_instrumentation/tla_instrumentation/src/checker.rs b/rs/tla_instrumentation/tla_instrumentation/src/checker.rs index c71de8ed423..248e680cc1a 100644 --- a/rs/tla_instrumentation/tla_instrumentation/src/checker.rs +++ b/rs/tla_instrumentation/tla_instrumentation/src/checker.rs @@ -45,6 +45,7 @@ impl std::fmt::Debug for ApalacheError { } pub struct TlaCheckError { + pub model: PathBuf, pub apalache_error: ApalacheError, pub pair: ResolvedStatePair, pub constants: TlaConstantAssignment, @@ -218,6 +219,7 @@ pub fn check_tla_code_link( .collect(), ) .map_err(|e| TlaCheckError { + model: predicate.tla_module, apalache_error: e, pair: state_pair, constants, diff --git a/rs/tla_instrumentation/tla_instrumentation/src/tla_state.rs b/rs/tla_instrumentation/tla_instrumentation/src/tla_state.rs index d2fbd2b3611..c936d2d0479 100644 --- a/rs/tla_instrumentation/tla_instrumentation/src/tla_state.rs +++ b/rs/tla_instrumentation/tla_instrumentation/src/tla_state.rs @@ -1,5 +1,5 @@ use crate::tla_value::{TlaValue, ToTla}; -use crate::SourceLocation; +use crate::{Diff, SourceLocation}; use candid::CandidType; use std::{ collections::{BTreeMap, BTreeSet}, @@ -184,6 +184,32 @@ pub struct ResolvedStatePair { pub end_source_location: SourceLocation, } +impl ResolvedStatePair { + /// Returns a list of fields that differ between the start and end states + /// The difference is fine-grained, so if a field is a (potentially nested) record or a function, + /// the difference lists just the fields that differ (respectively, the argument/value pairs that differ) + pub fn diff(&self) -> Vec<(String, Diff)> { + let mut diff = vec![]; + let start = &self.start.0; + let end = &self.end.0; + for (key, value) in start.0.iter() { + if let Some(end_value) = end.0.get(key) { + if let Some(d) = value.diff(end_value) { + 
diff.push((key.clone(), d)); + } + } else { + diff.push((key.clone(), Diff::Other(Some(value.clone()), None))); + } + } + for (key, value) in end.0.iter() { + if !start.0.contains_key(key) { + diff.push((key.clone(), Diff::Other(None, Some(value.clone())))); + } + } + diff + } +} + fn resolve_local_variable(name: &str, value: &TlaValue, process_id: &str) -> VarAssignment { let mut assignment = VarAssignment::new(); assignment.push( diff --git a/rs/tla_instrumentation/tla_instrumentation/src/tla_value.rs b/rs/tla_instrumentation/tla_instrumentation/src/tla_value.rs index f346cf85728..58572889f38 100644 --- a/rs/tla_instrumentation/tla_instrumentation/src/tla_value.rs +++ b/rs/tla_instrumentation/tla_instrumentation/src/tla_value.rs @@ -1,4 +1,4 @@ -use candid::{CandidType, Nat, Principal}; +use candid::{CandidType, Int as CInt, Nat, Principal}; use std::collections::{BTreeMap, BTreeSet, HashMap}; use std::{ fmt, @@ -14,10 +14,19 @@ pub enum TlaValue { Literal(String), Constant(String), Bool(bool), - Int(Nat), + Int(CInt), Variant { tag: String, value: Box }, } +#[derive(Clone, Debug)] +pub enum Diff { + /// For records and functions, have a fine-grained diff + RecordDiff(HashMap>), + FunctionDiff(HashMap>), + /// For other value types, just record the difference + Other(Option, Option), +} + impl TlaValue { /// An approximation of the size of the TLA value, in terms of the number of atoms. /// Ignores string lengths or number sizes. @@ -34,6 +43,84 @@ impl TlaValue { TlaValue::Variant { tag: _, value } => 1 + value.size(), } } + + /// Returns a list of fields that differ between this value and the other one + /// The difference is fine-grained, so if a field is a (potentially nested) record or a function, + /// the difference lists just the fields that differ (respectively, the argument/value pairs that differ) + pub fn diff(&self, other: &TlaValue) -> Option { + if self == other { + return None; + } + match (self, other) { + (TlaValue::Record(map1), TlaValue::Record(map2)) => { + let mut diff = vec![]; + for (k, v1) in map1 { + if let Some(v2) = map2.get(k) { + let sub_diff = v1.diff(v2); + match sub_diff { + Some(Diff::RecordDiff(m)) => { + diff.extend( + m.into_iter().map(|(k2, dv)| (format!("{}.{}", k, k2), dv)), + ); + } + Some(Diff::FunctionDiff(m)) => { + diff.extend( + m.into_iter() + .map(|(k2, dv)| (format!("{}[{:?}]", k, k2), dv)), + ); + } + Some(d @ Diff::Other(_, _)) => { + diff.push((k.clone(), Box::new(d))); + } + None => {} + } + } else { + diff.push((k.clone(), Box::new(Diff::Other(Some(v1.clone()), None)))); + } + } + for (k, v2) in map2 { + if !map1.contains_key(k) { + diff.push((k.clone(), Box::new(Diff::Other(None, Some(v2.clone()))))); + } + } + if diff.is_empty() { + None + } else { + Some(Diff::RecordDiff(diff.into_iter().collect())) + } + } + (TlaValue::Function(map1), TlaValue::Function(map2)) => { + let mut diff = vec![]; + for (k, v1) in map1 { + if let Some(v2) = map2.get(k) { + let sub_diff = v1.diff(v2); + if let Some(d) = sub_diff { + diff.push((k.clone(), Box::new(d))); + } + } else { + diff.push((k.clone(), Box::new(Diff::Other(Some(v1.clone()), None)))); + } + } + for (k, v2) in map2 { + if !map1.contains_key(k) { + diff.push((k.clone(), Box::new(Diff::Other(None, Some(v2.clone()))))); + } + } + if diff.is_empty() { + None + } else { + Some(Diff::FunctionDiff(diff.into_iter().collect())) + } + } + (val1, val2) => { + if val1 == val2 { + None + } else { + Some(Diff::Other(Some(val1.clone()), Some(val2.clone()))) + } + } + } + } } impl Display for TlaValue 
{ @@ -161,12 +248,24 @@ impl ToTla for u64 { } } -impl ToTla for Nat { +impl ToTla for i32 { + fn to_tla_value(&self) -> TlaValue { + TlaValue::Int((*self).into()) + } +} + +impl ToTla for CInt { fn to_tla_value(&self) -> TlaValue { TlaValue::Int(self.clone()) } } +impl ToTla for Nat { + fn to_tla_value(&self) -> TlaValue { + TlaValue::Int(self.clone().into()) + } +} + impl ToTla for BTreeMap { fn to_tla_value(&self) -> TlaValue { TlaValue::Function( diff --git a/rs/tla_instrumentation/tla_instrumentation/tests/multiple_calls.rs b/rs/tla_instrumentation/tla_instrumentation/tests/multiple_calls.rs index a6e0f0e189f..d97215e7f82 100644 --- a/rs/tla_instrumentation/tla_instrumentation/tests/multiple_calls.rs +++ b/rs/tla_instrumentation/tla_instrumentation/tests/multiple_calls.rs @@ -21,7 +21,7 @@ mod tla_stuff { use crate::StructCanister; use std::collections::BTreeSet; - use candid::Nat; + use candid::Int; pub const PID: &str = "Multiple_Calls"; pub const CAN_NAME: &str = "mycan"; @@ -71,14 +71,14 @@ mod tla_stuff { Some(TlaValue::Int(start_counter)), Some(TlaValue::Int(end_counter)), ) => start_counter.max(end_counter).clone(), - _ => Nat::from(0_u64), + _ => Int::from(0_u64), }, ) .max(); let constants = BTreeMap::from([ ( "MAX_COUNTER".to_string(), - max_counter.unwrap_or(Nat::from(0_u64)).to_tla_value(), + max_counter.unwrap_or(Int::from(0_u64)).to_tla_value(), ), ( "My_Method_Process_Ids".to_string(), diff --git a/rs/tla_instrumentation/tla_instrumentation/tests/structs.rs b/rs/tla_instrumentation/tla_instrumentation/tests/structs.rs index 27843d7e0a8..084462f9c34 100644 --- a/rs/tla_instrumentation/tla_instrumentation/tests/structs.rs +++ b/rs/tla_instrumentation/tla_instrumentation/tests/structs.rs @@ -21,7 +21,7 @@ mod tla_stuff { use crate::StructCanister; use std::collections::BTreeSet; - use candid::Nat; + use candid::Int; pub const PID: &str = "Counter"; pub const CAN_NAME: &str = "mycan"; @@ -71,14 +71,14 @@ mod tla_stuff { Some(TlaValue::Int(start_counter)), Some(TlaValue::Int(end_counter)), ) => start_counter.max(end_counter).clone(), - _ => Nat::from(0_u64), + _ => Int::from(0_u64), }, ) .max(); let constants = BTreeMap::from([ ( "MAX_COUNTER".to_string(), - max_counter.unwrap_or(Nat::from(0_u64)).to_tla_value(), + max_counter.unwrap_or(Int::from(0_u64)).to_tla_value(), ), ( "My_Method_Process_Ids".to_string(), diff --git a/rs/tla_instrumentation/tla_instrumentation_proc_macros/src/lib.rs b/rs/tla_instrumentation/tla_instrumentation_proc_macros/src/lib.rs index 2d466ad0f3d..0935869facf 100644 --- a/rs/tla_instrumentation/tla_instrumentation_proc_macros/src/lib.rs +++ b/rs/tla_instrumentation/tla_instrumentation_proc_macros/src/lib.rs @@ -120,15 +120,49 @@ pub fn tla_update_method(attr: TokenStream, item: TokenStream) -> TokenStream { let asyncness = sig.asyncness; - let output = if asyncness.is_some() { + let invocation = if asyncness.is_some() { + quote! 
{ { + let mut pinned = Box::pin(TLA_INSTRUMENTATION_STATE.scope( + tla_instrumentation::InstrumentationState::new(update.clone(), globals, snapshotter, start_location), + async move { + let res = self.#mangled_name(#(#args),*).await; + let globals = tla_get_globals!(self); + let state: InstrumentationState = TLA_INSTRUMENTATION_STATE.get(); + let mut handler_state = state.handler_state.borrow_mut(); + let state_pair = tla_instrumentation::log_method_return(&mut handler_state, globals, end_location); + let mut state_pairs = state.state_pairs.borrow_mut(); + state_pairs.push(state_pair); + res + } + )); + let res = pinned.as_mut().await; + let trace = pinned.as_mut().take_value().expect("No TLA trace in the future!"); + let pairs = trace.state_pairs.borrow_mut().clone(); + (pairs, res) + } } + } else { + quote! { + TLA_INSTRUMENTATION_STATE.sync_scope( + tla_instrumentation::InstrumentationState::new(update.clone(), globals, snapshotter, start_location), + || { + let res = self.#mangled_name(#(#args),*); + let globals = tla_get_globals!(self); + let state: InstrumentationState = TLA_INSTRUMENTATION_STATE.get(); + let mut handler_state = state.handler_state.borrow_mut(); + let state_pair = tla_instrumentation::log_method_return(&mut handler_state, globals, end_location); + let mut state_pairs = state.state_pairs.borrow_mut(); + state_pairs.push(state_pair); + (state_pairs.clone(), res) + } + ) + } + }; + + let output = { quote! { #modified_fn #(#attrs)* #vis #sig { - // Fail the compilation if we're not in debug mode - // #[cfg(not(debug_assertions))] - // let i:u32 = "abc"; - use std::cell::RefCell; use std::rc::Rc; @@ -138,24 +172,10 @@ pub fn tla_update_method(attr: TokenStream, item: TokenStream) -> TokenStream { let update = #attr2; let start_location = tla_instrumentation::SourceLocation { file: "Unknown file".to_string(), line: format!("Start of {}", #original_name) }; let end_location = tla_instrumentation::SourceLocation { file: "Unknown file".to_string(), line: format!("End of {}", #original_name) }; - let mut pinned = Box::pin(TLA_INSTRUMENTATION_STATE.scope( - tla_instrumentation::InstrumentationState::new(update.clone(), globals, snapshotter, start_location), - async move { - let res = self.#mangled_name(#(#args),*).await; - let globals = tla_get_globals!(self); - let state: InstrumentationState = TLA_INSTRUMENTATION_STATE.get(); - let mut handler_state = state.handler_state.borrow_mut(); - let state_pair = tla_instrumentation::log_method_return(&mut handler_state, globals, end_location); - let mut state_pairs = state.state_pairs.borrow_mut(); - state_pairs.push(state_pair); - res - } - )); - let res = pinned.as_mut().await; - let trace = pinned.as_mut().take_value().expect("No TLA trace in the future!"); - let mut pairs = trace.state_pairs.borrow_mut().clone(); + let (mut pairs, res) = #invocation; + let constants = (update.post_process)(&mut pairs); - // println!("State pairs in the expanded macro: {:?}", pairs); + let trace = tla_instrumentation::UpdateTrace { update, state_pairs: pairs, @@ -174,23 +194,6 @@ pub fn tla_update_method(attr: TokenStream, item: TokenStream) -> TokenStream { res } } - } else { - quote! 
{ - #modified_fn - - #(#attrs)* #vis #sig { - // Fail the compilation if we're not in debug mode - #[cfg(not(debug_assertions))] - let i:u32 = "abc"; - - let globals = tla_get_globals!(); - tla_instrumentation::tla_log_method_call!(#attr2, globals); - let res = #mangled_name(#(#args),*); - let globals = tla_get_globals!(); - tla_instrumentation::tla_log_method_return!(globals); - res - } - } }; output.into() From 05c96cbd11ea38b68d28a4f074345e1ebff2eb0d Mon Sep 17 00:00:00 2001 From: Leo Eichhorn <99166915+eichhorl@users.noreply.github.com> Date: Thu, 5 Dec 2024 15:19:44 +0100 Subject: [PATCH 22/23] chore: CRP-2614 Rename IDKG test functions and struct fields in Network and SubnetTopology (#2741) This PR updates some functions and types from "IDKG" naming to a more general "Chain Key" naming, which is also applicable for VetKd. As only the names are altered, the protobuf changes are backwards/forwards compatible. Therefore it is safe to use `CI_OVERRIDE_BUF_BREAKING` in this PR. --- packages/pocket-ic/tests/tests.rs | 2 +- rs/canonical_state/src/traversal.rs | 4 +- .../benches/management_canister/ecdsa.rs | 2 +- .../src/execution_environment.rs | 4 +- .../src/scheduler/test_utilities.rs | 29 ++++---- .../src/scheduler/tests.rs | 10 +-- .../tests/execution_test.rs | 2 +- .../tests/subnet_size_test.rs | 2 +- .../tests/threshold_signatures.rs | 24 +++---- rs/http_endpoints/public/tests/common/mod.rs | 2 +- rs/messaging/src/message_routing.rs | 8 +-- rs/messaging/src/message_routing/tests.rs | 38 +++++------ rs/messaging/src/state_machine/tests.rs | 2 +- rs/pocket_ic_server/src/pocket_ic.rs | 4 +- .../def/state/metadata/v1/metadata.proto | 6 +- .../src/gen/state/state.metadata.v1.rs | 7 +- rs/replicated_state/src/metadata_state.rs | 48 ++++++------- .../src/metadata_state/tests.rs | 6 +- rs/state_machine_tests/src/lib.rs | 46 ++++++------- rs/state_manager/tests/state_manager.rs | 2 +- rs/system_api/src/routing.rs | 68 +++++++++---------- .../execution_environment/src/lib.rs | 24 +++---- rs/test_utilities/state/src/lib.rs | 2 +- .../tecdsa_signature_life_cycle_test.rs | 8 +-- 24 files changed, 177 insertions(+), 173 deletions(-) diff --git a/packages/pocket-ic/tests/tests.rs b/packages/pocket-ic/tests/tests.rs index dac40e9a82d..56389585740 100644 --- a/packages/pocket-ic/tests/tests.rs +++ b/packages/pocket-ic/tests/tests.rs @@ -1113,7 +1113,7 @@ fn test_ecdsa_disabled() { .unwrap() .0 .unwrap_err(); - assert!(ecdsa_signature_err.contains("Requested unknown or signing disabled threshold key: ecdsa:Secp256k1:dfx_test_key, existing keys with signing enabled: []")); + assert!(ecdsa_signature_err.contains("Requested unknown or disabled threshold key: ecdsa:Secp256k1:dfx_test_key, existing enabled keys: []")); } #[test] diff --git a/rs/canonical_state/src/traversal.rs b/rs/canonical_state/src/traversal.rs index a4c8947859d..4be419c08cd 100644 --- a/rs/canonical_state/src/traversal.rs +++ b/rs/canonical_state/src/traversal.rs @@ -639,14 +639,14 @@ mod tests { nodes: BTreeSet::new(), subnet_type: SubnetType::Application, subnet_features: SubnetFeatures::default(), - idkg_keys_held: BTreeSet::new(), + chain_keys_held: BTreeSet::new(), }, subnet_test_id(1) => SubnetTopology { public_key: vec![5, 6, 7, 8], nodes: BTreeSet::new(), subnet_type: SubnetType::Application, subnet_features: SubnetFeatures::default(), - idkg_keys_held: BTreeSet::new(), + chain_keys_held: BTreeSet::new(), } }; fn id_range(from: u64, to: u64) -> CanisterIdRange { diff --git 
a/rs/execution_environment/benches/management_canister/ecdsa.rs b/rs/execution_environment/benches/management_canister/ecdsa.rs index 2f784c26152..1e949a2a0ca 100644 --- a/rs/execution_environment/benches/management_canister/ecdsa.rs +++ b/rs/execution_environment/benches/management_canister/ecdsa.rs @@ -44,7 +44,7 @@ fn run_bench( .with_subnet_type(SubnetType::Application) .with_nns_subnet_id(nns_subnet_id) .with_subnet_id(subnet_id) - .with_idkg_key(MasterPublicKeyId::Ecdsa(ecdsa_key.clone())) + .with_chain_key(MasterPublicKeyId::Ecdsa(ecdsa_key.clone())) .build(); let test_canister = env .install_canister_with_cycles( diff --git a/rs/execution_environment/src/execution_environment.rs b/rs/execution_environment/src/execution_environment.rs index 81f9ae0700a..7d5e323efd6 100644 --- a/rs/execution_environment/src/execution_environment.rs +++ b/rs/execution_environment/src/execution_environment.rs @@ -2752,9 +2752,9 @@ impl ExecutionEnvironment { let threshold_key = args.key_id(); - // Check if signing is enabled. + // Check if the key is enabled. if !topology - .idkg_signing_subnets(&threshold_key) + .chain_key_enabled_subnets(&threshold_key) .contains(&state.metadata.own_subnet_id) { return Err(UserError::new( diff --git a/rs/execution_environment/src/scheduler/test_utilities.rs b/rs/execution_environment/src/scheduler/test_utilities.rs index f12927f380c..10502193f60 100644 --- a/rs/execution_environment/src/scheduler/test_utilities.rs +++ b/rs/execution_environment/src/scheduler/test_utilities.rs @@ -669,7 +669,7 @@ pub(crate) struct SchedulerTestBuilder { rate_limiting_of_heap_delta: bool, deterministic_time_slicing: bool, log: ReplicaLogger, - idkg_keys: Vec, + master_public_key_ids: Vec, metrics_registry: MetricsRegistry, round_summary: Option, replica_version: ReplicaVersion, @@ -696,7 +696,7 @@ impl Default for SchedulerTestBuilder { rate_limiting_of_heap_delta: false, deterministic_time_slicing: true, log: no_op_logger(), - idkg_keys: vec![], + master_public_key_ids: vec![], metrics_registry: MetricsRegistry::new(), round_summary: None, replica_version: ReplicaVersion::default(), @@ -763,15 +763,18 @@ impl SchedulerTestBuilder { } } - pub fn with_idkg_key(self, idkg_key: MasterPublicKeyId) -> Self { + pub fn with_chain_key(self, key_id: MasterPublicKeyId) -> Self { Self { - idkg_keys: vec![idkg_key], + master_public_key_ids: vec![key_id], ..self } } - pub fn with_idkg_keys(self, idkg_keys: Vec) -> Self { - Self { idkg_keys, ..self } + pub fn with_chain_keys(self, master_public_key_ids: Vec) -> Self { + Self { + master_public_key_ids, + ..self + } } pub fn with_batch_time(self, batch_time: Time) -> Self { @@ -816,23 +819,23 @@ impl SchedulerTestBuilder { state.metadata.batch_time = self.batch_time; let config = SubnetConfig::new(self.subnet_type).cycles_account_manager_config; - for idkg_key in &self.idkg_keys { + for key_id in &self.master_public_key_ids { state .metadata .network_topology - .idkg_signing_subnets - .insert(idkg_key.clone(), vec![self.own_subnet_id]); + .chain_key_enabled_subnets + .insert(key_id.clone(), vec![self.own_subnet_id]); state .metadata .network_topology .subnets .get_mut(&self.own_subnet_id) .unwrap() - .idkg_keys_held - .insert(idkg_key.clone()); + .chain_keys_held + .insert(key_id.clone()); registry_settings.chain_key_settings.insert( - idkg_key.clone(), + key_id.clone(), ChainKeySettings { max_queue_size: 20, pre_signatures_to_create_in_advance: 5, @@ -840,7 +843,7 @@ impl SchedulerTestBuilder { ); } let chain_key_subnet_public_keys: BTreeMap<_, _> = 
self - .idkg_keys + .master_public_key_ids .into_iter() .map(|key_id| { ( diff --git a/rs/execution_environment/src/scheduler/tests.rs b/rs/execution_environment/src/scheduler/tests.rs index fb0c0bb64c3..fd1d6e2c1f7 100644 --- a/rs/execution_environment/src/scheduler/tests.rs +++ b/rs/execution_environment/src/scheduler/tests.rs @@ -3733,7 +3733,7 @@ fn threshold_signature_agreements_metric_is_updated() { let master_schnorr_key_id = MasterPublicKeyId::Schnorr(schnorr_key_id.clone()); let mut test = SchedulerTestBuilder::new() .with_replica_version(ReplicaVersion::default()) - .with_idkg_keys(vec![ + .with_chain_keys(vec![ master_ecdsa_key_id.clone(), master_schnorr_key_id.clone(), ]) @@ -3914,7 +3914,7 @@ fn threshold_signature_agreements_metric_is_updated() { fn consumed_cycles_ecdsa_outcalls_are_added_to_consumed_cycles_total() { let key_id = make_ecdsa_key_id(0); let mut test = SchedulerTestBuilder::new() - .with_idkg_key(MasterPublicKeyId::Ecdsa(key_id.clone())) + .with_chain_key(MasterPublicKeyId::Ecdsa(key_id.clone())) .build(); let fee = test.ecdsa_signature_fee(); @@ -5690,7 +5690,7 @@ fn inject_ecdsa_signing_request(test: &mut SchedulerTest, key_id: &EcdsaKeyId) { fn test_sign_with_ecdsa_contexts_are_not_updated_without_quadruples() { let key_id = make_ecdsa_key_id(0); let mut test = SchedulerTestBuilder::new() - .with_idkg_key(MasterPublicKeyId::Ecdsa(key_id.clone())) + .with_chain_key(MasterPublicKeyId::Ecdsa(key_id.clone())) .build(); inject_ecdsa_signing_request(&mut test, &key_id); @@ -5716,7 +5716,7 @@ fn test_sign_with_ecdsa_contexts_are_not_updated_without_quadruples() { fn test_sign_with_ecdsa_contexts_are_updated_with_quadruples() { let key_id = make_ecdsa_key_id(0); let mut test = SchedulerTestBuilder::new() - .with_idkg_key(MasterPublicKeyId::Ecdsa(key_id.clone())) + .with_chain_key(MasterPublicKeyId::Ecdsa(key_id.clone())) .build(); let pre_sig_id = PreSigId(0); let pre_sig_ids = BTreeSet::from_iter([pre_sig_id]); @@ -5779,7 +5779,7 @@ fn test_sign_with_ecdsa_contexts_are_updated_with_quadruples() { fn test_sign_with_ecdsa_contexts_are_matched_under_multiple_keys() { let key_ids: Vec<_> = (0..3).map(make_ecdsa_key_id).collect(); let mut test = SchedulerTestBuilder::new() - .with_idkg_keys( + .with_chain_keys( key_ids .iter() .cloned() diff --git a/rs/execution_environment/tests/execution_test.rs b/rs/execution_environment/tests/execution_test.rs index f17a0318e31..da04b64786f 100644 --- a/rs/execution_environment/tests/execution_test.rs +++ b/rs/execution_environment/tests/execution_test.rs @@ -1715,7 +1715,7 @@ fn test_consensus_queue_invariant_on_exceeding_heap_delta_limit() { subnet_config, HypervisorConfig::default(), ))) - .with_idkg_key(MasterPublicKeyId::Ecdsa(key_id.clone())) + .with_chain_key(MasterPublicKeyId::Ecdsa(key_id.clone())) .build(); let canister_id = env .install_canister_with_cycles( diff --git a/rs/execution_environment/tests/subnet_size_test.rs b/rs/execution_environment/tests/subnet_size_test.rs index 2313dc6b89c..52e4c58feb1 100644 --- a/rs/execution_environment/tests/subnet_size_test.rs +++ b/rs/execution_environment/tests/subnet_size_test.rs @@ -454,7 +454,7 @@ fn simulate_sign_with_ecdsa_cost( .with_subnet_size(subnet_size) .with_nns_subnet_id(nns_subnet_id) .with_subnet_id(subnet_id) - .with_idkg_key(MasterPublicKeyId::Ecdsa(key_id.clone())) + .with_chain_key(MasterPublicKeyId::Ecdsa(key_id.clone())) .build(); // Create canister with initial cycles for some unrelated costs (eg. ingress induction, heartbeat). 
let canister_id = diff --git a/rs/execution_environment/tests/threshold_signatures.rs b/rs/execution_environment/tests/threshold_signatures.rs index 8b42b70578e..d916dc1d10a 100644 --- a/rs/execution_environment/tests/threshold_signatures.rs +++ b/rs/execution_environment/tests/threshold_signatures.rs @@ -214,7 +214,7 @@ fn test_compute_initial_idkg_dealings_sender_on_nns() { .with_checkpoints_enabled(false) .with_subnet_id(nns_subnet) .with_nns_subnet_id(nns_subnet) - .with_idkg_key(key_id.clone()) + .with_chain_key(key_id.clone()) .build(); let canister_id = create_universal_canister(&env); @@ -270,7 +270,7 @@ fn test_compute_initial_idkg_dealings_sender_not_on_nns() { .with_checkpoints_enabled(false) .with_subnet_id(own_subnet) .with_nns_subnet_id(nns_subnet) - .with_idkg_key(key_id.clone()) + .with_chain_key(key_id.clone()) .build(); let canister_id = create_universal_canister(&env); @@ -363,7 +363,7 @@ fn test_sign_with_threshold_key_fee_charged() { .with_nns_subnet_id(nns_subnet) .with_ecdsa_signature_fee(fee) .with_schnorr_signature_fee(fee) - .with_idkg_key(key_id.clone()) + .with_chain_key(key_id.clone()) .build(); let canister_id = create_universal_canister(&env); @@ -429,7 +429,7 @@ fn test_sign_with_threshold_key_rejected_without_fee() { .with_nns_subnet_id(nns_subnet) .with_ecdsa_signature_fee(fee) .with_schnorr_signature_fee(fee) - .with_idkg_key(key_id.clone()) + .with_chain_key(key_id.clone()) .build(); let canister_id = create_universal_canister(&env); @@ -478,7 +478,7 @@ fn test_sign_with_threshold_key_unknown_key_rejected() { .with_checkpoints_enabled(false) .with_subnet_id(own_subnet) .with_nns_subnet_id(nns_subnet) - .with_idkg_key(correct_key.clone()) + .with_chain_key(correct_key.clone()) .build(); let canister_id = create_universal_canister(&env); @@ -487,7 +487,7 @@ fn test_sign_with_threshold_key_unknown_key_rejected() { assert_eq!( result, Ok(WasmResult::Reject(format!( - "Unable to route management canister request {}: ChainKeyError(\"Requested unknown or signing disabled threshold key: {}, existing keys with signing enabled: {}\")", + "Unable to route management canister request {}: ChainKeyError(\"Requested unknown or disabled threshold key: {}, existing enabled keys: {}\")", method, wrong_key, format_keys(vec![correct_key]), @@ -522,7 +522,7 @@ fn test_signing_disabled_vs_unknown_key_on_public_key_and_signing_requests() { .with_subnet_type(SubnetType::System) .with_subnet_id(own_subnet) .with_nns_subnet_id(nns_subnet) - .with_signing_disabled_idkg_key(signing_disabled_key.clone()) + .with_disabled_chain_key(signing_disabled_key.clone()) .build(); let canister_id = create_universal_canister(&env); @@ -554,7 +554,7 @@ fn test_signing_disabled_vs_unknown_key_on_public_key_and_signing_requests() { sign_with_method, signing_disabled_key.clone(), )), - "Requested unknown or signing disabled threshold key" + "Requested unknown or disabled threshold key" ); // Requesting non-existent public key (should fail). 
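
The tests in this hunk all assert on the reworded rejection string. A minimal, self-contained sketch of the check they exercise, using plain String stand-ins for MasterPublicKeyId and a plain Result in place of UserError (both simplifications are assumptions for illustration, not the replica's real types):

    use std::collections::BTreeSet;

    // Reject a request unless the key is both known and enabled, with the
    // error wording introduced by this commit.
    fn check_key_enabled(requested: &str, enabled: &BTreeSet<String>) -> Result<(), String> {
        if enabled.contains(requested) {
            Ok(())
        } else {
            Err(format!(
                "Requested unknown or disabled threshold key: {}, existing enabled keys: [{}]",
                requested,
                enabled.iter().cloned().collect::<Vec<_>>().join(", ")
            ))
        }
    }

    fn main() {
        let enabled: BTreeSet<String> = ["ecdsa:Secp256k1:correct_key".to_string()].into();
        // A known, enabled key is accepted.
        assert!(check_key_enabled("ecdsa:Secp256k1:correct_key", &enabled).is_ok());
        // An unknown key is rejected with the new wording asserted on above.
        let err = check_key_enabled("ecdsa:Secp256k1:wrong_key", &enabled).unwrap_err();
        assert!(err.contains("Requested unknown or disabled threshold key"));
    }

Note that the same wording covers both the "unknown key" and the "key held but not enabled" cases, which is why the signing-disabled test above expects the identical error prefix.
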
@@ -576,7 +576,7 @@ fn test_signing_disabled_vs_unknown_key_on_public_key_and_signing_requests() { sign_with_method, unknown_key.clone(), )), - "Requested unknown or signing disabled threshold key" + "Requested unknown or disabled threshold key" ); } } @@ -602,7 +602,7 @@ fn test_threshold_key_public_key_req_with_unknown_key_rejected() { .with_checkpoints_enabled(false) .with_subnet_id(own_subnet) .with_nns_subnet_id(nns_subnet) - .with_idkg_key(correct_key.clone()) + .with_chain_key(correct_key.clone()) .build(); let canister_id = create_universal_canister(&env); @@ -636,7 +636,7 @@ fn test_sign_with_threshold_key_fee_ignored_for_nns() { .with_nns_subnet_id(nns_subnet) .with_ecdsa_signature_fee(fee) .with_schnorr_signature_fee(fee) - .with_idkg_key(key_id.clone()) + .with_chain_key(key_id.clone()) .build(); let canister_id = create_universal_canister(&env); @@ -686,7 +686,7 @@ fn test_sign_with_threshold_key_queue_fills_up() { .with_nns_subnet_id(nns_subnet) .with_ecdsa_signature_fee(fee) .with_schnorr_signature_fee(fee) - .with_idkg_key(key_id.clone()) + .with_chain_key(key_id.clone()) // Turn off automatic ECDSA signatures to fill up the queue. .with_ecdsa_signing_enabled(false) // Turn off automatic Schnorr signatures to fill up the queue. diff --git a/rs/http_endpoints/public/tests/common/mod.rs b/rs/http_endpoints/public/tests/common/mod.rs index 3c509da1154..35e71bf5344 100644 --- a/rs/http_endpoints/public/tests/common/mod.rs +++ b/rs/http_endpoints/public/tests/common/mod.rs @@ -194,7 +194,7 @@ pub fn default_get_latest_state() -> Labeled> { routing_table: Arc::new(RoutingTable::default()), canister_migrations: Arc::new(CanisterMigrations::default()), nns_subnet_id: subnet_test_id(1), - idkg_signing_subnets: Default::default(), + chain_key_enabled_subnets: Default::default(), bitcoin_mainnet_canister_id: None, bitcoin_testnet_canister_id: None, }; diff --git a/rs/messaging/src/message_routing.rs b/rs/messaging/src/message_routing.rs index e4486b18bc0..2f842708c45 100644 --- a/rs/messaging/src/message_routing.rs +++ b/rs/messaging/src/message_routing.rs @@ -984,7 +984,7 @@ impl BatchProcessorImpl { )) })?; let subnet_features: SubnetFeatures = subnet_record.features.unwrap_or_default().into(); - let idkg_keys_held = subnet_record + let chain_keys_held = subnet_record .chain_key_config .map(|chain_key_config| { chain_key_config @@ -1012,7 +1012,7 @@ impl BatchProcessorImpl { nodes, subnet_type, subnet_features, - idkg_keys_held, + chain_keys_held, }, ); } @@ -1034,7 +1034,7 @@ impl BatchProcessorImpl { .map_err(|err| registry_error("NNS subnet ID", None, err))? .ok_or_else(|| not_found_error("NNS subnet ID", None))?; - let idkg_signing_subnets = self + let chain_key_enabled_subnets = self .registry .get_chain_key_signing_subnets(registry_version) .map_err(|err| registry_error("chain key signing subnets", None, err))? 
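
To make the shape of the renamed map concrete, here is a minimal, self-contained sketch mirroring the `chain_key_enabled_subnets` accessor that appears later in this patch; String and u64 stand in for MasterPublicKeyId and SubnetId (both stand-ins are assumptions for illustration). A key with no enabled subnets simply resolves to an empty slice rather than being stored with an empty list:

    use std::collections::BTreeMap;

    struct Topology {
        // key id -> subnets enabled to use that key
        chain_key_enabled_subnets: BTreeMap<String, Vec<u64>>,
    }

    impl Topology {
        // Same lookup behaviour as NetworkTopology::chain_key_enabled_subnets:
        // unknown keys yield an empty slice.
        fn chain_key_enabled_subnets(&self, key_id: &str) -> &[u64] {
            self.chain_key_enabled_subnets
                .get(key_id)
                .map_or(&[], |ids| &ids[..])
        }
    }

    fn main() {
        let topology = Topology {
            chain_key_enabled_subnets: BTreeMap::from([("ecdsa:key_1".to_string(), vec![1, 2])]),
        };
        assert!(topology.chain_key_enabled_subnets("ecdsa:key_1").contains(&1));
        assert!(topology.chain_key_enabled_subnets("ecdsa:key_2").is_empty());
    }

As the commit message states, only the names change: the lookup keeps the one-for-one behaviour of the former idkg_signing_subnets accessor, so callers in routing and execution are a mechanical rename.
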
@@ -1045,7 +1045,7 @@ impl BatchProcessorImpl { routing_table: Arc::new(routing_table), nns_subnet_id, canister_migrations: Arc::new(canister_migrations), - idkg_signing_subnets, + chain_key_enabled_subnets, bitcoin_testnet_canister_id: self.bitcoin_config.testnet_canister_id, bitcoin_mainnet_canister_id: self.bitcoin_config.mainnet_canister_id, }) diff --git a/rs/messaging/src/message_routing/tests.rs b/rs/messaging/src/message_routing/tests.rs index dbb09e70de4..22e6d08066d 100644 --- a/rs/messaging/src/message_routing/tests.rs +++ b/rs/messaging/src/message_routing/tests.rs @@ -333,7 +333,7 @@ struct TestRecords<'a, const N: usize> { nns_subnet_id: Integrity, // MasterPublicKeyId is used to make a key for the record. An empty `BTreeMap` therefore means no // records in the registry and wrapping it in `Integrity` would be redundant. - idkg_signing_subnets: &'a BTreeMap>>, + chain_key_enabled_subnets: &'a BTreeMap>>, provisional_whitelist: Integrity<&'a ProvisionalWhitelist>, routing_table: Integrity<&'a RoutingTable>, canister_migrations: Integrity<&'a CanisterMigrations>, @@ -472,16 +472,16 @@ impl RegistryFixture { ) } - /// Writes the iDKG signing subnets into the registry. - fn write_idkg_signing_subnets( + /// Writes the chain key enabled subnets into the registry. + fn write_chain_key_enabled_subnets( &self, - idkg_signing_subnets: &BTreeMap>>, + chain_key_enabled_subnets: &BTreeMap>>, ) -> Result<(), ProtoRegistryDataProviderError> { use ic_types::subnet_id_into_protobuf; - for (idkg_key, subnet_ids) in idkg_signing_subnets.iter() { + for (key_id, subnet_ids) in chain_key_enabled_subnets.iter() { self.write_record( - &make_chain_key_signing_subnet_list_key(idkg_key), + &make_chain_key_signing_subnet_list_key(key_id), subnet_ids .as_ref() .map(|subnet_ids| ChainKeySigningSubnetList { @@ -597,7 +597,7 @@ impl RegistryFixture { self.write_routing_table(input.routing_table)?; self.write_canister_migrations(input.canister_migrations)?; self.write_root_subnet_id(input.nns_subnet_id)?; - self.write_idkg_signing_subnets(input.idkg_signing_subnets)?; + self.write_chain_key_enabled_subnets(input.chain_key_enabled_subnets)?; self.write_provisional_whitelist(input.provisional_whitelist)?; self.write_node_public_keys(input.node_public_keys)?; self.write_api_boundary_nodes_records(input.api_boundary_node_records)?; @@ -777,7 +777,7 @@ fn try_read_registry_succeeds_with_fully_specified_registry_records() { PrincipalId::new_node_test_id(103), PrincipalId::new_subnet_test_id(107) }); - let idkg_signing_subnets = btreemap! { + let chain_key_enabled_subnets = btreemap! 
{ MasterPublicKeyId::Ecdsa(EcdsaKeyId { curve: EcdsaCurve::Secp256k1, name: "key 1".to_string(), @@ -869,7 +869,7 @@ fn try_read_registry_succeeds_with_fully_specified_registry_records() { subnet_records: [Valid(&own_subnet_record), Valid(&other_subnet_record)], ni_dkg_transcripts: [Valid(Some(&own_transcript)), Valid(Some(&other_transcript))], nns_subnet_id: Valid(nns_subnet_id), - idkg_signing_subnets: &idkg_signing_subnets, + chain_key_enabled_subnets: &chain_key_enabled_subnets, provisional_whitelist: Valid(&provisional_whitelist), routing_table: Valid(&routing_table), canister_migrations: Valid(&canister_migrations), @@ -929,14 +929,14 @@ fn try_read_registry_succeeds_with_fully_specified_registry_records() { .iter() .map(|key_config| key_config.key_id.clone()) .collect::>(), - subnet_topology.idkg_keys_held + subnet_topology.chain_keys_held ); } assert_eq!(nns_subnet_id, network_topology.nns_subnet_id); assert_eq!( - idkg_signing_subnets, + chain_key_enabled_subnets, network_topology - .idkg_signing_subnets + .chain_key_enabled_subnets .iter() .map(|(key, val)| (key.clone(), Valid(val.clone()))) .collect::>() @@ -1081,7 +1081,7 @@ fn try_read_registry_succeeds_with_minimal_registry_records() { subnet_records: [Valid(&own_subnet_record)], ni_dkg_transcripts: [Valid(Some(&own_transcript))], nns_subnet_id: Valid(nns_subnet_id), - idkg_signing_subnets: &BTreeMap::default(), + chain_key_enabled_subnets: &BTreeMap::default(), provisional_whitelist: Missing, routing_table: Missing, canister_migrations: Missing, @@ -1182,7 +1182,7 @@ fn try_to_read_registry_returns_errors_for_corrupted_records() { subnet_records: [Valid(&own_subnet_record)], ni_dkg_transcripts: [Valid(Some(&own_transcript))], nns_subnet_id: Valid(nns_subnet_id), - idkg_signing_subnets: &BTreeMap::default(), + chain_key_enabled_subnets: &BTreeMap::default(), provisional_whitelist: Missing, routing_table: Missing, canister_migrations: Missing, @@ -1365,7 +1365,7 @@ fn try_read_registry_can_skip_missing_or_invalid_node_public_keys() { subnet_records: [Valid(&own_subnet_record)], ni_dkg_transcripts: [Valid(Some(&own_transcript))], nns_subnet_id: Valid(nns_subnet_id), - idkg_signing_subnets: &BTreeMap::default(), + chain_key_enabled_subnets: &BTreeMap::default(), provisional_whitelist: Missing, routing_table: Missing, canister_migrations: Missing, @@ -1454,7 +1454,7 @@ fn try_read_registry_can_skip_missing_or_invalid_fields_of_api_boundary_nodes() subnet_records: [Valid(&own_subnet_record)], ni_dkg_transcripts: [Valid(Some(&own_transcript))], nns_subnet_id: Valid(nns_subnet_id), - idkg_signing_subnets: &BTreeMap::default(), + chain_key_enabled_subnets: &BTreeMap::default(), provisional_whitelist: Missing, routing_table: Missing, canister_migrations: Missing, @@ -1603,7 +1603,7 @@ fn check_critical_error_counter_is_not_incremented_for_transient_error() { subnet_records: [Valid(&own_subnet_record)], ni_dkg_transcripts: [Valid(Some(&own_transcript))], nns_subnet_id: Valid(nns_subnet_id), - idkg_signing_subnets: &BTreeMap::default(), + chain_key_enabled_subnets: &BTreeMap::default(), provisional_whitelist: Missing, routing_table: Missing, canister_migrations: Missing, @@ -1754,7 +1754,7 @@ fn process_batch_updates_subnet_metrics() { PrincipalId::new_node_test_id(103), PrincipalId::new_subnet_test_id(107) }); - let idkg_signing_subnets = btreemap! { + let chain_key_enabled_subnets = btreemap! 
{ MasterPublicKeyId::Ecdsa(EcdsaKeyId { curve: EcdsaCurve::Secp256k1, name: "key 1".to_string(), @@ -1810,7 +1810,7 @@ fn process_batch_updates_subnet_metrics() { subnet_records: [Valid(&own_subnet_record), Valid(&other_subnet_record)], ni_dkg_transcripts: [Valid(Some(&own_transcript)), Valid(Some(&other_transcript))], nns_subnet_id: Valid(nns_subnet_id), - idkg_signing_subnets: &idkg_signing_subnets, + chain_key_enabled_subnets: &chain_key_enabled_subnets, provisional_whitelist: Valid(&provisional_whitelist), routing_table: Valid(&routing_table), canister_migrations: Valid(&canister_migrations), diff --git a/rs/messaging/src/state_machine/tests.rs b/rs/messaging/src/state_machine/tests.rs index 235633b11c0..33e309f8751 100644 --- a/rs/messaging/src/state_machine/tests.rs +++ b/rs/messaging/src/state_machine/tests.rs @@ -117,7 +117,7 @@ fn test_fixture(provided_batch: &Batch) -> StateMachineTestFixture { nodes: BTreeSet::new(), subnet_type: SubnetType::Application, subnet_features: SubnetFeatures::default(), - idkg_keys_held: BTreeSet::new(), + chain_keys_held: BTreeSet::new(), }, ); diff --git a/rs/pocket_ic_server/src/pocket_ic.rs b/rs/pocket_ic_server/src/pocket_ic.rs index da214c14218..19f1d5b5f27 100644 --- a/rs/pocket_ic_server/src/pocket_ic.rs +++ b/rs/pocket_ic_server/src/pocket_ic.rs @@ -728,7 +728,7 @@ impl PocketIc { algorithm, name: name.to_string(), }; - builder = builder.with_idkg_key(MasterPublicKeyId::Schnorr(key_id)); + builder = builder.with_chain_key(MasterPublicKeyId::Schnorr(key_id)); } } @@ -737,7 +737,7 @@ impl PocketIc { curve: EcdsaCurve::Secp256k1, name: name.to_string(), }; - builder = builder.with_idkg_key(MasterPublicKeyId::Ecdsa(key_id)); + builder = builder.with_chain_key(MasterPublicKeyId::Ecdsa(key_id)); } } diff --git a/rs/protobuf/def/state/metadata/v1/metadata.proto b/rs/protobuf/def/state/metadata/v1/metadata.proto index 06194d23af8..f48c5afe94b 100644 --- a/rs/protobuf/def/state/metadata/v1/metadata.proto +++ b/rs/protobuf/def/state/metadata/v1/metadata.proto @@ -28,7 +28,7 @@ message SubnetTopology { registry.subnet.v1.SubnetFeatures subnet_features = 4; reserved 5; reserved "ecdsa_keys_held"; - repeated types.v1.MasterPublicKeyId idkg_keys_held = 6; + repeated types.v1.MasterPublicKeyId chain_keys_held = 6; } message SubnetsEntry { @@ -36,7 +36,7 @@ message SubnetsEntry { SubnetTopology subnet_topology = 2; } -message IDkgKeyEntry { +message ChainKeySubnetEntry { types.v1.MasterPublicKeyId key_id = 1; repeated types.v1.SubnetId subnet_ids = 2; } @@ -50,7 +50,7 @@ message NetworkTopology { reserved "ecdsa_signing_subnets"; repeated types.v1.CanisterId bitcoin_testnet_canister_ids = 6; repeated types.v1.CanisterId bitcoin_mainnet_canister_ids = 7; - repeated IDkgKeyEntry idkg_signing_subnets = 8; + repeated ChainKeySubnetEntry chain_key_enabled_subnets = 8; } message SetupInitialDkgContext { diff --git a/rs/protobuf/src/gen/state/state.metadata.v1.rs b/rs/protobuf/src/gen/state/state.metadata.v1.rs index 02927c828f4..c396d91c3ff 100644 --- a/rs/protobuf/src/gen/state/state.metadata.v1.rs +++ b/rs/protobuf/src/gen/state/state.metadata.v1.rs @@ -26,7 +26,8 @@ pub struct SubnetTopology { pub subnet_features: ::core::option::Option, #[prost(message, repeated, tag = "6")] - pub idkg_keys_held: ::prost::alloc::vec::Vec, + pub chain_keys_held: + ::prost::alloc::vec::Vec, } #[derive(Clone, PartialEq, ::prost::Message)] pub struct SubnetsEntry { @@ -36,7 +37,7 @@ pub struct SubnetsEntry { pub subnet_topology: ::core::option::Option, } #[derive(Clone, PartialEq, 
::prost::Message)] -pub struct IDkgKeyEntry { +pub struct ChainKeySubnetEntry { #[prost(message, optional, tag = "1")] pub key_id: ::core::option::Option, #[prost(message, repeated, tag = "2")] @@ -62,7 +63,7 @@ pub struct NetworkTopology { pub bitcoin_mainnet_canister_ids: ::prost::alloc::vec::Vec, #[prost(message, repeated, tag = "8")] - pub idkg_signing_subnets: ::prost::alloc::vec::Vec, + pub chain_key_enabled_subnets: ::prost::alloc::vec::Vec, } #[derive(Clone, PartialEq, ::prost::Message)] pub struct SetupInitialDkgContext { diff --git a/rs/replicated_state/src/metadata_state.rs b/rs/replicated_state/src/metadata_state.rs index d49ea9a8aec..5d55f2380ed 100644 --- a/rs/replicated_state/src/metadata_state.rs +++ b/rs/replicated_state/src/metadata_state.rs @@ -191,9 +191,9 @@ pub struct NetworkTopology { pub canister_migrations: Arc, pub nns_subnet_id: SubnetId, - /// Mapping from iDKG key_id to a list of subnets which can sign with the - /// given key. Keys without any signing subnets are not included in the map. - pub idkg_signing_subnets: BTreeMap>, + /// Mapping from master public key_id to a list of subnets which can use the + /// given key. Keys without any chain-key enabled subnets are not included in the map. + pub chain_key_enabled_subnets: BTreeMap>, /// The ID of the canister to forward bitcoin testnet requests to. pub bitcoin_testnet_canister_id: Option, @@ -224,7 +224,7 @@ impl Default for NetworkTopology { routing_table: Default::default(), canister_migrations: Default::default(), nns_subnet_id: SubnetId::new(PrincipalId::new_anonymous()), - idkg_signing_subnets: Default::default(), + chain_key_enabled_subnets: Default::default(), bitcoin_testnet_canister_id: None, bitcoin_mainnet_canister_id: None, } @@ -232,9 +232,9 @@ impl Default for NetworkTopology { } impl NetworkTopology { - /// Returns a list of subnets where the iDKG feature is enabled. - pub fn idkg_signing_subnets(&self, key_id: &MasterPublicKeyId) -> &[SubnetId] { - self.idkg_signing_subnets + /// Returns a list of subnets where the chain key feature is enabled. 
+ pub fn chain_key_enabled_subnets(&self, key_id: &MasterPublicKeyId) -> &[SubnetId] { + self.chain_key_enabled_subnets .get(key_id) .map_or(&[], |ids| &ids[..]) } @@ -269,15 +269,15 @@ impl From<&NetworkTopology> for pb_metadata::NetworkTopology { Some(c) => vec![pb_types::CanisterId::from(c)], None => vec![], }, - idkg_signing_subnets: item - .idkg_signing_subnets + chain_key_enabled_subnets: item + .chain_key_enabled_subnets .iter() .map(|(key_id, subnet_ids)| { let subnet_ids = subnet_ids .iter() .map(|id| subnet_id_into_protobuf(*id)) .collect(); - pb_metadata::IDkgKeyEntry { + pb_metadata::ChainKeySubnetEntry { key_id: Some(key_id.into()), subnet_ids, } @@ -306,14 +306,14 @@ impl TryFrom for NetworkTopology { "NetworkTopology::nns_subnet_id", )?)?; - let mut idkg_signing_subnets = BTreeMap::new(); - for entry in item.idkg_signing_subnets { + let mut chain_key_enabled_subnets = BTreeMap::new(); + for entry in item.chain_key_enabled_subnets { let mut subnet_ids = vec![]; for subnet_id in entry.subnet_ids { subnet_ids.push(subnet_id_try_from_protobuf(subnet_id)?); } - idkg_signing_subnets.insert( - try_from_option_field(entry.key_id, "IDkgKeyEntry::key_id")?, + chain_key_enabled_subnets.insert( + try_from_option_field(entry.key_id, "ChainKeySubnetEntry::key_id")?, subnet_ids, ); } @@ -343,7 +343,7 @@ impl TryFrom for NetworkTopology { .unwrap_or_default() .into(), nns_subnet_id, - idkg_signing_subnets, + chain_key_enabled_subnets, bitcoin_testnet_canister_id, bitcoin_mainnet_canister_id, }) @@ -358,12 +358,12 @@ pub struct SubnetTopology { pub nodes: BTreeSet, pub subnet_type: SubnetType, pub subnet_features: SubnetFeatures, - /// iDKG keys held by this subnet. Just because a subnet holds an iDKG key - /// doesn't mean the subnet has been enabled to sign with that key. This + /// Chain keys held by this subnet. Just because a subnet holds a Chain key + /// doesn't mean the subnet has been enabled to use that key. This /// will happen when a key is shared with a second subnet which holds it as /// a backup. An additional NNS proposal will be needed to allow the subnet - /// holding the key as backup to actually produce signatures. - pub idkg_keys_held: BTreeSet, + /// holding the key as backup to actually produce signatures or VetKd key derivations. 
+ pub chain_keys_held: BTreeSet, } impl From<&SubnetTopology> for pb_metadata::SubnetTopology { @@ -379,7 +379,7 @@ impl From<&SubnetTopology> for pb_metadata::SubnetTopology { .collect(), subnet_type: i32::from(item.subnet_type), subnet_features: Some(pb_subnet::SubnetFeatures::from(item.subnet_features)), - idkg_keys_held: item.idkg_keys_held.iter().map(|k| k.into()).collect(), + chain_keys_held: item.chain_keys_held.iter().map(|k| k.into()).collect(), } } } @@ -392,9 +392,9 @@ impl TryFrom for SubnetTopology { nodes.insert(node_id_try_from_option(entry.node_id)?); } - let mut idkg_keys_held = BTreeSet::new(); - for key in item.idkg_keys_held { - idkg_keys_held.insert(MasterPublicKeyId::try_from(key)?); + let mut chain_keys_held = BTreeSet::new(); + for key in item.chain_keys_held { + chain_keys_held.insert(MasterPublicKeyId::try_from(key)?); } Ok(Self { @@ -408,7 +408,7 @@ impl TryFrom for SubnetTopology { .subnet_features .map(SubnetFeatures::from) .unwrap_or_default(), - idkg_keys_held, + chain_keys_held, }) } } diff --git a/rs/replicated_state/src/metadata_state/tests.rs b/rs/replicated_state/src/metadata_state/tests.rs index a43a413420f..7ed1bc4818a 100644 --- a/rs/replicated_state/src/metadata_state/tests.rs +++ b/rs/replicated_state/src/metadata_state/tests.rs @@ -933,7 +933,7 @@ fn empty_network_topology() { }; assert_eq!( - network_topology.idkg_signing_subnets(&MasterPublicKeyId::Ecdsa(make_key_id())), + network_topology.chain_key_enabled_subnets(&MasterPublicKeyId::Ecdsa(make_key_id())), vec![] ); } @@ -946,14 +946,14 @@ fn network_topology_ecdsa_subnets() { routing_table: Arc::new(RoutingTable::default()), canister_migrations: Arc::new(CanisterMigrations::default()), nns_subnet_id: subnet_test_id(42), - idkg_signing_subnets: btreemap! { + chain_key_enabled_subnets: btreemap! { key.clone() => vec![subnet_test_id(1)], }, ..Default::default() }; assert_eq!( - network_topology.idkg_signing_subnets(&key), + network_topology.chain_key_enabled_subnets(&key), &[subnet_test_id(1)] ); } diff --git a/rs/state_machine_tests/src/lib.rs b/rs/state_machine_tests/src/lib.rs index 839a4ed0e1f..98a1897d39a 100644 --- a/rs/state_machine_tests/src/lib.rs +++ b/rs/state_machine_tests/src/lib.rs @@ -279,7 +279,7 @@ pub fn finalize_registry( fn make_nodes_registry( subnet_id: SubnetId, subnet_type: SubnetType, - idkg_keys_signing_enabled_status: &BTreeMap, + chain_keys_enabled_status: &BTreeMap, features: SubnetFeatures, registry_data_provider: Arc, nodes: &Vec, @@ -293,15 +293,15 @@ fn make_nodes_registry( let latest_registry_version = registry_data_provider.latest_version(); RegistryVersion::from(latest_registry_version.get() + 1) }; - // ECDSA subnet_id must be different from nns_subnet_id, otherwise - // `sign_with_ecdsa` won't be charged. + // subnet_id must be different from nns_subnet_id, otherwise + // the IC00 call won't be charged. 
let subnet_id_proto = SubnetIdProto { principal_id: Some(PrincipalIdIdProto { raw: subnet_id.get_ref().to_vec(), }), }; - for (key_id, is_signing_enabled) in idkg_keys_signing_enabled_status { - if !*is_signing_enabled { + for (key_id, is_enabled) in chain_keys_enabled_status { + if !*is_enabled { continue; } registry_data_provider @@ -408,7 +408,7 @@ fn make_nodes_registry( .with_max_block_payload_size(max_block_payload_size) .with_dkg_interval_length(u64::MAX / 2) // use the genesis CUP throughout the test .with_chain_key_config(ChainKeyConfig { - key_configs: idkg_keys_signing_enabled_status + key_configs: chain_keys_enabled_status .iter() .map(|(key_id, _)| KeyConfig { key_id: key_id.clone(), @@ -842,7 +842,7 @@ pub struct StateMachine { // (equal to `time` when this `StateMachine` is initialized) time_of_last_round: RwLock