Centralized Signed Blobs #604

Closed · wants to merge 18 commits
Changes from 9 commits
19 changes: 15 additions & 4 deletions Cargo.lock


8 changes: 1 addition & 7 deletions Cargo.toml
@@ -109,13 +109,6 @@ serde_yaml = "0.9.34"

# External Dependencies
## Aptos dependencies
aptos-api = { git = "https://github.com/movementlabsxyz/aptos-core", rev = "6d2ec939e10cc00283519dd0ad9d1cf12e7bf80f" } ## Aptos dependencies
aptos-api-types = { git = "https://github.com/movementlabsxyz/aptos-core", rev = "6d2ec939e10cc00283519dd0ad9d1cf12e7bf80f" }
aptos-bitvec = { git = "https://github.com/movementlabsxyz/aptos-core", rev = "6d2ec939e10cc00283519dd0ad9d1cf12e7bf80f" }
aptos-block-executor = { git = "https://github.com/movementlabsxyz/aptos-core.git", rev = "6d2ec939e10cc00283519dd0ad9d1cf12e7bf80f" }
aptos-cached-packages = { git = "https://github.com/movementlabsxyz/aptos-core", rev = "6d2ec939e10cc00283519dd0ad9d1cf12e7bf80f" }
aptos-config = { git = "https://github.com/movementlabsxyz/aptos-core", rev = "6d2ec939e10cc00283519dd0ad9d1cf12e7bf80f" }
aptos-consensus-types = { git = "https://github.com/movementlabsxyz/aptos-core", rev = "6d2ec939e10cc00283519dd0ad9d1cf12e7bf80f" }
aptos-api = { git = "https://github.com/movementlabsxyz/aptos-core", rev = "c9d4c2d25dfdde02eb2fd3bf73f39ac9d6b3300b" }
aptos-api-types = { git = "https://github.com/movementlabsxyz/aptos-core", rev = "c9d4c2d25dfdde02eb2fd3bf73f39ac9d6b3300b" }
aptos-bitvec = { git = "https://github.com/movementlabsxyz/aptos-core", rev = "c9d4c2d25dfdde02eb2fd3bf73f39ac9d6b3300b" }
@@ -299,6 +292,7 @@ rustix = "0.38.34"
paste = "1.0.15"
uuid = { version = "1.10.0", features = ["v4"] }
blake-3 = "1.4.0"
ecdsa = { version = "0.16.9", features = ["signing", "verifying", "der"] }

# trying to pin diesel
# diesel = "=2.1.1"
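
The new `ecdsa` workspace dependency underpins the signed-blob flow. As a point of reference, here is a minimal signing and verification sketch, assuming a concrete curve backend such as `k256` plus `rand_core` are also available in the workspace (neither is part of this diff):

// Sketch only: assumes the k256 crate (a secp256k1 backend for the generic
// `ecdsa` crate pinned above) and rand_core are available.
use k256::ecdsa::{
    signature::{Signer, Verifier},
    Signature, SigningKey, VerifyingKey,
};
use rand_core::OsRng;

fn main() {
    // Generate a signing key for the blob signer and derive its public key.
    let signing_key = SigningKey::random(&mut OsRng);
    let verifying_key = VerifyingKey::from(&signing_key);

    // Sign the raw blob bytes (a real signer might hash them first).
    let blob_data = b"example blob payload";
    let signature: Signature = signing_key.sign(blob_data);

    // Anyone holding the signer's public key can check the signature.
    assert!(verifying_key.verify(blob_data, &signature).is_ok());
}
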
4 changes: 2 additions & 2 deletions networks/suzuka/suzuka-full-node/src/da_db.rs
@@ -32,7 +32,7 @@ impl DaDB {
Ok(Self { inner: Arc::new(db) })
}

pub async fn add_executed_block(&self, id: String) -> Result<(), anyhow::Error> {
pub async fn add_executed_block(&self, id: Vec<u8>) -> Result<(), anyhow::Error> {
let da_db = self.inner.clone();
tokio::task::spawn_blocking(move || {
let cf = da_db
@@ -46,7 +46,7 @@ impl DaDB {
Ok(())
}

pub async fn has_executed_block(&self, id: String) -> Result<bool, anyhow::Error> {
pub async fn has_executed_block(&self, id: Vec<u8>) -> Result<bool, anyhow::Error> {
let da_db = self.inner.clone();
let id = tokio::task::spawn_blocking(move || {
let cf = da_db
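
With block ids stored as raw bytes, call sites now pass a `Vec<u8>` rather than a string. A hypothetical usage sketch of the byte-keyed API (the `mark_block` helper below is illustrative only, assuming a `DaDB` handle is already open):

// Hypothetical helper, not part of the PR: records a block as executed
// unless the DA database has already seen its id.
async fn mark_block(da_db: &DaDB, block_id: Vec<u8>) -> Result<(), anyhow::Error> {
    if !da_db.has_executed_block(block_id.clone()).await? {
        da_db.add_executed_block(block_id).await?;
    }
    Ok(())
}
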
@@ -134,7 +134,7 @@ where

// get the transactions
let transactions_count = block.transactions().len();
let span = info_span!(target: "movement_timing", "execute_block", id = %block_id);
let span = info_span!(target: "movement_timing", "execute_block", id = ?block_id);
let commitment =
self.execute_block_with_retries(block, block_timestamp).instrument(span).await?;

@@ -146,7 +146,7 @@ where
self.da_db.set_synced_height(da_height - 1).await?;

// set the block as executed
self.da_db.add_executed_block(block_id.to_string()).await?;
self.da_db.add_executed_block(block_id.clone()).await?;

// todo: this needs defaults
if self.settlement_enabled() {
@@ -158,7 +158,7 @@ where
}
}
} else {
info!(block_id = %block_id, "Skipping settlement");
info!(block_id = ?block_id, "Skipping settlement");
}

Ok(())
2 changes: 1 addition & 1 deletion process-compose/suzuka-full-node/process-compose.setup.yml
@@ -4,7 +4,7 @@ environment:

processes:

suzuka-setup:
setup:
command: |
suzuka-full-node-setup
depends_on:
@@ -4,17 +4,12 @@ package movementlabs.protocol_units.da.m1.light_node.v1beta1;

// Request and response messages
message Blob {
string blob_id = 1;
bytes blob_id = 1;
bytes data = 2;
uint64 height = 3;
// bytes signature = 4; // at some point a signature will be added here
bytes signature = 4; // at some point a signature will be added here
uint64 timestamp = 5;
}

enum VerificationMode {
COWBOY = 0;
VALIDATOR_IN = 1;
M_OF_N = 2;
bytes signer = 6;
}

message BlobResponse {
@@ -82,16 +77,7 @@ message BatchWriteResponse {
repeated BlobResponse blobs = 1;
}

message UpdateVerificationParametersRequest {
VerificationMode mode = 1;
repeated string signers = 2;
uint32 m = 3;
uint32 n = 4;
}

message UpdateVerificationParametersResponse {
VerificationMode mode = 1;
}


// LightNode service definition
service LightNodeService {
@@ -109,7 +95,4 @@ service LightNodeService {
rpc BatchRead (BatchReadRequest) returns (BatchReadResponse);
rpc BatchWrite (BatchWriteRequest) returns (BatchWriteResponse);

// Update and manage verification parameters.
rpc UpdateVerificationParameters (UpdateVerificationParametersRequest) returns (UpdateVerificationParametersResponse);

}
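
With the revised `Blob` message, each blob now carries a binary id, a signature, and the signer's public key alongside the payload. A rough sketch of populating the prost-generated type, assuming the struct generated in `m1-da-light-node-grpc` mirrors the proto field names (the exact generated layout is an assumption, not code from this PR):

use m1_da_light_node_grpc::Blob;

// Sketch: field names follow the proto above; signature and signer are the
// raw ECDSA signature bytes and the signer's public key bytes respectively.
fn make_signed_blob(
    blob_id: Vec<u8>,
    data: Vec<u8>,
    height: u64,
    signature: Vec<u8>,
    timestamp: u64,
    signer: Vec<u8>,
) -> Blob {
    Blob { blob_id, data, height, signature, timestamp, signer }
}
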
2 changes: 2 additions & 0 deletions protocol-units/da/m1/light-node-client/Cargo.toml
@@ -18,6 +18,8 @@ anyhow = { workspace = true }
tokio-stream = { workspace = true }
movement-types = { workspace = true }
serde_json = { workspace = true }
ecdsa = { workspace = true }
tonic = { workspace = true }

[features]
sequencer = []
2 changes: 2 additions & 0 deletions protocol-units/da/m1/light-node-verifier/Cargo.toml
@@ -26,7 +26,9 @@ celestia-types = { workspace = true }
anyhow = { workspace = true }
hex = { workspace = true }
async-stream = { workspace = true }
thiserror = { workspace = true }
serde_json = { workspace = true }
ecdsa = { workspace = true, features = [ "signing", "verifying", "der"] }

[dev-dependencies]
m1-da-light-node-setup = { workspace = true }
@@ -1,79 +1,70 @@
use crate::Verifier;
use celestia_rpc::{BlobClient, Client, HeaderClient};
use crate::{Error, Verified, VerifierOperations};
use celestia_rpc::Client;
#[cfg(feature = "pessimistic")]
use celestia_rpc::{BlobClient, HeaderClient};
use celestia_types::{nmt::Namespace, Blob};
use m1_da_light_node_grpc::VerificationMode;
use m1_da_light_node_util::inner_blob::InnerBlob;
use std::sync::Arc;

#[derive(Clone)]
pub struct V1Verifier {
pub struct Verifier {
pub client: Arc<Client>,
pub namespace: Namespace,
}

#[tonic::async_trait]
impl Verifier for V1Verifier {
/// All verification is the same for now
async fn verify(
&self,
_verification_mode: VerificationMode,
blob: &[u8],
height: u64,
) -> Result<bool, anyhow::Error> {
let celestia_blob = Blob::new(self.namespace.clone(), blob.to_vec())?;

celestia_blob.validate()?;

// wait for the header to be at the correct height
self.client.header_wait_for_height(height).await?;

// get the root
let dah = self.client.header_get_by_height(height).await?.dah;
let root_hash = dah.row_root(0).ok_or(anyhow::anyhow!("No root hash found"))?;

// get the proof
let proofs = self
.client
.blob_get_proof(height, self.namespace.clone(), celestia_blob.commitment)
.await?;

// get the leaves
let leaves = celestia_blob.to_shares()?;

// check if included
for proof in proofs.iter() {
proof
.verify_complete_namespace(&root_hash, &leaves, self.namespace.into())
.map_err(|e| anyhow::anyhow!("Failed to verify proof: {:?}", e))?;
}

Ok(true)
impl Verifier {
pub fn new(client: Arc<Client>, namespace: Namespace) -> Self {
Self { client, namespace }
}
}

async fn verify_cowboy(
&self,
_verification_mode: VerificationMode,
_blob: &[u8],
_height: u64,
) -> Result<bool, anyhow::Error> {
unimplemented!()
}
#[tonic::async_trait]
impl VerifierOperations<Blob, InnerBlob> for Verifier {
/// Verifies a Celestia Blob as a Valid InnerBlob
async fn verify(&self, blob: Blob, _height: u64) -> Result<Verified<InnerBlob>, Error> {
// @l-monninger: the light node itself does most of the work of verifying blobs. The verification under the feature flag below is useful in zero-trust environments.
#[cfg(feature = "pessimistic")]
{
blob.validate().map_err(|e| Error::Validation(e.to_string()))?;

// wait for the header to be at the correct height
self.client
.header_wait_for_height(height)
.await
.map_err(|e| Error::Internal(e.to_string()))?;

// get the root
let dah = self
.client
.header_get_by_height(height)
.await
.map_err(|e| Error::Internal(e.to_string()))?
.dah;
let root_hash = dah.row_root(0).ok_or(Error::Validation("No root hash".to_string()))?;

// get the proof
let proofs = self
.client
.blob_get_proof(height, self.namespace.clone(), blob.commitment)
.await
.map_err(|e| Error::Internal(e.to_string()))?;

// get the leaves
let leaves = blob.to_shares().map_err(|e| Error::Internal(e.to_string()))?;

// check if included
for proof in proofs.iter() {
proof
.verify_complete_namespace(&root_hash, &leaves, self.namespace.into())
.map_err(|_e| {
Error::Validation("failed to verify complete namespace".to_string())
})?;
}
}

async fn verify_m_of_n(
&self,
_verification_mode: VerificationMode,
_blob: &[u8],
_height: u64,
) -> Result<bool, anyhow::Error> {
unimplemented!()
}
let inner_blob = InnerBlob::try_from(blob).map_err(|e| Error::Internal(e.to_string()))?;

async fn verifiy_validator_in(
&self,
_verification_mode: VerificationMode,
_blob: &[u8],
_height: u64,
) -> Result<bool, anyhow::Error> {
unimplemented!()
Ok(Verified::new(inner_blob))
}
}

@@ -109,7 +100,7 @@ mod tests {
let client = Arc::new(config.connect_celestia().await?);
let celestia_namespace = config.celestia_namespace();

let verifier = V1Verifier { client: client.clone(), namespace: celestia_namespace.clone() };
let verifier = Verifier { client: client.clone(), namespace: celestia_namespace.clone() };

let data = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
let blob = Blob::new(celestia_namespace.clone(), data.clone())?;
@@ -142,7 +133,7 @@ mod tests {
let client = Arc::new(config.connect_celestia().await?);
let celestia_namespace = config.celestia_namespace();

let verifier = V1Verifier { client: client.clone(), namespace: celestia_namespace.clone() };
let verifier = Verifier { client: client.clone(), namespace: celestia_namespace.clone() };

let data = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
let blob = Blob::new(celestia_namespace.clone(), data.clone())?;
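
The rewritten verifier implements a generic `VerifierOperations` trait imported from the crate root in place of the old `Verifier` trait. The trait's definition is not shown in this diff; inferred from the impl above, it presumably looks roughly like the following sketch (the `Verified` wrapper and `Error` variants are reconstructed from how they are used here, not copied from the crate):

// Inferred shapes only; the real definitions live elsewhere in the
// light-node-verifier crate and are not part of this diff.
#[derive(Debug)]
pub enum Error {
    Validation(String),
    Internal(String),
}

pub struct Verified<T>(T);

impl<T> Verified<T> {
    pub fn new(inner: T) -> Self {
        Self(inner)
    }

    pub fn into_inner(self) -> T {
        self.0
    }
}

// Generic over the input blob type B and the verified output type V,
// mirroring the VerifierOperations<Blob, InnerBlob> impl above.
#[tonic::async_trait]
pub trait VerifierOperations<B, V> {
    async fn verify(&self, blob: B, height: u64) -> Result<Verified<V>, Error>;
}
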