Commit e078a45

Custom Digest-md5 implementation
1 parent 18aa506 commit e078a45

File tree: 13 files changed (+396 -225 lines)

Cargo.lock (+9 -7): generated file; diff not rendered

README.md (+5 -6)
@@ -43,21 +43,20 @@ All other settings are generally assumed to be the defaults currently. For insta
 
 ### Mac
 ```
-brew install gsasl krb5
+brew install krb5
 # You might need these env vars on newer Macs
 export BINDGEN_EXTRA_CLANG_ARGS="-I/opt/homebrew/include"
 export LIBRARY_PATH=/opt/homebrew/lib
-cargo build --features token,kerberos
+cargo build --features kerberos
 ```
 
 ### Ubuntu
 ```
-apt-get install clang libkrb5-dev libgsasl-dev
-cargo build --features token,kerberos
+apt-get install clang libkrb5-dev
+cargo build --features kerberos
 ```
 
 ## Crate features
-- `token` - enables token based DIGEST-MD5 authentication support. This uses the `gsasl` native library and only supports authentication, not integrity or confidentiality
 - `kerberos` - enables kerberos GSSAPI authentication support. This uses the `libgssapi` crate and supports integrity as well as confidentiality
 
 ## Object store implementation
@@ -67,7 +66,7 @@ An object_store implementation for HDFS is provided in the [hdfs-native-object-s
 
 The tests are mostly integration tests that utilize a small Java application in `rust/minidfs/` that runs a custom `MiniDFSCluster`. To run the tests, you need to have Java, Maven, Hadoop binaries, and Kerberos tools available and on your path. Any Java version between 8 and 17 should work.
 
 ```bash
-cargo test -p hdfs-native --features token,kerberos,integration-test
+cargo test -p hdfs-native --features kerberos,integration-test
 ```
 
 ### Python tests

crates/hdfs-native-object-store/Cargo.toml (-1)
@@ -29,6 +29,5 @@ which = "4"
 
 [features]
 kerberos = ["hdfs-native/kerberos"]
-token = ["hdfs-native/token"]
 
 integration-test = ["hdfs-native/integration-test"]

crates/hdfs-native/Cargo.toml (+4 -3)
@@ -17,14 +17,16 @@ chrono = { workspace = true }
 crc = "3.1.0-beta.1"
 futures = { workspace = true }
 g2p = "1"
-gsasl-sys = { version = "0.2", default-features = false, optional = true }
 libc = "0.2"
 libgssapi = { version = "0.7", default-features = false, optional = true }
 log = "0.4"
+md5 = "0.7"
 num-traits = "0.2"
 once_cell = "1"
 prost = "0.12"
 prost-types = "0.12"
+rand = "0.8"
+regex = "1"
 roxmltree = "0.18"
 socket2 = "0.5"
 thiserror = "1"
@@ -48,7 +50,6 @@ which = "4"
 
 [features]
 kerberos = ["libgssapi"]
-token = ["gsasl-sys"]
 
 generate-protobuf = ["prost-build", "protobuf-src"]
 integration-test = ["which"]
@@ -64,4 +65,4 @@ harness = false
 
 [[bench]]
 name = "rpc"
-harness = false
+harness = false
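
The three new dependencies line up with what a pure-Rust DIGEST-MD5 (RFC 2831) client needs: `regex` to parse the server challenge, `rand` to generate a client nonce, and `md5` to compute the response digest. The commit's security-module changes themselves are not among the diffs rendered on this page, so the sketch below is only an illustration of those pieces, with all function names chosen for the example.

```rust
use std::collections::HashMap;

use regex::Regex;

/// Parse a DIGEST-MD5 challenge such as
/// `realm="hdfs",nonce="...",qop="auth,auth-int,auth-conf",charset=utf-8,algorithm=md5-sess`
/// into key/value pairs. Quoted values may contain commas (the qop list),
/// so match quoted and bare values separately instead of splitting on ','.
fn parse_challenge(challenge: &str) -> HashMap<String, String> {
    let re = Regex::new(r#"([a-zA-Z0-9-]+)=(?:"([^"]*)"|([^,]+))"#).unwrap();
    re.captures_iter(challenge)
        .map(|c| {
            let value = c.get(2).or_else(|| c.get(3)).map_or("", |m| m.as_str());
            (c[1].to_ascii_lowercase(), value.to_string())
        })
        .collect()
}

/// A random client nonce, one obvious use for the new `rand` dependency.
fn random_cnonce() -> String {
    let bytes: [u8; 16] = rand::random();
    bytes.iter().map(|b| format!("{b:02x}")).collect()
}

/// Hex-encoded MD5, the H()/HEX() building block from RFC 2831.
fn hex_md5(data: impl AsRef<[u8]>) -> String {
    format!("{:x}", md5::compute(data))
}

/// The `response` value from RFC 2831 (no authzid):
///   A1 = H(username:realm:password) ":" nonce ":" cnonce   (first part is raw bytes)
///   A2 = "AUTHENTICATE:" digest-uri, plus ":00000000000000000000000000000000" for auth-int/auth-conf
///   response = HEX(H(HEX(H(A1)) ":" nonce ":" nc ":" cnonce ":" qop ":" HEX(H(A2))))
#[allow(clippy::too_many_arguments)]
fn digest_md5_response(
    username: &str,
    realm: &str,
    password: &str,
    nonce: &str,
    cnonce: &str,
    nc: &str,         // e.g. "00000001"
    qop: &str,        // "auth", "auth-int", or "auth-conf"
    digest_uri: &str, // e.g. "hdfs/0.0.0.0"
) -> String {
    // A1 begins with the *raw* 16-byte MD5 of "username:realm:password".
    let mut a1 = md5::compute(format!("{username}:{realm}:{password}")).0.to_vec();
    a1.extend_from_slice(format!(":{nonce}:{cnonce}").as_bytes());

    let a2 = if qop == "auth" {
        format!("AUTHENTICATE:{digest_uri}")
    } else {
        // Integrity/confidentiality append a block of 32 zeros.
        format!("AUTHENTICATE:{digest_uri}:00000000000000000000000000000000")
    };

    hex_md5(format!(
        "{}:{nonce}:{nc}:{cnonce}:{qop}:{}",
        hex_md5(a1),
        hex_md5(a2)
    ))
}
```

With these pieces, the client reply is assembled as the usual comma-separated list (`username="..."`, `realm="..."`, `nonce="..."`, `cnonce="..."`, `nc=00000001`, `qop=auth`, `digest-uri="..."`, `response=...`); the exact shape used by this commit lives in the files not shown here.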

crates/hdfs-native/build.rs (-3)
@@ -1,9 +1,6 @@
 use std::io::Result;
 
 fn main() -> Result<()> {
-    #[cfg(feature = "token")]
-    println!("cargo:rustc-link-lib=gsasl");
-
     #[cfg(feature = "generate-protobuf")]
     {
         std::env::set_var("PROTOC", protobuf_src::protoc());

crates/hdfs-native/minidfs/src/main/java/main/Main.java (+1 -3)
@@ -64,9 +64,7 @@ public static void main(String args[]) throws Exception {
             conf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, "hdfs/localhost@" + kdc.getRealm());
             conf.set(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, "true");
             conf.set(DFSConfigKeys.IGNORE_SECURE_PORTS_FOR_TESTING_KEY, "true");
-            if (flags.contains("data_transfer_security")) {
-                conf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
-            }
+            conf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
         }
 
         HdfsConfiguration hdfsConf = new HdfsConfiguration(conf);

crates/hdfs-native/src/hdfs/connection.rs (+1 -4)
@@ -28,13 +28,11 @@ use crate::proto::common::rpc_response_header_proto::RpcStatusProto;
 use crate::proto::common::TokenProto;
 use crate::proto::hdfs::DatanodeIdProto;
 use crate::proto::{common, hdfs};
+use crate::security::sasl::SaslDatanodeConnection;
 use crate::security::sasl::{SaslReader, SaslRpcClient, SaslWriter};
 use crate::security::user::UserInfo;
 use crate::{HdfsError, Result};
 
-#[cfg(feature = "token")]
-use crate::security::sasl::SaslDatanodeConnection;
-
 const PROTOCOL: &str = "org.apache.hadoop.hdfs.protocol.ClientProtocol";
 const DATA_TRANSFER_VERSION: u16 = 28;
 const MAX_PACKET_HEADER_SIZE: usize = 33;
@@ -531,7 +529,6 @@ impl DatanodeConnection {
         let stream = connect(&url).await?;
 
         // If the token has an identifier, we can do SASL negotiation
-        #[cfg(feature = "token")]
         let stream = if token.identifier.is_empty() {
             stream
         } else {
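
For context on the `token.identifier.is_empty()` check that remains above: in Hadoop's SASL data transfer protocol the DIGEST-MD5 credentials come from the block access token (the base64-encoded identifier is the username, the base64-encoded password is the password), so a token without an identifier, as on an insecure cluster, leaves nothing to negotiate. Below is a rough sketch of that derivation; the `BlockToken` struct and the `base64` crate are assumptions for illustration, not part of this commit.

```rust
/// Illustrative stand-in for the fields of `TokenProto` used here.
struct BlockToken {
    identifier: Vec<u8>,
    password: Vec<u8>,
}

/// Derive DIGEST-MD5 credentials from a block access token, mirroring
/// Hadoop's SaslDataTransferClient. `base64::encode` comes from the external
/// `base64` crate, used here purely for illustration.
fn sasl_credentials(token: &BlockToken) -> Option<(String, String)> {
    if token.identifier.is_empty() {
        // No identifier means no secure handshake is expected -- the same
        // condition the connection code above uses to skip SASL negotiation.
        return None;
    }
    Some((
        base64::encode(&token.identifier),
        base64::encode(&token.password),
    ))
}
```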

crates/hdfs-native/src/minidfs.rs (-7)
@@ -10,7 +10,6 @@ use which::which;
 #[derive(PartialEq, Eq, Hash, Debug)]
 pub enum DfsFeatures {
     Security,
-    DataTransferSecurity,
     Token,
     Privacy,
     HA,
@@ -27,7 +26,6 @@ impl DfsFeatures {
             DfsFeatures::ViewFS => "viewfs",
             DfsFeatures::Privacy => "privacy",
             DfsFeatures::Security => "security",
-            DfsFeatures::DataTransferSecurity => "data_transfer_security",
             DfsFeatures::Token => "token",
             DfsFeatures::RBF => "rbf",
         }
@@ -58,11 +56,6 @@ impl MiniDfs {
         for feature in features.iter() {
             feature_args.push(feature.as_str());
         }
-        // If the `token` feature is enabled, we need to force the data transfer protection
-        #[cfg(feature = "token")]
-        if !features.contains(&DfsFeatures::DataTransferSecurity) {
-            feature_args.push(DfsFeatures::DataTransferSecurity.as_str());
-        }
 
         let mut child = Command::new(mvn_exec)
             .args([
