From 3edf23ca679017e6c4ba8929fc7896557bdb9e4d Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Wed, 22 Jan 2025 07:11:57 -0500 Subject: [PATCH 01/35] Create testfile with client rather than CLI and drop block size and test file size --- .github/workflows/rust-test.yml | 6 ---- rust/minidfs/src/main/java/main/Main.java | 1 + rust/tests/common/mod.rs | 39 +---------------------- rust/tests/test_integration.rs | 15 +++++++-- rust/tests/test_read.rs | 15 +++++++-- rust/tests/test_write.rs | 9 ++++-- 6 files changed, 32 insertions(+), 53 deletions(-) diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml index 626568f..379f2cf 100644 --- a/.github/workflows/rust-test.yml +++ b/.github/workflows/rust-test.yml @@ -86,11 +86,5 @@ jobs: - name: Install native libs run: sudo apt-get install -y libkrb5-dev krb5-user - - name: Download Hadoop - run: | - wget -q https://dlcdn.apache.org/hadoop/common/hadoop-3.4.0/hadoop-3.4.0.tar.gz - tar -xf hadoop-3.4.0.tar.gz -C $GITHUB_WORKSPACE - echo "$GITHUB_WORKSPACE/hadoop-3.4.0/bin" >> $GITHUB_PATH - - name: Run tests run: cargo test --features integration-test diff --git a/rust/minidfs/src/main/java/main/Main.java b/rust/minidfs/src/main/java/main/Main.java index 934605a..540c619 100644 --- a/rust/minidfs/src/main/java/main/Main.java +++ b/rust/minidfs/src/main/java/main/Main.java @@ -45,6 +45,7 @@ public static void main(String args[]) throws Exception { new File("target/test/delegation_token").delete(); Configuration conf = new Configuration(); + conf.set("dfs.blocksize", "16777216"); // 16 MiB instead of 128 MiB if (flags.contains("security")) { kdc = new MiniKdc(MiniKdc.createConf(), new File("target/test/kdc")); kdc.setTransport("UDP"); diff --git a/rust/tests/common/mod.rs b/rust/tests/common/mod.rs index 047df63..1adcc87 100644 --- a/rust/tests/common/mod.rs +++ b/rust/tests/common/mod.rs @@ -1,44 +1,7 @@ #![allow(dead_code)] use bytes::Buf; -use std::collections::HashSet; -use std::io::{BufWriter, Write}; -use std::process::Command; -use tempfile::NamedTempFile; -use which::which; -use hdfs_native::minidfs::{DfsFeatures, MiniDfs}; - -pub const TEST_FILE_INTS: usize = 64 * 1024 * 1024; - -pub fn setup(features: &HashSet) -> MiniDfs { - let hadoop_exc = which("hadoop").expect("Failed to find hadoop executable"); - - let dfs = MiniDfs::with_features(features); - - let mut file = NamedTempFile::new_in("target/test").unwrap(); - { - let mut writer = BufWriter::new(file.as_file_mut()); - for i in 0..TEST_FILE_INTS as i32 { - let bytes = i.to_be_bytes(); - writer.write_all(&bytes).unwrap(); - } - writer.flush().unwrap(); - } - - let status = Command::new(hadoop_exc) - .args([ - "fs", - "-copyFromLocal", - "-f", - file.path().to_str().unwrap(), - &format!("{}/testfile", dfs.url), - ]) - .status() - .unwrap(); - assert!(status.success()); - - dfs -} +pub const TEST_FILE_INTS: usize = 4 * 1024 * 1024; pub fn assert_bufs_equal(buf1: &impl Buf, buf2: &impl Buf, message: Option) { assert_eq!(buf1.chunk().len(), buf2.chunk().len()); diff --git a/rust/tests/test_integration.rs b/rust/tests/test_integration.rs index 096de74..aaf364a 100644 --- a/rust/tests/test_integration.rs +++ b/rust/tests/test_integration.rs @@ -3,10 +3,13 @@ mod common; #[cfg(feature = "integration-test")] mod test { - use crate::common::{assert_bufs_equal, setup, TEST_FILE_INTS}; + use crate::common::{assert_bufs_equal, TEST_FILE_INTS}; use bytes::{BufMut, BytesMut}; use hdfs_native::{ - acl::AclEntry, client::FileStatus, minidfs::DfsFeatures, Client, Result, 
WriteOptions, + acl::AclEntry, + client::FileStatus, + minidfs::{DfsFeatures, MiniDfs}, + Client, Result, WriteOptions, }; use serial_test::serial; use std::collections::HashSet; @@ -171,9 +174,15 @@ mod test { pub async fn test_with_features(features: &HashSet) -> Result<()> { let _ = env_logger::builder().is_test(true).try_init(); - let _dfs = setup(features); + let _dfs = MiniDfs::with_features(features); let client = Client::default(); + let mut file = client.create("/testfile", WriteOptions::default()).await?; + for i in 0..TEST_FILE_INTS as i32 { + file.write(i.to_be_bytes().to_vec().into()).await?; + } + file.close().await?; + test_file_info(&client).await?; test_listing(&client).await?; test_rename(&client).await?; diff --git a/rust/tests/test_read.rs b/rust/tests/test_read.rs index 6469357..97e3aec 100644 --- a/rust/tests/test_read.rs +++ b/rust/tests/test_read.rs @@ -3,9 +3,12 @@ mod common; #[cfg(feature = "integration-test")] mod test { - use crate::common::{setup, TEST_FILE_INTS}; + use crate::common::TEST_FILE_INTS; use bytes::Buf; - use hdfs_native::{minidfs::DfsFeatures, Client, Result}; + use hdfs_native::{ + minidfs::{DfsFeatures, MiniDfs}, + Client, Result, WriteOptions, + }; use serial_test::serial; use std::collections::HashSet; @@ -47,9 +50,15 @@ mod test { async fn test_read(features: &HashSet) -> Result<()> { let _ = env_logger::builder().is_test(true).try_init(); - let _dfs = setup(features); + let _dfs = MiniDfs::with_features(features); let client = Client::default(); + let mut file = client.create("/testfile", WriteOptions::default()).await?; + for i in 0..TEST_FILE_INTS as i32 { + file.write(i.to_be_bytes().to_vec().into()).await?; + } + file.close().await?; + // Read the whole file let reader = client.read("/testfile").await?; let mut buf = reader.read_range(0, TEST_FILE_INTS * 4).await?; diff --git a/rust/tests/test_write.rs b/rust/tests/test_write.rs index 3ee43a6..7df272f 100644 --- a/rust/tests/test_write.rs +++ b/rust/tests/test_write.rs @@ -3,9 +3,12 @@ mod common; #[cfg(feature = "integration-test")] mod test { - use crate::common::{assert_bufs_equal, setup}; + use crate::common::assert_bufs_equal; use bytes::{BufMut, BytesMut}; - use hdfs_native::{minidfs::DfsFeatures, Client, Result, WriteOptions}; + use hdfs_native::{ + minidfs::{DfsFeatures, MiniDfs}, + Client, Result, WriteOptions, + }; use serial_test::serial; use std::collections::HashSet; @@ -47,7 +50,7 @@ mod test { async fn test_write(features: &HashSet) -> Result<()> { let _ = env_logger::builder().is_test(true).try_init(); - let _dfs = setup(features); + let _dfs = MiniDfs::with_features(features); let client = Client::default(); test_create(&client).await?; From 0710aa6eb57c44a2e33e9ed1ffb7ee8f4f2e0b32 Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Wed, 22 Jan 2025 07:17:20 -0500 Subject: [PATCH 02/35] Keep it for EC tests actually --- .github/workflows/rust-test.yml | 6 ++++++ rust/minidfs/pom.xml | 10 +++++----- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml index 379f2cf..7ce9570 100644 --- a/.github/workflows/rust-test.yml +++ b/.github/workflows/rust-test.yml @@ -86,5 +86,11 @@ jobs: - name: Install native libs run: sudo apt-get install -y libkrb5-dev krb5-user + - name: Download Hadoop + run: | + wget -q https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1.tar.gz + tar -xf hadoop-3.4.1.tar.gz -C $GITHUB_WORKSPACE + echo "$GITHUB_WORKSPACE/hadoop-3.4.1/bin" >> $GITHUB_PATH + - name: Run 
tests
       run: cargo test --features integration-test

diff --git a/rust/minidfs/pom.xml b/rust/minidfs/pom.xml
index ecd99d4..7ee7e60 100644
--- a/rust/minidfs/pom.xml
+++ b/rust/minidfs/pom.xml
@@ -11,7 +11,7 @@
     <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-minicluster</artifactId>
-     <version>3.4.0</version>
+     <version>3.4.1</version>
      <exclusions>
        <exclusion>
          <groupId>ch.qos.logback</groupId>
@@ -22,23 +22,23 @@
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-minikdc</artifactId>
-     <version>3.4.0</version>
+     <version>3.4.1</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs-rbf</artifactId>
-     <version>3.4.0</version>
+     <version>3.4.1</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs-rbf</artifactId>
-     <version>3.4.0</version>
+     <version>3.4.1</version>
      <type>test-jar</type>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-federation-balance</artifactId>
-     <version>3.4.0</version>
+     <version>3.4.1</version>
      <exclusions>
        <exclusion>
          <groupId>junit</groupId>

From c57ca9321a6a97acfd31f61291e0c836b9d7b252 Mon Sep 17 00:00:00 2001
From: Adam Binford
Date: Wed, 22 Jan 2025 07:18:25 -0500
Subject: [PATCH 03/35] Add simple windows test

---
 .github/workflows/rust-test.yml | 36 ++++++++++++++++++++++++++++++++-
 1 file changed, 35 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml
index 7ce9570..bee05c6 100644
--- a/.github/workflows/rust-test.yml
+++ b/.github/workflows/rust-test.yml
@@ -52,7 +52,7 @@ jobs:
     - name: Check all features
       run: cargo check --all-targets --features integration-test,benchmark
 
-  test:
+  test-full:
     strategy:
       fail-fast: false
       matrix:
@@ -94,3 +94,37 @@ jobs:
 
     - name: Run tests
       run: cargo test --features integration-test
+
+  test-simple:
+    strategy:
+      fail-fast: false
+      matrix:
+        os:
+          # - ubuntu-latest
+          # - macos-latest
+          - windows-latest
+    runs-on: ${{ matrix.os }}
+    env:
+      # Disable full debug symbol generation to speed up CI build and keep memory down
+      # "1" means line tables only, which is useful for panic tracebacks.
+      RUSTFLAGS: -C debuginfo=1
+      RUST_BACKTRACE: "1"
+      RUST_LOG: debug
+
+    steps:
+    - uses: actions/checkout@v4
+
+    - name: Install minimal stable with clippy and rustfmt
+      uses: dtolnay/rust-toolchain@master
+      with:
+        toolchain: stable
+
+    - uses: Swatinem/rust-cache@v2
+
+    - uses: actions/setup-java@v4
+      with:
+        distribution: "temurin"
+        java-version: "17"
+
+    - name: Run tests
+      run: cargo test --features integration-test test_basic_ha

From 9b52cdb6ffd7817b93b92d5fbfb7f10a7385554c Mon Sep 17 00:00:00 2001
From: Adam Binford
Date: Thu, 23 Jan 2025 18:57:05 -0500
Subject: [PATCH 04/35] Output minidfs stderr

---
 rust/src/minidfs.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/rust/src/minidfs.rs b/rust/src/minidfs.rs
index 05bf01b..b21988f 100644
--- a/rust/src/minidfs.rs
+++ b/rust/src/minidfs.rs
@@ -74,7 +74,7 @@ impl MiniDfs {
             ])
             .stdin(Stdio::piped())
             .stdout(Stdio::piped())
-            .stderr(Stdio::null())
+            // .stderr(Stdio::null())
             .spawn()
             .unwrap();

From 0baadd56ab6b8e6f3f8d11db5162e2c802b04652 Mon Sep 17 00:00:00 2001
From: Adam Binford
Date: Thu, 23 Jan 2025 19:06:40 -0500
Subject: [PATCH 05/35] Try non-ha

---
 .github/workflows/rust-test.yml | 2 +-
 rust/tests/test_integration.rs  | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml
index bee05c6..c3ec922 100644
--- a/.github/workflows/rust-test.yml
+++ b/.github/workflows/rust-test.yml
@@ -127,4 +127,4 @@ jobs:
         java-version: "17"
 
     - name: Run tests
-      run: cargo test --features integration-test test_basic_ha
+      run: cargo test --features integration-test test_basic_non_ha

diff --git a/rust/tests/test_integration.rs b/rust/tests/test_integration.rs
index aaf364a..d5ae3ad 100644
--- a/rust/tests/test_integration.rs
+++ b/rust/tests/test_integration.rs
@@ -16,7 +16,7 @@ mod test {
 
     #[tokio::test]
     #[serial]
-    async fn test_basic() {
+    async fn test_basic_non_ha() {
test_with_features(&HashSet::new()).await.unwrap(); } From 051150eada605027730beeeedfa5b6a07b358989 Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Thu, 23 Jan 2025 19:20:13 -0500 Subject: [PATCH 06/35] Try more stuff --- .github/workflows/rust-test.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml index c3ec922..1ed15ba 100644 --- a/.github/workflows/rust-test.yml +++ b/.github/workflows/rust-test.yml @@ -110,6 +110,7 @@ jobs: RUSTFLAGS: -C debuginfo=1 RUST_BACKTRACE: "1" RUST_LOG: debug + HADOOP_HOME: $GITHUB_WORKSPACE\hadoop-3.4.1\bin steps: - uses: actions/checkout@v4 @@ -126,5 +127,10 @@ jobs: distribution: "temurin" java-version: "17" + - name: Download Hadoop + run: | + Invoke-WebRequest -Uri wget -q https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1.tar.gz -OutFile hadoop-3.4.1.tar.gz + tar -xf hadoop-3.4.1.tar.gz -C %GITHUB_WORKSPACE% + - name: Run tests run: cargo test --features integration-test test_basic_non_ha From 336d7b55a14204611dc0145db2aeb405140889cd Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Thu, 23 Jan 2025 20:13:33 -0500 Subject: [PATCH 07/35] Fix --- .github/workflows/rust-test.yml | 86 ++++++++++++++++----------------- 1 file changed, 43 insertions(+), 43 deletions(-) diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml index 1ed15ba..2f4970d 100644 --- a/.github/workflows/rust-test.yml +++ b/.github/workflows/rust-test.yml @@ -52,48 +52,48 @@ jobs: - name: Check all features run: cargo check --all-targets --features integration-test,benchmark - test-full: - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - # - macos-latest - # - windows-latest - runs-on: ${{ matrix.os }} - env: - # Disable full debug symbol generation to speed up CI build and keep memory down - # "1" means line tables only, which is useful for panic tracebacks. - RUSTFLAGS: -C debuginfo=1 - RUST_BACKTRACE: "1" - RUST_LOG: debug - - steps: - - uses: actions/checkout@v4 - - - name: Install minimal stable with clippy and rustfmt - uses: dtolnay/rust-toolchain@master - with: - toolchain: stable - - - uses: Swatinem/rust-cache@v2 - - - uses: actions/setup-java@v4 - with: - distribution: "temurin" - java-version: "17" - - - name: Install native libs - run: sudo apt-get install -y libkrb5-dev krb5-user - - - name: Download Hadoop - run: | - wget -q https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1.tar.gz - tar -xf hadoop-3.4.1.tar.gz -C $GITHUB_WORKSPACE - echo "$GITHUB_WORKSPACE/hadoop-3.4.1/bin" >> $GITHUB_PATH - - - name: Run tests - run: cargo test --features integration-test + # test-full: + # strategy: + # fail-fast: false + # matrix: + # os: + # - ubuntu-latest + # # - macos-latest + # # - windows-latest + # runs-on: ${{ matrix.os }} + # env: + # # Disable full debug symbol generation to speed up CI build and keep memory down + # # "1" means line tables only, which is useful for panic tracebacks. 
+ # RUSTFLAGS: -C debuginfo=1 + # RUST_BACKTRACE: "1" + # RUST_LOG: debug + + # steps: + # - uses: actions/checkout@v4 + + # - name: Install minimal stable with clippy and rustfmt + # uses: dtolnay/rust-toolchain@master + # with: + # toolchain: stable + + # - uses: Swatinem/rust-cache@v2 + + # - uses: actions/setup-java@v4 + # with: + # distribution: "temurin" + # java-version: "17" + + # - name: Install native libs + # run: sudo apt-get install -y libkrb5-dev krb5-user + + # - name: Download Hadoop + # run: | + # wget -q https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1.tar.gz + # tar -xf hadoop-3.4.1.tar.gz -C $GITHUB_WORKSPACE + # echo "$GITHUB_WORKSPACE/hadoop-3.4.1/bin" >> $GITHUB_PATH + + # - name: Run tests + # run: cargo test --features integration-test test-simple: strategy: @@ -129,7 +129,7 @@ jobs: - name: Download Hadoop run: | - Invoke-WebRequest -Uri wget -q https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1.tar.gz -OutFile hadoop-3.4.1.tar.gz + Invoke-WebRequest -Uri https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1.tar.gz -OutFile hadoop-3.4.1.tar.gz tar -xf hadoop-3.4.1.tar.gz -C %GITHUB_WORKSPACE% - name: Run tests From fd2a3521c94f231b35f83bdc70a9fb323ebcad59 Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Thu, 23 Jan 2025 21:44:25 -0500 Subject: [PATCH 08/35] Try again --- .github/workflows/rust-test.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml index 2f4970d..a38bc44 100644 --- a/.github/workflows/rust-test.yml +++ b/.github/workflows/rust-test.yml @@ -110,7 +110,7 @@ jobs: RUSTFLAGS: -C debuginfo=1 RUST_BACKTRACE: "1" RUST_LOG: debug - HADOOP_HOME: $GITHUB_WORKSPACE\hadoop-3.4.1\bin + HADOOP_HOME: hadoop-3.4.1 steps: - uses: actions/checkout@v4 @@ -130,7 +130,7 @@ jobs: - name: Download Hadoop run: | Invoke-WebRequest -Uri https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1.tar.gz -OutFile hadoop-3.4.1.tar.gz - tar -xf hadoop-3.4.1.tar.gz -C %GITHUB_WORKSPACE% + tar -xf hadoop-3.4.1.tar.gz - name: Run tests run: cargo test --features integration-test test_basic_non_ha From bcdea98922f465c3328a6c34452a4c66fd0718b8 Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Thu, 23 Jan 2025 22:16:00 -0500 Subject: [PATCH 09/35] Again --- .github/workflows/rust-test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml index a38bc44..9954c0b 100644 --- a/.github/workflows/rust-test.yml +++ b/.github/workflows/rust-test.yml @@ -110,7 +110,7 @@ jobs: RUSTFLAGS: -C debuginfo=1 RUST_BACKTRACE: "1" RUST_LOG: debug - HADOOP_HOME: hadoop-3.4.1 + HADOOP_HOME: "%$GITHUB_WORKSPACE%\\hadoop-3.4.1" steps: - uses: actions/checkout@v4 From cef2a3657377256981d9f470316509c8e6f254e6 Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Thu, 23 Jan 2025 22:32:53 -0500 Subject: [PATCH 10/35] Again --- .github/workflows/rust-test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml index 9954c0b..0da1a43 100644 --- a/.github/workflows/rust-test.yml +++ b/.github/workflows/rust-test.yml @@ -110,7 +110,7 @@ jobs: RUSTFLAGS: -C debuginfo=1 RUST_BACKTRACE: "1" RUST_LOG: debug - HADOOP_HOME: "%$GITHUB_WORKSPACE%\\hadoop-3.4.1" + HADOOP_HOME: "$env:GITHUB_WORKSPACE\\hadoop-3.4.1" steps: - uses: actions/checkout@v4 From da47d2f2421eaa02c0ac20e15880f4890d9b3c59 Mon Sep 17 00:00:00 2001 From: Adam 
Binford Date: Fri, 24 Jan 2025 07:08:03 -0500 Subject: [PATCH 11/35] Try setting env var --- .github/workflows/rust-test.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml index 0da1a43..91df0d0 100644 --- a/.github/workflows/rust-test.yml +++ b/.github/workflows/rust-test.yml @@ -110,7 +110,6 @@ jobs: RUSTFLAGS: -C debuginfo=1 RUST_BACKTRACE: "1" RUST_LOG: debug - HADOOP_HOME: "$env:GITHUB_WORKSPACE\\hadoop-3.4.1" steps: - uses: actions/checkout@v4 @@ -130,7 +129,8 @@ jobs: - name: Download Hadoop run: | Invoke-WebRequest -Uri https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1.tar.gz -OutFile hadoop-3.4.1.tar.gz - tar -xf hadoop-3.4.1.tar.gz + tar -xf hadoop-3.4.1.tar.gz -C $env:GITHUB_WORKSAPCE + echo "HADOOP_HOME=$env:GITHUB_WORKSPACE\hadoop3.4.1" >> $GITHUB_ENV - name: Run tests run: cargo test --features integration-test test_basic_non_ha From e026a771b9c721cf1b744f02f2054639e75c257e Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Fri, 24 Jan 2025 07:11:10 -0500 Subject: [PATCH 12/35] typo --- .github/workflows/rust-test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml index 91df0d0..c7010f6 100644 --- a/.github/workflows/rust-test.yml +++ b/.github/workflows/rust-test.yml @@ -129,7 +129,7 @@ jobs: - name: Download Hadoop run: | Invoke-WebRequest -Uri https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1.tar.gz -OutFile hadoop-3.4.1.tar.gz - tar -xf hadoop-3.4.1.tar.gz -C $env:GITHUB_WORKSAPCE + tar -xf hadoop-3.4.1.tar.gz -C $env:GITHUB_WORKSPACE echo "HADOOP_HOME=$env:GITHUB_WORKSPACE\hadoop3.4.1" >> $GITHUB_ENV - name: Run tests From 9e44464cb11734cfe2babfa2f77969826a354d07 Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Fri, 24 Jan 2025 07:17:39 -0500 Subject: [PATCH 13/35] Fix env var --- .github/workflows/rust-test.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml index c7010f6..3a122fe 100644 --- a/.github/workflows/rust-test.yml +++ b/.github/workflows/rust-test.yml @@ -128,9 +128,9 @@ jobs: - name: Download Hadoop run: | - Invoke-WebRequest -Uri https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1.tar.gz -OutFile hadoop-3.4.1.tar.gz + Invoke-WebRequest -Uri https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1-lean.tar.gz -OutFile hadoop-3.4.1.tar.gz tar -xf hadoop-3.4.1.tar.gz -C $env:GITHUB_WORKSPACE - echo "HADOOP_HOME=$env:GITHUB_WORKSPACE\hadoop3.4.1" >> $GITHUB_ENV + echo "HADOOP_HOME=$env:GITHUB_WORKSPACE\hadoop3.4.1" >> $env:GITHUB_ENV - name: Run tests run: cargo test --features integration-test test_basic_non_ha From 6ea711dc117215fc5b5a855d2de1d8d18e9ecf19 Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Fri, 24 Jan 2025 07:26:10 -0500 Subject: [PATCH 14/35] again --- .github/workflows/rust-test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml index 3a122fe..4529c42 100644 --- a/.github/workflows/rust-test.yml +++ b/.github/workflows/rust-test.yml @@ -130,7 +130,7 @@ jobs: run: | Invoke-WebRequest -Uri https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1-lean.tar.gz -OutFile hadoop-3.4.1.tar.gz tar -xf hadoop-3.4.1.tar.gz -C $env:GITHUB_WORKSPACE - echo "HADOOP_HOME=$env:GITHUB_WORKSPACE\hadoop3.4.1" >> $env:GITHUB_ENV + echo 
"HADOOP_HOME=$env:GITHUB_WORKSPACE\hadoop-3.4.1" >> $env:GITHUB_ENV - name: Run tests run: cargo test --features integration-test test_basic_non_ha From 7c1f7d8e9048ca34010d0dfb00d18626271d4d4f Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Fri, 24 Jan 2025 07:32:33 -0500 Subject: [PATCH 15/35] So close --- .github/workflows/rust-test.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml index 4529c42..10679aa 100644 --- a/.github/workflows/rust-test.yml +++ b/.github/workflows/rust-test.yml @@ -131,6 +131,8 @@ jobs: Invoke-WebRequest -Uri https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1-lean.tar.gz -OutFile hadoop-3.4.1.tar.gz tar -xf hadoop-3.4.1.tar.gz -C $env:GITHUB_WORKSPACE echo "HADOOP_HOME=$env:GITHUB_WORKSPACE\hadoop-3.4.1" >> $env:GITHUB_ENV + ls $env:GITHUB_WORKSPACE + ls $env:GITHUB_WORKSPACE\hadoop-3.4.1 - name: Run tests run: cargo test --features integration-test test_basic_non_ha From d77dac860bc8247dd4d8abb678aed626b0777cc2 Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Fri, 24 Jan 2025 07:34:24 -0500 Subject: [PATCH 16/35] Look at bin too --- .github/workflows/rust-test.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml index 10679aa..686b75a 100644 --- a/.github/workflows/rust-test.yml +++ b/.github/workflows/rust-test.yml @@ -133,6 +133,7 @@ jobs: echo "HADOOP_HOME=$env:GITHUB_WORKSPACE\hadoop-3.4.1" >> $env:GITHUB_ENV ls $env:GITHUB_WORKSPACE ls $env:GITHUB_WORKSPACE\hadoop-3.4.1 + ls $env:GITHUB_WORKSPACE\hadoop-3.4.1\bin - name: Run tests run: cargo test --features integration-test test_basic_non_ha From 69a97683ab2e29291d1ff92128cac78e801d0514 Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Fri, 24 Jan 2025 19:18:24 -0500 Subject: [PATCH 17/35] Fake winutils --- .github/workflows/rust-test.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml index 686b75a..e441c0b 100644 --- a/.github/workflows/rust-test.yml +++ b/.github/workflows/rust-test.yml @@ -131,8 +131,7 @@ jobs: Invoke-WebRequest -Uri https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1-lean.tar.gz -OutFile hadoop-3.4.1.tar.gz tar -xf hadoop-3.4.1.tar.gz -C $env:GITHUB_WORKSPACE echo "HADOOP_HOME=$env:GITHUB_WORKSPACE\hadoop-3.4.1" >> $env:GITHUB_ENV - ls $env:GITHUB_WORKSPACE - ls $env:GITHUB_WORKSPACE\hadoop-3.4.1 + touch $env:GITHUB_WORKSPACE\hadoop-3.4.1\bin\winutils.exe ls $env:GITHUB_WORKSPACE\hadoop-3.4.1\bin - name: Run tests From 1bb27bb70c61c7676aecd3d6e0172622e3f0fabe Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Fri, 24 Jan 2025 22:10:58 -0500 Subject: [PATCH 18/35] Try real winutils --- .github/workflows/rust-test.yml | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml index e441c0b..d82f16b 100644 --- a/.github/workflows/rust-test.yml +++ b/.github/workflows/rust-test.yml @@ -114,6 +114,10 @@ jobs: steps: - uses: actions/checkout@v4 + - uses: actions/checkout@v4 + with: + repository: cdarlint/winutils + - name: Install minimal stable with clippy and rustfmt uses: dtolnay/rust-toolchain@master with: @@ -126,13 +130,17 @@ jobs: distribution: "temurin" java-version: "17" - - name: Download Hadoop - run: | - Invoke-WebRequest -Uri https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1-lean.tar.gz -OutFile hadoop-3.4.1.tar.gz - 
tar -xf hadoop-3.4.1.tar.gz -C $env:GITHUB_WORKSPACE - echo "HADOOP_HOME=$env:GITHUB_WORKSPACE\hadoop-3.4.1" >> $env:GITHUB_ENV - touch $env:GITHUB_WORKSPACE\hadoop-3.4.1\bin\winutils.exe - ls $env:GITHUB_WORKSPACE\hadoop-3.4.1\bin + # - name: Download Hadoop + # run: | + # Invoke-WebRequest -Uri https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1-lean.tar.gz -OutFile hadoop-3.4.1.tar.gz + # tar -xf hadoop-3.4.1.tar.gz -C $env:GITHUB_WORKSPACE + # echo "HADOOP_HOME=$env:GITHUB_WORKSPACE\hadoop-3.4.1" >> $env:GITHUB_ENV + # touch $env:GITHUB_WORKSPACE\hadoop-3.4.1\bin\winutils.exe + # ls $env:GITHUB_WORKSPACE\hadoop-3.4.1\bin + + - name: Run tests run: cargo test --features integration-test test_basic_non_ha + env: + HADOOP_HOME: ${{ env.GITHUB_WORKSPACE }}\winutils\hadoop-3.3.6 From a71b0fa6eabadae63f569c0abffcd72ad1a4a5c3 Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Fri, 24 Jan 2025 22:31:25 -0500 Subject: [PATCH 19/35] Fix var --- .github/workflows/rust-test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml index d82f16b..18065e8 100644 --- a/.github/workflows/rust-test.yml +++ b/.github/workflows/rust-test.yml @@ -143,4 +143,4 @@ jobs: - name: Run tests run: cargo test --features integration-test test_basic_non_ha env: - HADOOP_HOME: ${{ env.GITHUB_WORKSPACE }}\winutils\hadoop-3.3.6 + HADOOP_HOME: ${{ github.workspace }}\winutils\hadoop-3.3.6 From 98db2b18cefee1d4bf8468ce75e2d0675c8fa552 Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Fri, 24 Jan 2025 22:42:47 -0500 Subject: [PATCH 20/35] Update rust-test.yml --- .github/workflows/rust-test.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml index 18065e8..4b456b8 100644 --- a/.github/workflows/rust-test.yml +++ b/.github/workflows/rust-test.yml @@ -117,6 +117,7 @@ jobs: - uses: actions/checkout@v4 with: repository: cdarlint/winutils + path: winutils - name: Install minimal stable with clippy and rustfmt uses: dtolnay/rust-toolchain@master From e022eff01c5b85135cc00f7ac7f9158918b8b0ce Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Sat, 25 Jan 2025 07:32:40 -0500 Subject: [PATCH 21/35] More logging --- .github/workflows/rust-test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml index 4b456b8..8f71474 100644 --- a/.github/workflows/rust-test.yml +++ b/.github/workflows/rust-test.yml @@ -142,6 +142,6 @@ jobs: - name: Run tests - run: cargo test --features integration-test test_basic_non_ha + run: RUST_LOG=debug cargo test --features integration-test test_basic_non_ha -- --nocapture env: HADOOP_HOME: ${{ github.workspace }}\winutils\hadoop-3.3.6 From 34ddaeb359cc97ab53c1229251d6fd18fca77f36 Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Sat, 25 Jan 2025 09:01:06 -0500 Subject: [PATCH 22/35] Update rust-test.yml --- .github/workflows/rust-test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml index 8f71474..b952671 100644 --- a/.github/workflows/rust-test.yml +++ b/.github/workflows/rust-test.yml @@ -142,6 +142,6 @@ jobs: - name: Run tests - run: RUST_LOG=debug cargo test --features integration-test test_basic_non_ha -- --nocapture + run: cargo test --features integration-test test_basic_non_ha -- --nocapture env: HADOOP_HOME: ${{ github.workspace }}\winutils\hadoop-3.3.6 From 
7c41f1bcdf0f07ec5cac01db51d72646fe05a5bc Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Sat, 25 Jan 2025 10:23:07 -0500 Subject: [PATCH 23/35] Add some prints --- rust/minidfs/src/main/java/main/Main.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/rust/minidfs/src/main/java/main/Main.java b/rust/minidfs/src/main/java/main/Main.java index 540c619..74ac381 100644 --- a/rust/minidfs/src/main/java/main/Main.java +++ b/rust/minidfs/src/main/java/main/Main.java @@ -128,6 +128,7 @@ public static void main(String args[]) throws Exception { hdfsConf.set(FS_DEFAULT_NAME_KEY, "hdfs://127.0.0.1:9000"); } + System.out.println("Waiting for cluster to be active"); dfs.waitActive(); int activeNamenode = 0; @@ -173,6 +174,7 @@ public static void main(String args[]) throws Exception { } } + System.out.println("Writing config file"); hdfsConf.writeXml(new FileOutputStream("target/test/core-site.xml")); System.out.println("Ready!"); @@ -182,6 +184,7 @@ public static void main(String args[]) throws Exception { BufferedReader reader = new BufferedReader(new InputStreamReader(System.in)); reader.readLine(); + System.out.println("Read line, closing"); if (dfs != null) { dfs.close(); From 04e756f39c7b237b07724f1da26dcc0509b91f95 Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Sat, 25 Jan 2025 10:51:53 -0500 Subject: [PATCH 24/35] Update Main.java --- rust/minidfs/src/main/java/main/Main.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/rust/minidfs/src/main/java/main/Main.java b/rust/minidfs/src/main/java/main/Main.java index 74ac381..94cf6af 100644 --- a/rust/minidfs/src/main/java/main/Main.java +++ b/rust/minidfs/src/main/java/main/Main.java @@ -128,7 +128,7 @@ public static void main(String args[]) throws Exception { hdfsConf.set(FS_DEFAULT_NAME_KEY, "hdfs://127.0.0.1:9000"); } - System.out.println("Waiting for cluster to be active"); + System.err.println("Waiting for cluster to be active"); dfs.waitActive(); int activeNamenode = 0; @@ -174,7 +174,7 @@ public static void main(String args[]) throws Exception { } } - System.out.println("Writing config file"); + System.err.println("Writing config file"); hdfsConf.writeXml(new FileOutputStream("target/test/core-site.xml")); System.out.println("Ready!"); @@ -184,8 +184,8 @@ public static void main(String args[]) throws Exception { BufferedReader reader = new BufferedReader(new InputStreamReader(System.in)); reader.readLine(); - System.out.println("Read line, closing"); - + System.err.println("Read line, closing"); + if (dfs != null) { dfs.close(); } @@ -219,4 +219,4 @@ public static MiniDFSNNTopology generateTopology(Set flags, Configuratio } return nnTopology; } -} \ No newline at end of file +} From 0f714937bc5beb248691af581c3178ff7dfa9ca1 Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Sun, 26 Jan 2025 07:31:48 -0500 Subject: [PATCH 25/35] More logging --- rust/minidfs/src/main/java/main/Main.java | 38 +++++++++++++---------- 1 file changed, 22 insertions(+), 16 deletions(-) diff --git a/rust/minidfs/src/main/java/main/Main.java b/rust/minidfs/src/main/java/main/Main.java index 94cf6af..58784f8 100644 --- a/rust/minidfs/src/main/java/main/Main.java +++ b/rust/minidfs/src/main/java/main/Main.java @@ -113,24 +113,30 @@ public static void main(String args[]) throws Exception { numDataNodes = 14; } - dfs = new MiniDFSCluster.Builder(hdfsConf) - .nameNodePort(9000) - .nameNodeHttpPort(9870) - .nnTopology(nnTopology) - .numDataNodes(numDataNodes) - .build(); + System.err.println("!!!!!!!!!!!!!!!!!!!! 
Building"); + try { + dfs = new MiniDFSCluster.Builder(hdfsConf) + .nameNodePort(9000) + .nameNodeHttpPort(9870) + .nnTopology(nnTopology) + .numDataNodes(numDataNodes) + .build(); + + if (flags.contains("viewfs")) { + hdfsConf.set(FS_DEFAULT_NAME_KEY, "viewfs://minidfs-viewfs"); + } else if (flags.contains("ha")) { + hdfsConf.set(FS_DEFAULT_NAME_KEY, "hdfs://minidfs-ns"); + } else { + hdfsConf.set(FS_DEFAULT_NAME_KEY, "hdfs://127.0.0.1:9000"); + } - if (flags.contains("viewfs")) { - hdfsConf.set(FS_DEFAULT_NAME_KEY, "viewfs://minidfs-viewfs"); - } else if (flags.contains("ha")) { - hdfsConf.set(FS_DEFAULT_NAME_KEY, "hdfs://minidfs-ns"); - } else { - hdfsConf.set(FS_DEFAULT_NAME_KEY, "hdfs://127.0.0.1:9000"); + System.err.println("!!!!!!!!!!!!!!!!!!!! Waiting for cluster to be active"); + dfs.waitActive(); + } catch (Exception e) { + e.printStackTrace(); + throw e; } - System.err.println("Waiting for cluster to be active"); - dfs.waitActive(); - int activeNamenode = 0; if (flags.contains("viewfs")) { // Each name services has two namenodes @@ -174,7 +180,7 @@ public static void main(String args[]) throws Exception { } } - System.err.println("Writing config file"); + System.err.println("!!!!!!!!!!!!!!!!!!!! Writing config file"); hdfsConf.writeXml(new FileOutputStream("target/test/core-site.xml")); System.out.println("Ready!"); From b40a5d622cb4ceae646532a8dc13de80b1ecb103 Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Sun, 26 Jan 2025 07:41:30 -0500 Subject: [PATCH 26/35] One more log --- rust/minidfs/src/main/java/main/Main.java | 1 + 1 file changed, 1 insertion(+) diff --git a/rust/minidfs/src/main/java/main/Main.java b/rust/minidfs/src/main/java/main/Main.java index 58784f8..ceeb685 100644 --- a/rust/minidfs/src/main/java/main/Main.java +++ b/rust/minidfs/src/main/java/main/Main.java @@ -133,6 +133,7 @@ public static void main(String args[]) throws Exception { System.err.println("!!!!!!!!!!!!!!!!!!!! Waiting for cluster to be active"); dfs.waitActive(); } catch (Exception e) { + System.err.println("!!!!!!!!!!!!!!!!!!!! 
Caught exception");
             e.printStackTrace();
             throw e;
         }

From d665a2178dd12856c08c763a6a412b6dddea0fdc Mon Sep 17 00:00:00 2001
From: Adam Binford
Date: Sun, 26 Jan 2025 07:46:48 -0500
Subject: [PATCH 27/35] ubuntu also

---
 .github/workflows/rust-test.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml
index b952671..83656f1 100644
--- a/.github/workflows/rust-test.yml
+++ b/.github/workflows/rust-test.yml
@@ -100,7 +100,7 @@ jobs:
       fail-fast: false
       matrix:
         os:
-          # - ubuntu-latest
+          - ubuntu-latest
           # - macos-latest
           - windows-latest
     runs-on: ${{ matrix.os }}

From 398714f6745c37911125d748decc13060d9f5e2f Mon Sep 17 00:00:00 2001
From: Adam Binford
Date: Sun, 26 Jan 2025 09:57:22 -0500
Subject: [PATCH 28/35] Try match hadoop version

---
 rust/minidfs/pom.xml | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/rust/minidfs/pom.xml b/rust/minidfs/pom.xml
index 7ee7e60..07f32e4 100644
--- a/rust/minidfs/pom.xml
+++ b/rust/minidfs/pom.xml
@@ -11,7 +11,7 @@
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-minicluster</artifactId>
-     <version>3.4.1</version>
+     <version>3.3.6</version>
      <exclusions>
        <exclusion>
          <groupId>ch.qos.logback</groupId>
@@ -22,23 +22,23 @@
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-minikdc</artifactId>
-     <version>3.4.1</version>
+     <version>3.3.6</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs-rbf</artifactId>
-     <version>3.4.1</version>
+     <version>3.3.6</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs-rbf</artifactId>
-     <version>3.4.1</version>
+     <version>3.3.6</version>
      <type>test-jar</type>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-federation-balance</artifactId>
-     <version>3.4.1</version>
+     <version>3.3.6</version>
      <exclusions>
        <exclusion>
          <groupId>junit</groupId>

From b0ee27628c313c2f1ec10e03d3fbe9f2b1e9e10d Mon Sep 17 00:00:00 2001
From: Adam Binford
Date: Sun, 26 Jan 2025 10:05:38 -0500
Subject: [PATCH 29/35] Different

---
 rust/minidfs/pom.xml | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/rust/minidfs/pom.xml b/rust/minidfs/pom.xml
index 07f32e4..ecd99d4 100644
--- a/rust/minidfs/pom.xml
+++ b/rust/minidfs/pom.xml
@@ -11,7 +11,7 @@
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-minicluster</artifactId>
-     <version>3.3.6</version>
+     <version>3.4.0</version>
      <exclusions>
        <exclusion>
          <groupId>ch.qos.logback</groupId>
@@ -22,23 +22,23 @@
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-minikdc</artifactId>
-     <version>3.3.6</version>
+     <version>3.4.0</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs-rbf</artifactId>
-     <version>3.3.6</version>
+     <version>3.4.0</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs-rbf</artifactId>
-     <version>3.3.6</version>
+     <version>3.4.0</version>
      <type>test-jar</type>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-federation-balance</artifactId>
-     <version>3.3.6</version>
+     <version>3.4.0</version>
      <exclusions>
        <exclusion>
          <groupId>junit</groupId>

From 1c7cdfe1161e1384d72e3c4de0986a3ed2f8f9df Mon Sep 17 00:00:00 2001
From: Adam Binford
Date: Sun, 26 Jan 2025 10:34:03 -0500
Subject: [PATCH 30/35] Try to print failing key

---
 rust/minidfs/src/main/java/main/Main.java | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/rust/minidfs/src/main/java/main/Main.java b/rust/minidfs/src/main/java/main/Main.java
index ceeb685..1a7215f 100644
--- a/rust/minidfs/src/main/java/main/Main.java
+++ b/rust/minidfs/src/main/java/main/Main.java
@@ -113,6 +113,8 @@ public static void main(String args[]) throws Exception {
             numDataNodes = 14;
         }
 
+        System.err.println("Failing conf: " + DFSConfigKeys.NNTOP_WINDOWS_MINUTES_KEY +
+            " = " + conf.get(DFSConfigKeys.NNTOP_WINDOWS_MINUTES_KEY));
         System.err.println("!!!!!!!!!!!!!!!!!!!! 
Building"); try { dfs = new MiniDFSCluster.Builder(hdfsConf) From 207ccadab2cb06f66736b465e27448afed0c01b3 Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Sun, 26 Jan 2025 12:21:56 -0500 Subject: [PATCH 31/35] Run non integration tests for all OS --- .github/workflows/rust-test.yml | 85 +++++++++++++++++---------------- rust/examples/simple.rs | 1 + 2 files changed, 45 insertions(+), 41 deletions(-) diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml index 83656f1..67ca2b2 100644 --- a/.github/workflows/rust-test.yml +++ b/.github/workflows/rust-test.yml @@ -52,6 +52,9 @@ jobs: - name: Check all features run: cargo check --all-targets --features integration-test,benchmark + - name: Run unit tests + run: cargo test -p hdfs-native + # test-full: # strategy: # fail-fast: false @@ -95,53 +98,53 @@ jobs: # - name: Run tests # run: cargo test --features integration-test - test-simple: - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - # - macos-latest - - windows-latest - runs-on: ${{ matrix.os }} - env: - # Disable full debug symbol generation to speed up CI build and keep memory down - # "1" means line tables only, which is useful for panic tracebacks. - RUSTFLAGS: -C debuginfo=1 - RUST_BACKTRACE: "1" - RUST_LOG: debug + # test-simple: + # strategy: + # fail-fast: false + # matrix: + # os: + # - ubuntu-latest + # # - macos-latest + # - windows-latest + # runs-on: ${{ matrix.os }} + # env: + # # Disable full debug symbol generation to speed up CI build and keep memory down + # # "1" means line tables only, which is useful for panic tracebacks. + # RUSTFLAGS: -C debuginfo=1 + # RUST_BACKTRACE: "1" + # RUST_LOG: debug - steps: - - uses: actions/checkout@v4 + # steps: + # - uses: actions/checkout@v4 - - uses: actions/checkout@v4 - with: - repository: cdarlint/winutils - path: winutils + # - uses: actions/checkout@v4 + # with: + # repository: cdarlint/winutils + # path: winutils - - name: Install minimal stable with clippy and rustfmt - uses: dtolnay/rust-toolchain@master - with: - toolchain: stable + # - name: Install minimal stable with clippy and rustfmt + # uses: dtolnay/rust-toolchain@master + # with: + # toolchain: stable - - uses: Swatinem/rust-cache@v2 + # - uses: Swatinem/rust-cache@v2 - - uses: actions/setup-java@v4 - with: - distribution: "temurin" - java-version: "17" + # - uses: actions/setup-java@v4 + # with: + # distribution: "temurin" + # java-version: "17" - # - name: Download Hadoop - # run: | - # Invoke-WebRequest -Uri https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1-lean.tar.gz -OutFile hadoop-3.4.1.tar.gz - # tar -xf hadoop-3.4.1.tar.gz -C $env:GITHUB_WORKSPACE - # echo "HADOOP_HOME=$env:GITHUB_WORKSPACE\hadoop-3.4.1" >> $env:GITHUB_ENV - # touch $env:GITHUB_WORKSPACE\hadoop-3.4.1\bin\winutils.exe - # ls $env:GITHUB_WORKSPACE\hadoop-3.4.1\bin + # # - name: Download Hadoop + # # run: | + # # Invoke-WebRequest -Uri https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1-lean.tar.gz -OutFile hadoop-3.4.1.tar.gz + # # tar -xf hadoop-3.4.1.tar.gz -C $env:GITHUB_WORKSPACE + # # echo "HADOOP_HOME=$env:GITHUB_WORKSPACE\hadoop-3.4.1" >> $env:GITHUB_ENV + # # touch $env:GITHUB_WORKSPACE\hadoop-3.4.1\bin\winutils.exe + # # ls $env:GITHUB_WORKSPACE\hadoop-3.4.1\bin - - name: Run tests - run: cargo test --features integration-test test_basic_non_ha -- --nocapture - env: - HADOOP_HOME: ${{ github.workspace }}\winutils\hadoop-3.3.6 + # - name: Run tests + # run: cargo test --features integration-test test_basic_non_ha -- 
--nocapture
+  #     env:
+  #       HADOOP_HOME: ${{ github.workspace }}\winutils\hadoop-3.3.6

diff --git a/rust/examples/simple.rs b/rust/examples/simple.rs
index 0ff06f5..333787a 100644
--- a/rust/examples/simple.rs
+++ b/rust/examples/simple.rs
@@ -1,3 +1,4 @@
+#[cfg(feature = "integration-test")]
 use std::collections::HashSet;
 
 #[cfg(feature = "integration-test")]

From 7f5609fd8fa744051fbca282135b4600ad89f103 Mon Sep 17 00:00:00 2001
From: Adam Binford
Date: Sun, 26 Jan 2025 19:32:38 -0500
Subject: [PATCH 32/35] Stop using Path

---
 .github/workflows/rust-test.yml |   2 +-
 rust/src/client.rs              | 115 +++++++++++++------------------
 2 files changed, 46 insertions(+), 71 deletions(-)

diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml
index 67ca2b2..3c16503 100644
--- a/.github/workflows/rust-test.yml
+++ b/.github/workflows/rust-test.yml
@@ -53,7 +53,7 @@ jobs:
 
     - name: Run unit tests
-      run: cargo test -p hdfs-native
+      run: cargo test -p hdfs-native --lib

diff --git a/rust/src/client.rs b/rust/src/client.rs
index 341f1eb..2f01cb2 100644
--- a/rust/src/client.rs
+++ b/rust/src/client.rs
@@ -1,5 +1,4 @@
 use std::collections::{HashMap, VecDeque};
-use std::path::{Path, PathBuf};
 use std::sync::Arc;
 
 use futures::stream::BoxStream;
@@ -86,8 +85,8 @@ impl WriteOptions {
 
 #[derive(Debug, Clone)]
 struct MountLink {
-    viewfs_path: PathBuf,
-    hdfs_path: PathBuf,
+    viewfs_path: String,
+    hdfs_path: String,
     protocol: Arc<NamenodeProtocol>,
 }
 
@@ -95,25 +94,23 @@ impl MountLink {
     fn new(viewfs_path: &str, hdfs_path: &str, protocol: Arc<NamenodeProtocol>) -> Self {
         // We should never have an empty path, we always want things mounted at root ("/") by default.
         Self {
-            viewfs_path: PathBuf::from(if viewfs_path.is_empty() {
-                "/"
-            } else {
-                viewfs_path
-            }),
-            hdfs_path: PathBuf::from(if hdfs_path.is_empty() { "/" } else { hdfs_path }),
+            viewfs_path: viewfs_path.trim_end_matches("/").to_string(),
+            hdfs_path: hdfs_path.trim_end_matches("/").to_string(),
             protocol,
         }
     }
 
     /// Convert a viewfs path into a name service path if it matches this link
-    fn resolve(&self, path: &Path) -> Option<PathBuf> {
-        if let Ok(relative_path) = path.strip_prefix(&self.viewfs_path) {
-            if relative_path.components().count() == 0 {
-                Some(self.hdfs_path.clone())
-            } else {
-                Some(self.hdfs_path.join(relative_path))
-            }
+    fn resolve(&self, path: &str) -> Option<String> {
+        // Make sure we don't partially match the last component. It either needs to be an exact
+        // match to a viewfs path, or needs to match with a trailing slash
+        if path == self.viewfs_path {
+            Some(self.hdfs_path.clone())
         } else {
-            None
+            path.strip_prefix(&format!("{}/", self.viewfs_path))
+                .map(|relative_path| {
+                    println!("Resolving {} and {}", self.hdfs_path, relative_path);
+                    format!("{}/{}", &self.hdfs_path, relative_path)
+                })
         }
     }
 }
@@ -126,20 +123,12 @@ struct MountTable {
 
 impl MountTable {
     fn resolve(&self, src: &str) -> (&MountLink, String) {
-        let path = Path::new(src);
         for link in self.mounts.iter() {
-            if let Some(resolved) = link.resolve(path) {
-                return (link, resolved.to_string_lossy().into());
+            if let Some(resolved) = link.resolve(src) {
+                return (link, resolved);
             }
         }
-        (
-            &self.fallback,
-            self.fallback
-                .resolve(path)
-                .unwrap()
-                .to_string_lossy()
-                .into(),
-        )
+        (&self.fallback, self.fallback.resolve(src).unwrap())
     }
 }
@@ -246,7 +235,7 @@ impl Client {
         if let Some(fallback) = fallback {
             // Sort the mount table from longest viewfs path to shortest. 
This makes sure more specific paths are considered first. - mounts.sort_by_key(|m| m.viewfs_path.components().count()); + mounts.sort_by_key(|m| m.viewfs_path.chars().filter(|c| *c == '/').count()); mounts.reverse(); Ok(MountTable { mounts, fallback }) @@ -719,19 +708,20 @@ pub struct FileStatus { impl FileStatus { fn from(value: HdfsFileStatusProto, base_path: &str) -> Self { - let mut path = PathBuf::from(base_path); - if let Ok(relative_path) = std::str::from_utf8(&value.path) { - if !relative_path.is_empty() { - path.push(relative_path) - } + let mut path = base_path.trim_end_matches("/").to_string(); + let relative_path = std::str::from_utf8(&value.path).unwrap(); + println!( + "Creating file status from {} and {}", + base_path, relative_path + ); + if !relative_path.is_empty() { + path.push('/'); + path.push_str(relative_path); } FileStatus { isdir: value.file_type() == FileType::IsDir, - path: path - .to_str() - .map(|x| x.to_string()) - .unwrap_or(String::new()), + path, length: value.length as usize, permission: value.permission.perm as u16, owner: value.owner, @@ -769,10 +759,7 @@ impl From for ContentSummary { #[cfg(test)] mod test { - use std::{ - path::{Path, PathBuf}, - sync::Arc, - }; + use std::sync::Arc; use url::Url; @@ -837,34 +824,22 @@ mod test { let protocol = create_protocol("hdfs://127.0.0.1:9000"); let link = MountLink::new("/view", "/hdfs", protocol); - assert_eq!( - link.resolve(Path::new("/view/dir/file")).unwrap(), - PathBuf::from("/hdfs/dir/file") - ); - assert_eq!( - link.resolve(Path::new("/view")).unwrap(), - PathBuf::from("/hdfs") - ); - assert!(link.resolve(Path::new("/hdfs/path")).is_none()); + assert_eq!(link.resolve("/view/dir/file").unwrap(), "/hdfs/dir/file"); + assert_eq!(link.resolve("/view").unwrap(), "/hdfs"); + assert!(link.resolve("/hdfs/path").is_none()); } #[test] fn test_fallback_link() { let protocol = create_protocol("hdfs://127.0.0.1:9000"); - let link = MountLink::new("", "/hdfs", protocol); + let link = MountLink::new("", "/hdfs", Arc::clone(&protocol)); - assert_eq!( - link.resolve(Path::new("/path/to/file")).unwrap(), - PathBuf::from("/hdfs/path/to/file") - ); - assert_eq!( - link.resolve(Path::new("/")).unwrap(), - PathBuf::from("/hdfs") - ); - assert_eq!( - link.resolve(Path::new("/hdfs/path")).unwrap(), - PathBuf::from("/hdfs/hdfs/path") - ); + assert_eq!(link.resolve("/path/to/file").unwrap(), "/hdfs/path/to/file"); + assert_eq!(link.resolve("/").unwrap(), "/hdfs/"); + assert_eq!(link.resolve("/hdfs/path").unwrap(), "/hdfs/hdfs/path"); + + let link = MountLink::new("", "", protocol); + assert_eq!(link.resolve("/").unwrap(), "/"); } #[test] @@ -893,25 +868,25 @@ mod test { // Exact mount path resolves to the exact HDFS path let (link, resolved) = mount_table.resolve("/mount1"); - assert_eq!(link.viewfs_path, Path::new("/mount1")); + assert_eq!(link.viewfs_path, "/mount1"); assert_eq!(resolved, "/path1/nested"); // Trailing slash is treated the same let (link, resolved) = mount_table.resolve("/mount1/"); - assert_eq!(link.viewfs_path, Path::new("/mount1")); - assert_eq!(resolved, "/path1/nested"); + assert_eq!(link.viewfs_path, "/mount1"); + assert_eq!(resolved, "/path1/nested/"); // Doesn't do partial matches on a directory name let (link, resolved) = mount_table.resolve("/mount12"); - assert_eq!(link.viewfs_path, Path::new("/")); + assert_eq!(link.viewfs_path, ""); assert_eq!(resolved, "/path4/mount12"); let (link, resolved) = mount_table.resolve("/mount3/file"); - assert_eq!(link.viewfs_path, Path::new("/")); + 
assert_eq!(link.viewfs_path, ""); assert_eq!(resolved, "/path4/mount3/file"); let (link, resolved) = mount_table.resolve("/mount3/nested/file"); - assert_eq!(link.viewfs_path, Path::new("/mount3/nested")); + assert_eq!(link.viewfs_path, "/mount3/nested"); assert_eq!(resolved, "/path3/file"); } } From 0d22fe70a1fb07948f3069f7d9f2d00f7cdbf58d Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Sun, 26 Jan 2025 19:37:25 -0500 Subject: [PATCH 33/35] Cleanup and shrink ec test size --- .github/workflows/rust-test.yml | 133 +++++++--------------- rust/minidfs/src/main/java/main/Main.java | 39 +++---- rust/src/client.rs | 9 +- rust/tests/test_ec.rs | 3 - 4 files changed, 55 insertions(+), 129 deletions(-) diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml index 3c16503..ca255ed 100644 --- a/.github/workflows/rust-test.yml +++ b/.github/workflows/rust-test.yml @@ -55,96 +55,43 @@ jobs: - name: Run unit tests run: cargo test -p hdfs-native --lib - # test-full: - # strategy: - # fail-fast: false - # matrix: - # os: - # - ubuntu-latest - # # - macos-latest - # # - windows-latest - # runs-on: ${{ matrix.os }} - # env: - # # Disable full debug symbol generation to speed up CI build and keep memory down - # # "1" means line tables only, which is useful for panic tracebacks. - # RUSTFLAGS: -C debuginfo=1 - # RUST_BACKTRACE: "1" - # RUST_LOG: debug - - # steps: - # - uses: actions/checkout@v4 - - # - name: Install minimal stable with clippy and rustfmt - # uses: dtolnay/rust-toolchain@master - # with: - # toolchain: stable - - # - uses: Swatinem/rust-cache@v2 - - # - uses: actions/setup-java@v4 - # with: - # distribution: "temurin" - # java-version: "17" - - # - name: Install native libs - # run: sudo apt-get install -y libkrb5-dev krb5-user - - # - name: Download Hadoop - # run: | - # wget -q https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1.tar.gz - # tar -xf hadoop-3.4.1.tar.gz -C $GITHUB_WORKSPACE - # echo "$GITHUB_WORKSPACE/hadoop-3.4.1/bin" >> $GITHUB_PATH - - # - name: Run tests - # run: cargo test --features integration-test - - # test-simple: - # strategy: - # fail-fast: false - # matrix: - # os: - # - ubuntu-latest - # # - macos-latest - # - windows-latest - # runs-on: ${{ matrix.os }} - # env: - # # Disable full debug symbol generation to speed up CI build and keep memory down - # # "1" means line tables only, which is useful for panic tracebacks. 
- # RUSTFLAGS: -C debuginfo=1 - # RUST_BACKTRACE: "1" - # RUST_LOG: debug - - # steps: - # - uses: actions/checkout@v4 - - # - uses: actions/checkout@v4 - # with: - # repository: cdarlint/winutils - # path: winutils - - # - name: Install minimal stable with clippy and rustfmt - # uses: dtolnay/rust-toolchain@master - # with: - # toolchain: stable - - # - uses: Swatinem/rust-cache@v2 - - # - uses: actions/setup-java@v4 - # with: - # distribution: "temurin" - # java-version: "17" - - # # - name: Download Hadoop - # # run: | - # # Invoke-WebRequest -Uri https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1-lean.tar.gz -OutFile hadoop-3.4.1.tar.gz - # # tar -xf hadoop-3.4.1.tar.gz -C $env:GITHUB_WORKSPACE - # # echo "HADOOP_HOME=$env:GITHUB_WORKSPACE\hadoop-3.4.1" >> $env:GITHUB_ENV - # # touch $env:GITHUB_WORKSPACE\hadoop-3.4.1\bin\winutils.exe - # # ls $env:GITHUB_WORKSPACE\hadoop-3.4.1\bin - - - - # - name: Run tests - # run: cargo test --features integration-test test_basic_non_ha -- --nocapture - # env: - # HADOOP_HOME: ${{ github.workspace }}\winutils\hadoop-3.3.6 + test-integration: + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + runs-on: ${{ matrix.os }} + env: + # Disable full debug symbol generation to speed up CI build and keep memory down + # "1" means line tables only, which is useful for panic tracebacks. + RUSTFLAGS: -C debuginfo=1 + RUST_BACKTRACE: "1" + RUST_LOG: debug + + steps: + - uses: actions/checkout@v4 + + - name: Install minimal stable with clippy and rustfmt + uses: dtolnay/rust-toolchain@master + with: + toolchain: stable + + - uses: Swatinem/rust-cache@v2 + + - uses: actions/setup-java@v4 + with: + distribution: "temurin" + java-version: "17" + + - name: Install native libs + run: sudo apt-get install -y libkrb5-dev krb5-user + + - name: Download Hadoop + run: | + wget -q https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1.tar.gz + tar -xf hadoop-3.4.1.tar.gz -C $GITHUB_WORKSPACE + echo "$GITHUB_WORKSPACE/hadoop-3.4.1/bin" >> $GITHUB_PATH + + - name: Run tests + run: cargo test --features integration-test diff --git a/rust/minidfs/src/main/java/main/Main.java b/rust/minidfs/src/main/java/main/Main.java index 1a7215f..db42bd2 100644 --- a/rust/minidfs/src/main/java/main/Main.java +++ b/rust/minidfs/src/main/java/main/Main.java @@ -113,33 +113,23 @@ public static void main(String args[]) throws Exception { numDataNodes = 14; } - System.err.println("Failing conf: " + DFSConfigKeys.NNTOP_WINDOWS_MINUTES_KEY + - " = " + conf.get(DFSConfigKeys.NNTOP_WINDOWS_MINUTES_KEY)); - System.err.println("!!!!!!!!!!!!!!!!!!!! Building"); - try { - dfs = new MiniDFSCluster.Builder(hdfsConf) - .nameNodePort(9000) - .nameNodeHttpPort(9870) - .nnTopology(nnTopology) - .numDataNodes(numDataNodes) - .build(); - - if (flags.contains("viewfs")) { - hdfsConf.set(FS_DEFAULT_NAME_KEY, "viewfs://minidfs-viewfs"); - } else if (flags.contains("ha")) { - hdfsConf.set(FS_DEFAULT_NAME_KEY, "hdfs://minidfs-ns"); - } else { - hdfsConf.set(FS_DEFAULT_NAME_KEY, "hdfs://127.0.0.1:9000"); - } + dfs = new MiniDFSCluster.Builder(hdfsConf) + .nameNodePort(9000) + .nameNodeHttpPort(9870) + .nnTopology(nnTopology) + .numDataNodes(numDataNodes) + .build(); - System.err.println("!!!!!!!!!!!!!!!!!!!! Waiting for cluster to be active"); - dfs.waitActive(); - } catch (Exception e) { - System.err.println("!!!!!!!!!!!!!!!!!!!! 
Caught exception"); - e.printStackTrace(); - throw e; + if (flags.contains("viewfs")) { + hdfsConf.set(FS_DEFAULT_NAME_KEY, "viewfs://minidfs-viewfs"); + } else if (flags.contains("ha")) { + hdfsConf.set(FS_DEFAULT_NAME_KEY, "hdfs://minidfs-ns"); + } else { + hdfsConf.set(FS_DEFAULT_NAME_KEY, "hdfs://127.0.0.1:9000"); } + dfs.waitActive(); + int activeNamenode = 0; if (flags.contains("viewfs")) { // Each name services has two namenodes @@ -183,7 +173,6 @@ public static void main(String args[]) throws Exception { } } - System.err.println("!!!!!!!!!!!!!!!!!!!! Writing config file"); hdfsConf.writeXml(new FileOutputStream("target/test/core-site.xml")); System.out.println("Ready!"); diff --git a/rust/src/client.rs b/rust/src/client.rs index 2f01cb2..ef2d83f 100644 --- a/rust/src/client.rs +++ b/rust/src/client.rs @@ -107,10 +107,7 @@ impl MountLink { Some(self.hdfs_path.clone()) } else { path.strip_prefix(&format!("{}/", self.viewfs_path)) - .map(|relative_path| { - println!("Resolving {} and {}", self.hdfs_path, relative_path); - format!("{}/{}", &self.hdfs_path, relative_path) - }) + .map(|relative_path| format!("{}/{}", &self.hdfs_path, relative_path)) } } } @@ -710,10 +707,6 @@ impl FileStatus { fn from(value: HdfsFileStatusProto, base_path: &str) -> Self { let mut path = base_path.trim_end_matches("/").to_string(); let relative_path = std::str::from_utf8(&value.path).unwrap(); - println!( - "Creating file status from {} and {}", - base_path, relative_path - ); if !relative_path.is_empty() { path.push('/'); path.push_str(relative_path); diff --git a/rust/tests/test_ec.rs b/rust/tests/test_ec.rs index 9f895ce..a0056e7 100644 --- a/rust/tests/test_ec.rs +++ b/rust/tests/test_ec.rs @@ -88,9 +88,6 @@ mod test { CELL_SIZE * data_units * 5, // Five "rows" of cells CELL_SIZE * data_units * 5 - 4, CELL_SIZE * data_units * 5 + 4, - 128 * CELL_SIZE, - 128 * CELL_SIZE - 4, - 128 * CELL_SIZE + 4, ] } #[tokio::test] From 50d6d391850e8a1da79a45f5c544be3e06b42b30 Mon Sep 17 00:00:00 2001 From: Adam Binford Date: Sun, 26 Jan 2025 19:58:29 -0500 Subject: [PATCH 34/35] Try to fix tests --- rust/src/common/config.rs | 7 ++++++- rust/src/security/user.rs | 19 +++++-------------- 2 files changed, 11 insertions(+), 15 deletions(-) diff --git a/rust/src/common/config.rs b/rust/src/common/config.rs index 1562018..f2640ca 100644 --- a/rust/src/common/config.rs +++ b/rust/src/common/config.rs @@ -188,6 +188,10 @@ impl Configuration { #[cfg(test)] mod test { + use std::net::IpAddr; + + use dns_lookup::lookup_addr; + use crate::common::config::DFS_CLIENT_FAILOVER_RESOLVER_USE_FQDN; use super::{ @@ -282,8 +286,9 @@ mod test { }; let urls = config.get_urls_for_nameservice("test").unwrap(); + let fqdn = lookup_addr(&IpAddr::from([127, 0, 0, 1])).unwrap(); assert_eq!(urls.len(), 1, "{:?}", urls); - assert_eq!(urls[0], "localhost:9000"); + assert_eq!(urls[0], format!("{}:9000", fqdn)); config.map.insert( format!("{}.{}", DFS_CLIENT_FAILOVER_RESOLVER_USE_FQDN, "test"), diff --git a/rust/src/security/user.rs b/rust/src/security/user.rs index da10885..ce6d694 100644 --- a/rust/src/security/user.rs +++ b/rust/src/security/user.rs @@ -5,6 +5,7 @@ use prost::Message; use std::env; use std::fs; use std::io; +use std::path::Path; use std::path::PathBuf; use whoami::username; @@ -180,12 +181,12 @@ pub struct Token { impl Token { fn load_tokens() -> Vec { match env::var(HADOOP_TOKEN_FILE_LOCATION).map(PathBuf::from) { - Ok(path) if path.exists() => Self::read_token_file(path).ok().unwrap_or_default(), + Ok(path) if path.exists() => 
Self::read_token_file(&path).ok().unwrap_or_default(),
             _ => Vec::new(),
         }
     }
 
-    fn read_token_file(path: PathBuf) -> std::io::Result<Vec<Token>> {
+    fn read_token_file(path: &Path) -> std::io::Result<Vec<Token>> {
         let mut content = Bytes::from(fs::read(path)?);
 
         let magic = content.copy_to_bytes(4);
@@ -425,12 +426,7 @@ mod tests {
             .unwrap();
         token_file.flush().unwrap();
 
-        env::set_var(
-            HADOOP_TOKEN_FILE_LOCATION,
-            token_file.path().to_str().unwrap(),
-        );
-
-        let tokens = Token::load_tokens();
+        let tokens = Token::read_token_file(token_file.path()).unwrap();
 
         assert_eq!(tokens.len(), 1);
         assert_eq!(tokens[0].kind, "HDFS_DELEGATION_TOKEN");
@@ -455,12 +451,7 @@ mod tests {
             .unwrap();
         token_file.flush().unwrap();
 
-        env::set_var(
-            HADOOP_TOKEN_FILE_LOCATION,
-            token_file.path().to_str().unwrap(),
-        );
-
-        let tokens = Token::load_tokens();
+        let tokens = Token::read_token_file(token_file.path()).unwrap();
 
         assert_eq!(tokens.len(), 1);
         assert_eq!(tokens[0].kind, "HDFS_DELEGATION_TOKEN");

From 8c7876a0f743d37c08736f2c77a7c87ab6c2b306 Mon Sep 17 00:00:00 2001
From: Adam Binford
Date: Sun, 26 Jan 2025 20:05:03 -0500
Subject: [PATCH 35/35] Final cleanup

---
 .github/workflows/rust-test.yml           |  4 ++--
 rust/minidfs/pom.xml                      | 10 +++++-----
 rust/minidfs/src/main/java/main/Main.java |  1 -
 rust/src/minidfs.rs                       |  2 +-
 4 files changed, 8 insertions(+), 9 deletions(-)

diff --git a/.github/workflows/rust-test.yml b/.github/workflows/rust-test.yml
index ca255ed..c7141ab 100644
--- a/.github/workflows/rust-test.yml
+++ b/.github/workflows/rust-test.yml
@@ -89,8 +89,8 @@ jobs:
     - name: Download Hadoop
       run: |
-        wget -q https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1.tar.gz
-        tar -xf hadoop-3.4.1.tar.gz -C $GITHUB_WORKSPACE
+        wget -q https://dlcdn.apache.org/hadoop/common/hadoop-3.4.1/hadoop-3.4.1-lean.tar.gz
+        tar -xf hadoop-3.4.1-lean.tar.gz -C $GITHUB_WORKSPACE
         echo "$GITHUB_WORKSPACE/hadoop-3.4.1/bin" >> $GITHUB_PATH
 
     - name: Run tests
       run: cargo test --features integration-test

diff --git a/rust/minidfs/pom.xml b/rust/minidfs/pom.xml
index ecd99d4..7ee7e60 100644
--- a/rust/minidfs/pom.xml
+++ b/rust/minidfs/pom.xml
@@ -11,7 +11,7 @@
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-minicluster</artifactId>
-     <version>3.4.0</version>
+     <version>3.4.1</version>
      <exclusions>
        <exclusion>
          <groupId>ch.qos.logback</groupId>
@@ -22,23 +22,23 @@
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-minikdc</artifactId>
-     <version>3.4.0</version>
+     <version>3.4.1</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs-rbf</artifactId>
-     <version>3.4.0</version>
+     <version>3.4.1</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs-rbf</artifactId>
-     <version>3.4.0</version>
+     <version>3.4.1</version>
      <type>test-jar</type>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-federation-balance</artifactId>
-     <version>3.4.0</version>
+     <version>3.4.1</version>
      <exclusions>
        <exclusion>
          <groupId>junit</groupId>

diff --git a/rust/minidfs/src/main/java/main/Main.java b/rust/minidfs/src/main/java/main/Main.java
index db42bd2..5d0ca5b 100644
--- a/rust/minidfs/src/main/java/main/Main.java
+++ b/rust/minidfs/src/main/java/main/Main.java
@@ -182,7 +182,6 @@ public static void main(String args[]) throws Exception {
 
         BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
         reader.readLine();
-        System.err.println("Read line, closing");
 
         if (dfs != null) {
             dfs.close();

diff --git a/rust/src/minidfs.rs b/rust/src/minidfs.rs
index b21988f..05bf01b 100644
--- a/rust/src/minidfs.rs
+++ b/rust/src/minidfs.rs
@@ -74,7 +74,7 @@ impl MiniDfs {
             ])
             .stdin(Stdio::piped())
             .stdout(Stdio::piped())
-            // .stderr(Stdio::null())
+            .stderr(Stdio::null())
             .spawn()
             .unwrap();