 //! `kerberos` - Enables Kerberos authentication support via the [libgssapi](https://docs.rs/libgssapi/latest/libgssapi) crate
 //!
 use std::{
+    collections::HashMap,
     fmt::{Display, Formatter},
     future,
     path::PathBuf,
@@ -39,6 +40,10 @@ use tokio::{
     task::{self, JoinHandle},
 };
 
+// Re-export minidfs for downstream integration tests
+#[cfg(feature = "integration-test")]
+pub use hdfs_native::minidfs;
+
 #[derive(Debug)]
 pub struct HdfsObjectStore {
     client: Arc<Client>,
@@ -78,9 +83,29 @@ impl HdfsObjectStore {
     /// # }
     /// ```
     pub fn with_url(url: &str) -> Result<Self> {
-        Ok(Self {
-            client: Arc::new(Client::new(url).to_object_store_err()?),
-        })
+        Ok(Self::new(Arc::new(Client::new(url).to_object_store_err()?)))
+    }
+
+    /// Creates a new HdfsObjectStore using the specified URL and Hadoop configs.
+    ///
+    /// Connect to a NameService
+    /// ```rust
+    /// # use hdfs_native_object_store::HdfsObjectStore;
+    /// # use std::collections::HashMap;
+    /// # fn main() -> object_store::Result<()> {
+    /// let config = HashMap::from([
+    ///     ("dfs.ha.namenodes.ns".to_string(), "nn1,nn2".to_string()),
+    ///     ("dfs.namenode.rpc-address.ns.nn1".to_string(), "nn1.example.com:9000".to_string()),
+    ///     ("dfs.namenode.rpc-address.ns.nn2".to_string(), "nn2.example.com:9000".to_string()),
+    /// ]);
+    /// let store = HdfsObjectStore::with_config("hdfs://ns", config)?;
+    /// # Ok(())
+    /// # }
+    /// ```
+    pub fn with_config(url: &str, config: HashMap<String, String>) -> Result<Self> {
+        Ok(Self::new(Arc::new(
+            Client::new_with_config(url, config).to_object_store_err()?,
+        )))
     }
 
     async fn internal_copy(&self, from: &Path, to: &Path, overwrite: bool) -> Result<()> {
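
Since the commit re-exports `hdfs_native::minidfs` behind the `integration-test` feature, a downstream crate can start a MiniDfs cluster in its own tests without adding a direct `hdfs-native` dependency. The sketch below shows roughly what such a test could look like; the `MiniDfs::with_features` constructor, its `url` field, and the test itself are assumptions based on hdfs-native's own test helpers rather than anything introduced by this commit, and running it requires a local Java/Hadoop setup.

```rust
// Hypothetical downstream integration test (not part of this commit).
// Assumes hdfs_native::minidfs exposes `MiniDfs::with_features(&HashSet<DfsFeatures>)`
// and a public `url` field, as used in hdfs-native's own tests, and that `tokio`
// and `object_store` are available as dev-dependencies. Launching MiniDfs also
// needs a local Java/Hadoop toolchain on the PATH.
#[cfg(feature = "integration-test")]
#[tokio::test]
async fn test_hdfs_round_trip() -> object_store::Result<()> {
    use std::collections::HashSet;

    use hdfs_native_object_store::{minidfs::MiniDfs, HdfsObjectStore};
    use object_store::{path::Path, ObjectStore};

    // Start a basic single-NameNode mini cluster with no optional features.
    let dfs = MiniDfs::with_features(&HashSet::new());
    let store = HdfsObjectStore::with_url(&dfs.url)?;

    // Round-trip a small object through the store (assumes object_store 0.10+,
    // where `put` takes a `PutPayload` convertible from a string literal).
    let location = Path::from("integration-test/hello.txt");
    store.put(&location, "hello world".into()).await?;
    let bytes = store.get(&location).await?.bytes().await?;
    assert_eq!(&bytes[..], b"hello world");
    Ok(())
}
```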