39 changes: 24 additions & 15 deletions Cargo.lock

Some generated files are not rendered by default.

4 changes: 2 additions & 2 deletions Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "hdfs-native-object-store"
-version = "0.14.2"
+version = "0.15.0"
 edition = "2021"
 authors = ["Adam Binford <[email protected]>"]
 homepage = "https://github.com/datafusion-contrib/hdfs-native-object-store"
@@ -15,7 +15,7 @@ async-trait = "0.1"
 bytes = "1"
 chrono = "0.4"
 futures = "0.3"
-hdfs-native = "0.11"
+hdfs-native = "0.12"
 object_store = "0.12.2"
 thiserror = "2"
 tokio = { version = "1", features = ["rt", "net", "io-util", "macros", "sync", "time"] }
3 changes: 2 additions & 1 deletion README.md
@@ -12,6 +12,7 @@ Each release supports a certain minor release of both the `object_store` crate a
 |0.12.x|>=0.10, <0.12|0.10|
 |0.13.x|>=0.10, <0.12|0.11|
 |0.14.x|0.12|0.11|
+|0.15.x|0.12|0.12|
 
 # Usage
 ```rust
@@ -20,4 +21,4 @@ let store = HdfsObjectStore::with_url("hdfs://localhost:9000")?;
 ```
 
 # Documentation
-See [Documentation](https://docs.rs/hdfs-native-object-store).
+See [Documentation](https://docs.rs/hdfs-native-object-store).
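
The README keeps its one-line `with_url` example. For readers picking up the 0.15.x line, here is a minimal downstream sketch of driving the same store through the `object_store` 0.12 trait; the NameNode URL, the object path, and the `tokio` runtime setup are illustrative assumptions, not part of this change.

```rust
// Minimal downstream sketch (not part of this PR): round-trips a small object
// through the ObjectStore trait. URL, path, and runtime setup are placeholders.
use hdfs_native_object_store::HdfsObjectStore;
use object_store::{path::Path, ObjectStore, PutPayload};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Matches the README example: connect to a single NameNode by URL.
    let store = HdfsObjectStore::with_url("hdfs://localhost:9000")?;

    // Write a small object, then read it back through the same trait methods
    // any other object_store backend exposes.
    let path = Path::from("tmp/example.txt");
    store.put(&path, PutPayload::from_static(b"hello hdfs")).await?;

    let bytes = store.get(&path).await?.bytes().await?;
    assert_eq!(&bytes[..], b"hello hdfs");
    Ok(())
}
```
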
21 changes: 16 additions & 5 deletions src/lib.rs
@@ -25,7 +25,9 @@ use futures::{
     stream::{BoxStream, StreamExt},
     FutureExt,
 };
-use hdfs_native::{client::FileStatus, file::FileWriter, Client, HdfsError, WriteOptions};
+use hdfs_native::{
+    client::FileStatus, file::FileWriter, Client, ClientBuilder, HdfsError, WriteOptions,
+};
 #[allow(deprecated)]
 use object_store::{
     path::Path, GetOptions, GetResult, GetResultPayload, ListResult, MultipartUpload, ObjectMeta,
@@ -60,9 +62,9 @@ impl HdfsObjectStore {
     ///
     /// ```rust
     /// # use std::sync::Arc;
-    /// use hdfs_native::Client;
+    /// use hdfs_native::ClientBuilder;
     /// # use hdfs_native_object_store::HdfsObjectStore;
-    /// let client = Client::new("hdfs://127.0.0.1:9000").unwrap();
+    /// let client = ClientBuilder::new().with_url("hdfs://127.0.0.1:9000").build().unwrap();
     /// let store = HdfsObjectStore::new(Arc::new(client));
     /// ```
     pub fn new(client: Arc<Client>) -> Self {
@@ -80,7 +82,12 @@ impl HdfsObjectStore {
     /// # }
     /// ```
     pub fn with_url(url: &str) -> Result<Self> {
-        Ok(Self::new(Arc::new(Client::new(url).to_object_store_err()?)))
+        Ok(Self::new(Arc::new(
+            ClientBuilder::new()
+                .with_url(url)
+                .build()
+                .to_object_store_err()?,
+        )))
     }
 
     /// Creates a new HdfsObjectStore using the specified URL and Hadoop configs.
@@ -101,7 +108,11 @@ impl HdfsObjectStore {
     /// ```
     pub fn with_config(url: &str, config: HashMap<String, String>) -> Result<Self> {
         Ok(Self::new(Arc::new(
-            Client::new_with_config(url, config).to_object_store_err()?,
+            ClientBuilder::new()
+                .with_url(url)
+                .with_config(config)
+                .build()
+                .to_object_store_err()?,
         )))
     }
 
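
With `Client::new` and `Client::new_with_config` replaced by `hdfs_native::ClientBuilder` above, a hedged sketch of how a downstream caller might exercise the updated `with_config` path follows; the `hdfs://ns` nameservice URL, the Hadoop HA config keys, and the hostnames are placeholder assumptions for illustration only.

```rust
// Hypothetical downstream use of the with_config path touched above; the
// nameservice URL, config keys, and hostnames are illustrative only.
use std::collections::HashMap;

use hdfs_native_object_store::HdfsObjectStore;

fn build_ha_store() -> Result<HdfsObjectStore, Box<dyn std::error::Error>> {
    // Hadoop HA-style settings passed straight through to the client;
    // internally the store now builds its client via hdfs_native::ClientBuilder
    // rather than the removed Client::new_with_config constructor.
    let config = HashMap::from([
        ("dfs.ha.namenodes.ns".to_string(), "nn1,nn2".to_string()),
        (
            "dfs.namenode.rpc-address.ns.nn1".to_string(),
            "nn1.example.com:8020".to_string(),
        ),
        (
            "dfs.namenode.rpc-address.ns.nn2".to_string(),
            "nn2.example.com:8020".to_string(),
        ),
    ]);

    Ok(HdfsObjectStore::with_config("hdfs://ns", config)?)
}
```
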