Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 29 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,34 @@
# Changelog

## 0.47.0 - 2026-04-15

### Enhancements
- Replaced `typed-builder` dependency with `bon` for all parameter builder structs
- Added `maybe_`-prefixed setters for all `Option` fields on parameter builders, allowing
  callers to pass `Option<T>` directly (e.g. `maybe_start(Some(datetime))` or `maybe_start(None)`)
- Upgraded DBN version to 0.54.0:
- Added `RecordBuf`, an owned stack-allocated buffer for holding a DBN record of
dynamic type. Complements `RecordRef` (borrowed, dynamic) and concrete types
(owned, static). Supports `get()`, `try_get()`, `set()`, `upgrade()`, and
cross-capacity `PartialEq`
- Added `RecordRefMut` for mutable non-owning references to DBN records
- Added `RecordRef::to_owned()` and `RecordRefMut::to_owned()` for converting
borrowed records to an owned `RecordBuf`
- Added `Hash`, `PartialEq`, and `Eq` for `RecordRef`, including cross-type equality
between `RecordBuf` and `RecordRef`
- Changed `RecordHeader` to be `Copy`

### Breaking changes
- Changed `use_snapshot()` setter on `Subscription` builder to require an explicit `bool`
argument: `.use_snapshot()` becomes `.use_snapshot(true)`
- Changed `map_symbols` field in `SubmitJobParams` from `bool` to `Option<bool>`. When `None`,
  the request defaults to `true` for the `Csv` and `Json` encodings and `false` for `Dbn`
- Changed `limit()` setter on `GetRangeParams`, `GetRangeToFileParams`, `GetQueryParams`, and
`SubmitJobParams` builders to accept `NonZeroU64` instead of `Option<NonZeroU64>`.
Use `maybe_limit()` for the previous behavior of passing an `Option` directly

### Bug fixes
- Removed `#[doc(hidden)]` from `Subscription::use_snapshot`

## 0.46.0 - 2026-04-07

### Enhancements
Expand Down
6 changes: 3 additions & 3 deletions Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
[package]
name = "databento"
authors = ["Databento <support@databento.com>"]
version = "0.46.0"
version = "0.47.0"
edition = "2021"
repository = "https://github.com/databento/databento-rs"
description = "Official Databento client library"
Expand Down Expand Up @@ -33,7 +33,7 @@ live = ["tokio/net", "tokio/time"]
chrono = ["dep:chrono"]

[dependencies]
dbn = { version = "0.53", features = ["async", "serde"] }
dbn = { version = "0.54", features = ["async", "serde"] }

async-compression = { version = "0.4", features = ["tokio", "zstd"], optional = true }
chrono = { version = ">=0.4.34", optional = true, default-features = false, features = ["alloc"] }
Expand All @@ -52,7 +52,7 @@ tokio = { version = ">=1.41", features = ["io-util", "macros"] }
# Stream utils
tokio-util = { version = "0.7", features = ["io"], optional = true }
tracing = "0.1"
typed-builder = "0.23"
bon = "3"
zstd = { version = "0.13", optional = true }

[dev-dependencies]
Expand Down
2 changes: 1 addition & 1 deletion examples/live_smoke_test.rs
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,7 @@ async fn run_with_snapshot(args: Args, mut client: LiveClient) -> anyhow::Result
.schema(args.schema)
.symbols(args.symbols)
.stype_in(args.stype)
.use_snapshot()
.use_snapshot(true)
.build(),
)
.await?;
Expand Down
47 changes: 25 additions & 22 deletions src/historical/batch.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@ use tokio::{
io::{AsyncReadExt, BufWriter},
};
use tracing::{debug, error, info, info_span, instrument, warn, Instrument};
use typed_builder::TypedBuilder;

use crate::{
deserialize::{deserialize_date_time, deserialize_opt_date_time},
Expand Down Expand Up @@ -73,7 +72,13 @@ impl BatchClient<'_> {
("compression", params.compression.to_string()),
("pretty_px", params.pretty_px.to_string()),
("pretty_ts", params.pretty_ts.to_string()),
("map_symbols", params.map_symbols.to_string()),
(
"map_symbols",
params
.map_symbols
.unwrap_or(params.encoding != Encoding::Dbn)
.to_string(),
),
("split_symbols", params.split_symbols.to_string()),
("delivery", params.delivery.to_string()),
("stype_in", params.stype_in.to_string()),
Expand Down Expand Up @@ -375,20 +380,20 @@ pub enum JobState {

/// The parameters for [`BatchClient::submit_job()`]. Use [`SubmitJobParams::builder()`] to
/// get a builder type with all the preset defaults.
#[derive(Debug, Clone, TypedBuilder, PartialEq, Eq)]
#[derive(Debug, Clone, bon::Builder, PartialEq, Eq)]
pub struct SubmitJobParams {
/// The dataset code.
#[builder(setter(transform = |dt: impl ToString| dt.to_string()))]
#[builder(with = |d: impl ToString| d.to_string())]
pub dataset: String,
/// The symbols to filter for.
#[builder(setter(into))]
#[builder(into)]
pub symbols: Symbols,
/// The data record schema.
pub schema: Schema,
/// The request range with an inclusive start and an exclusive end.
///
/// Filters on `ts_recv` if it exists in the schema, otherwise `ts_event`.
#[builder(setter(into))]
#[builder(into)]
pub date_time_range: DateTimeRange,
/// The data encoding. Defaults to [`Dbn`](Encoding::Dbn).
#[builder(default = Encoding::Dbn)]
Expand All @@ -405,10 +410,9 @@ pub struct SubmitJobParams {
#[builder(default)]
pub pretty_ts: bool,
/// If `true`, a symbol field will be included with each text-encoded
/// record. If `None`, will default to `true` for [`Encoding::Csv`] and [`Encoding::Json`] encodings,
/// and `false` for [`Encoding::Dbn`].
#[builder(default_code = "*encoding != Encoding::Dbn")]
pub map_symbols: bool,
/// record. Defaults to `true` for [`Encoding::Csv`] and [`Encoding::Json`] encodings
/// when `None`, and `false` for [`Encoding::Dbn`].
pub map_symbols: Option<bool>,
/// If `true`, files will be split by raw symbol. Cannot be requested with [`Symbols::All`].
#[builder(default)]
pub split_symbols: bool,
Expand All @@ -421,7 +425,6 @@ pub struct SubmitJobParams {
pub split_duration: SplitDuration,
/// The optional maximum size (in bytes) of each batched data file before being split.
/// Must be an integer between 1e9 and 10e9 inclusive (1GB - 10GB). Defaults to `None`.
#[builder(default, setter(strip_option))]
pub split_size: Option<NonZeroU64>,
/// The delivery mechanism for the batched data files once processed.
/// Only [`Download`](Delivery::Download) is supported at this time.
Expand All @@ -439,7 +442,6 @@ pub struct SubmitJobParams {
#[builder(default = SType::InstrumentId)]
pub stype_out: SType,
/// The optional maximum number of records to return. Defaults to no limit.
#[builder(default)]
pub limit: Option<NonZeroU64>,
}

Expand Down Expand Up @@ -523,14 +525,12 @@ pub struct BatchJob {

/// The parameters for [`BatchClient::list_jobs()`]. Use [`ListJobsParams::builder()`] to
/// get a builder type with all the preset defaults.
#[derive(Debug, Clone, Default, TypedBuilder, PartialEq, Eq)]
#[derive(Debug, Clone, Default, bon::Builder, PartialEq, Eq)]
pub struct ListJobsParams {
/// The optional filter for job states. If `None`, defaults to all except `Expired`.
#[builder(default, setter(strip_option))]
pub states: Option<Vec<JobState>>,
/// The optional filter for timestamp submitted (will not include jobs prior to
/// this time).
#[builder(default, setter(strip_option))]
pub since: Option<OffsetDateTime>,
}

Expand All @@ -549,16 +549,16 @@ pub struct BatchFileDesc {

/// The parameters for [`BatchClient::download()`]. Use [`DownloadParams::builder()`] to
/// get a builder type with all the preset defaults.
#[derive(Debug, Clone, TypedBuilder, PartialEq, Eq)]
#[derive(Debug, Clone, bon::Builder, PartialEq, Eq)]
pub struct DownloadParams {
/// The directory to download the file(s) to.
#[builder(setter(transform = |dt: impl Into<PathBuf>| dt.into()))]
#[builder(into)]
pub output_dir: PathBuf,
/// The batch job identifier.
#[builder(setter(transform = |dt: impl ToString| dt.to_string()))]
#[builder(with = |id: impl ToString| id.to_string())]
pub job_id: String,
/// `None` means all files associated with the job will be downloaded.
#[builder(default, setter(transform = |filename: impl ToString| Some(filename.to_string())))]
#[builder(with = |f: impl ToString| f.to_string())]
pub filename_to_download: Option<String>,
}

Expand Down Expand Up @@ -803,6 +803,8 @@ mod tests {
const START: time::OffsetDateTime = datetime!(2023 - 06 - 14 00:00 UTC);
const END: time::OffsetDateTime = datetime!(2023 - 06 - 17 00:00 UTC);

// When not explicitly set, map_symbols is None (resolved at request time
// based on encoding)
let params = SubmitJobParams::builder()
.dataset(Dataset::GlbxMdp3)
.encoding(Encoding::Dbn)
Expand All @@ -811,7 +813,7 @@ mod tests {
.date_time_range(START..END)
.build();
assert_eq!(params.encoding, Encoding::Dbn);
assert_eq!(params.map_symbols, false);
assert!(params.map_symbols.is_none());

let params = SubmitJobParams::builder()
.dataset(Dataset::GlbxMdp3)
Expand All @@ -821,8 +823,9 @@ mod tests {
.date_time_range(START..END)
.build();
assert_eq!(params.encoding, Encoding::Csv);
assert_eq!(params.map_symbols, true);
assert!(params.map_symbols.is_none());

// When explicitly set, map_symbols preserves the value
let params = SubmitJobParams::builder()
.dataset(Dataset::GlbxMdp3)
.encoding(Encoding::Json)
Expand All @@ -832,7 +835,7 @@ mod tests {
.map_symbols(false)
.build();
assert_eq!(params.encoding, Encoding::Json);
assert_eq!(params.map_symbols, false);
assert_eq!(params.map_symbols, Some(false));

Ok(())
}
Expand Down
18 changes: 8 additions & 10 deletions src/historical/metadata.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@ use dbn::{Encoding, SType, Schema};
use reqwest::RequestBuilder;
use serde::{Deserialize, Deserializer};
use tracing::instrument;
use typed_builder::TypedBuilder;

use crate::{
deserialize::deserialize_date_time,
Expand Down Expand Up @@ -233,7 +232,7 @@ pub struct PublisherDetail {

/// The parameters for [`MetadataClient::list_fields()`]. Use
/// [`ListFieldsParams::builder()`] to get a builder type with all the preset defaults.
#[derive(Debug, Clone, TypedBuilder, PartialEq, Eq)]
#[derive(Debug, Clone, bon::Builder, PartialEq, Eq)]
pub struct ListFieldsParams {
/// The encoding to request fields for.
pub encoding: Encoding,
Expand Down Expand Up @@ -263,14 +262,14 @@ pub struct UnitPricesForMode {
/// The parameters for [`MetadataClient::get_dataset_condition()`]. Use
/// [`GetDatasetConditionParams::builder()`] to get a builder type with all the preset
/// defaults.
#[derive(Debug, Clone, TypedBuilder, PartialEq, Eq)]
#[derive(Debug, Clone, bon::Builder, PartialEq, Eq)]
pub struct GetDatasetConditionParams {
/// The dataset code.
#[builder(setter(transform = |dataset: impl ToString| dataset.to_string()))]
#[builder(with = |d: impl ToString| d.to_string())]
pub dataset: String,
/// The UTC date request range with an inclusive start date and an inclusive end date.
/// If `None` then will return all available dates.
#[builder(default, setter(transform = |dr: impl Into<DateRange>| Some(dr.into())))]
#[builder(into)]
pub date_range: Option<DateRange>,
}

Expand Down Expand Up @@ -310,25 +309,24 @@ impl From<DatasetRange> for DateTimeRange {
}

/// The parameters for several metadata requests.
#[derive(Debug, Clone, TypedBuilder, PartialEq, Eq)]
#[derive(Debug, Clone, bon::Builder, PartialEq, Eq)]
pub struct GetQueryParams {
/// The dataset code.
#[builder(setter(transform = |dataset: impl ToString| dataset.to_string()))]
#[builder(with = |d: impl ToString| d.to_string())]
pub dataset: String,
/// The symbols to filter for.
#[builder(setter(into))]
#[builder(into)]
pub symbols: Symbols,
/// The data record schema.
pub schema: Schema,
/// The request range with an inclusive start and an exclusive end.
#[builder(setter(into))]
#[builder(into)]
pub date_time_range: DateTimeRange,
/// The symbology type of the input `symbols`. Defaults to
/// [`RawSymbol`](dbn::enums::SType::RawSymbol).
#[builder(default = SType::RawSymbol)]
pub stype_in: SType,
/// The optional maximum number of records to return. Defaults to no limit.
#[builder(default)]
pub limit: Option<NonZeroU64>,
}

Expand Down
9 changes: 4 additions & 5 deletions src/historical/symbology.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@ use dbn::{MappingInterval, Metadata, SType, TsSymbolMap};
use reqwest::RequestBuilder;
use serde::Deserialize;
use tracing::instrument;
use typed_builder::TypedBuilder;

use crate::{historical::AddToForm, Symbols};

Expand Down Expand Up @@ -57,13 +56,13 @@ impl SymbologyClient<'_> {

/// The parameters for [`SymbologyClient::resolve()`]. Use [`ResolveParams::builder()`]
/// to get a builder type with all the preset defaults.
#[derive(Debug, Clone, TypedBuilder, PartialEq, Eq)]
#[derive(Debug, Clone, bon::Builder, PartialEq, Eq)]
pub struct ResolveParams {
/// The dataset code.
#[builder(setter(transform = |dt: impl ToString| dt.to_string()))]
#[builder(with = |d: impl ToString| d.to_string())]
pub dataset: String,
/// The symbols to resolve.
#[builder(setter(into))]
#[builder(into)]
pub symbols: Symbols,
/// The symbology type of the input `symbols`. Defaults to
/// [`RawSymbol`](dbn::enums::SType::RawSymbol).
Expand All @@ -77,7 +76,7 @@ pub struct ResolveParams {
#[builder(default = SType::InstrumentId)]
pub stype_out: SType,
/// The UTC date range with an inclusive start and an exclusive end.
#[builder(setter(into))]
#[builder(into)]
pub date_range: DateRange,
}

Expand Down
Loading
Loading