Skip to content

Commit 3aa85c4

Browse files
committed
[AURON #2001] Upgrade Rust nightly toolchain to 2025-12-15.
Signed-off-by: slfan1989 <slfan1989@apache.org>
1 parent 31ed89f commit 3aa85c4

File tree

19 files changed

+104
-108
lines changed

19 files changed

+104
-108
lines changed

native-engine/auron-memmgr/src/spill.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -166,10 +166,10 @@ impl Drop for FileSpill {
166166
self.1
167167
.disk_spill_iotime
168168
.add_duration(Duration::from_nanos(self.1.mem_spill_iotime.value() as u64));
169-
if let Some(file_path) = &self.2 {
170-
if let Err(e) = fs::remove_file(file_path) {
171-
warn!("Was unable to delete spill file: {file_path}. error: {e}");
172-
}
169+
if let Some(file_path) = &self.2
170+
&& let Err(e) = fs::remove_file(file_path)
171+
{
172+
warn!("Was unable to delete spill file: {file_path}. error: {e}");
173173
}
174174
}
175175
}

native-engine/datafusion-ext-commons/src/hash/xxhash.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -98,7 +98,7 @@ fn xxh64_merge_round(mut hash: u64, acc: u64) -> u64 {
9898

9999
#[inline]
100100
fn xxh_rotl64(value: u64, amt: i32) -> u64 {
101-
(value << (amt % 64)) | (value >> (64 - amt % 64))
101+
value.rotate_left((amt as u32) % 64)
102102
}
103103

104104
#[inline]

native-engine/datafusion-ext-commons/src/io/batch_serde.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -750,7 +750,7 @@ mod test {
750750
"| [6, 7] | [6, 7] |",
751751
"+-----------+-----------+"
752752
],
753-
&[batch.clone()]
753+
std::slice::from_ref(&batch)
754754
);
755755

756756
// test read after write

native-engine/datafusion-ext-commons/src/io/ipc_compression.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -336,8 +336,8 @@ mod tests {
336336
let test_array2: ArrayRef = Arc::new(StringArray::from(vec![Some("foo"), Some("bar")]));
337337
let schema = Arc::new(Schema::new(vec![Field::new("", DataType::Utf8, false)]));
338338

339-
writer.write_batch(2, &[test_array1.clone()])?;
340-
writer.write_batch(2, &[test_array2.clone()])?;
339+
writer.write_batch(2, std::slice::from_ref(&test_array1))?;
340+
writer.write_batch(2, std::slice::from_ref(&test_array2))?;
341341
writer.finish_current_buf()?;
342342

343343
let mut reader = IpcCompressionReader::new(Cursor::new(buf));

native-engine/datafusion-ext-commons/src/lib.rs

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,6 @@
1616
#![allow(internal_features)]
1717
#![feature(core_intrinsics)]
1818
#![feature(slice_swap_unchecked)]
19-
#![feature(vec_into_raw_parts)]
2019

2120
use auron_jni_bridge::conf::{
2221
BATCH_SIZE, IntConf, SUGGESTED_BATCH_MEM_SIZE, SUGGESTED_BATCH_MEM_SIZE_KWAY_MERGE,
@@ -141,21 +140,21 @@ macro_rules! assume {
141140
macro_rules! prefetch_read_data {
142141
($e:expr) => {{
143142
// safety: use prefetch
144-
let locality = 3;
143+
const LOCALITY: i32 = 3;
145144
#[allow(unused_unsafe)]
146145
unsafe {
147-
std::intrinsics::prefetch_read_data($e, locality)
146+
std::intrinsics::prefetch_read_data::<_, { LOCALITY }>($e)
148147
}
149148
}};
150149
}
151150
#[macro_export]
152151
macro_rules! prefetch_write_data {
153152
($e:expr) => {{
154153
// safety: use prefetch
155-
let locality = 3;
154+
const LOCALITY: i32 = 3;
156155
#[allow(unused_unsafe)]
157156
unsafe {
158-
std::intrinsics::prefetch_write_data($e, locality)
157+
std::intrinsics::prefetch_write_data::<_, { LOCALITY }>($e)
159158
}
160159
}};
161160
}

native-engine/datafusion-ext-commons/src/spark_hash.rs

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -467,7 +467,7 @@ mod tests {
467467
])) as ArrayRef;
468468

469469
// generated with Murmur3Hash(Seq(Literal(1L)), 42).eval() since Spark is tested
470-
let hashes = create_murmur3_hashes(5, &[i.clone()], 42);
470+
let hashes = create_murmur3_hashes(5, std::slice::from_ref(&i), 42);
471471
let expected: Vec<i32> = [
472472
0x99f0149d_u32,
473473
0x9c67b85d,
@@ -482,7 +482,7 @@ mod tests {
482482

483483
// generated with XxHash64(Seq(Literal(1L)), 42).eval() since Spark is tested
484484
// against this as well
485-
let hashes = create_xxhash64_hashes(5, &[i.clone()], 42);
485+
let hashes = create_xxhash64_hashes(5, std::slice::from_ref(&i), 42);
486486
let expected = vec![
487487
-7001672635703045582,
488488
-5252525462095825812,
@@ -495,11 +495,11 @@ mod tests {
495495

496496
#[test]
497497
fn test_str() {
498-
let i = Arc::new(StringArray::from(vec!["hello", "bar", "", "😁", "天地"]));
498+
let i = Arc::new(StringArray::from(vec!["hello", "bar", "", "😁", "天地"])) as ArrayRef;
499499

500500
// generated with Murmur3Hash(Seq(Literal("")), 42).eval() since Spark is tested
501501
// against this as well
502-
let hashes = create_murmur3_hashes(5, &[i.clone()], 42);
502+
let hashes = create_murmur3_hashes(5, std::slice::from_ref(&i), 42);
503503
let expected: Vec<i32> = [3286402344_u32, 2486176763, 142593372, 885025535, 2395000894]
504504
.into_iter()
505505
.map(|v| v as i32)
@@ -508,7 +508,7 @@ mod tests {
508508

509509
// generated with XxHash64(Seq(Literal("")), 42).eval() since Spark is tested
510510
// against this as well
511-
let hashes = create_xxhash64_hashes(5, &[i.clone()], 42);
511+
let hashes = create_xxhash64_hashes(5, std::slice::from_ref(&i), 42);
512512
let expected = vec![
513513
-4367754540140381902,
514514
-1798770879548125814,
@@ -541,7 +541,7 @@ mod tests {
541541
let array_ref = Arc::new(list_array) as ArrayRef;
542542

543543
// Test Murmur3 hash
544-
let hashes = create_murmur3_hashes(3, &[array_ref.clone()], 42);
544+
let hashes = create_murmur3_hashes(3, std::slice::from_ref(&array_ref), 42);
545545
assert_eq!(hashes, vec![-222940379, -374492525, -331964951]);
546546
Ok(())
547547
}

native-engine/datafusion-ext-functions/src/spark_dates.rs

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -64,11 +64,8 @@ pub fn spark_quarter(args: &[ColumnarValue]) -> Result<ColumnarValue> {
6464
.expect("date_part(Month) must return Int32Array");
6565

6666
// Compute quarter: ((month - 1) / 3) + 1, preserving NULLs
67-
let quarter = Int32Array::from_iter(
68-
month_arr
69-
.iter()
70-
.map(|opt_m| opt_m.map(|m| ((m - 1) / 3 + 1))),
71-
);
67+
let quarter =
68+
Int32Array::from_iter(month_arr.iter().map(|opt_m| opt_m.map(|m| (m - 1) / 3 + 1)));
7269

7370
Ok(ColumnarValue::Array(Arc::new(quarter)))
7471
}

native-engine/datafusion-ext-functions/src/spark_get_json_object.rs

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -305,15 +305,15 @@ impl HiveGetJsonObjectEvaluator {
305305
json_str: &str,
306306
) -> std::result::Result<Option<String>, HiveGetJsonObjectError> {
307307
// first try parsing with sonic-rs and fail-backing to serde-json
308-
if let Ok(root_value) = sonic_rs::from_str::<sonic_rs::Value>(json_str) {
309-
if let Ok(v) = self.evaluate_with_value_sonic(&root_value) {
310-
return Ok(v);
311-
}
308+
if let Ok(root_value) = sonic_rs::from_str::<sonic_rs::Value>(json_str)
309+
&& let Ok(v) = self.evaluate_with_value_sonic(&root_value)
310+
{
311+
return Ok(v);
312312
}
313-
if let Ok(root_value) = serde_json::from_str::<serde_json::Value>(json_str) {
314-
if let Ok(v) = self.evaluate_with_value_serde_json(&root_value) {
315-
return Ok(v);
316-
}
313+
if let Ok(root_value) = serde_json::from_str::<serde_json::Value>(json_str)
314+
&& let Ok(v) = self.evaluate_with_value_serde_json(&root_value)
315+
{
316+
return Ok(v);
317317
}
318318
Err(HiveGetJsonObjectError::InvalidInput)
319319
}

native-engine/datafusion-ext-functions/src/spark_strings.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -216,10 +216,10 @@ pub fn string_concat_ws(args: &[ColumnarValue]) -> Result<ColumnarValue> {
216216
if let Ok(s) = as_string_array(&array).cloned() {
217217
return Ok(Arg::Array(s));
218218
}
219-
if let Ok(l) = as_list_array(&array).cloned() {
220-
if l.value_type() == DataType::Utf8 {
221-
return Ok(Arg::List(l));
222-
}
219+
if let Ok(l) = as_list_array(&array).cloned()
220+
&& l.value_type() == DataType::Utf8
221+
{
222+
return Ok(Arg::List(l));
223223
}
224224
}
225225
ColumnarValue::Scalar(scalar) => {

native-engine/datafusion-ext-plans/src/agg/acc.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -456,10 +456,10 @@ impl AccBytesColumn {
456456
fn refresh_heap_mem_used(&mut self) {
457457
self.heap_mem_used = 0;
458458
for item in &self.items {
459-
if let Some(v) = item {
460-
if v.spilled() {
461-
self.heap_mem_used += v.capacity();
462-
}
459+
if let Some(v) = item
460+
&& v.spilled()
461+
{
462+
self.heap_mem_used += v.capacity();
463463
}
464464
}
465465
}

0 commit comments

Comments (0)