Skip to content

Commit

Permalink
Update for rust 1.83 (#2562)
Browse files — browse the repository at this point in the history
* Fix issues with rust 1.83

* Set MSRV to 1.82
  • Loading branch information
laggui authored Nov 28, 2024
1 parent 9d073e7 commit 4258502
Show file tree
Hide file tree
Showing 20 changed files with 25 additions and 28 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ env:
# Note: It is not possible to define env vars in composite actions.
# To work around this issue we use inputs and define all the env vars here.

RUST_PREVIOUS_VERSION: 1.81.0
RUST_PREVIOUS_VERSION: 1.82.0

# Cargo
CARGO_TERM_COLOR: "always"
Expand Down
8 changes: 4 additions & 4 deletions backend-comparison/src/burnbenchapp/base.rs
Original file line number Diff line number Diff line change
Expand Up @@ -231,16 +231,16 @@ fn run_backend_comparison_benchmarks(
}

fn run_cargo(
bench: &String,
backend: &String,
bench: &str,
backend: &str,
url: &str,
token: Option<&str>,
progress_bar: &Option<Arc<Mutex<RunnerProgressBar>>>,
) -> io::Result<ExitStatus> {
let processor: Arc<dyn OutputProcessor> = if let Some(pb) = progress_bar {
Arc::new(NiceProcessor::new(
bench.clone(),
backend.clone(),
bench.to_string(),
backend.to_string(),
pb.clone(),
))
} else {
Expand Down
2 changes: 1 addition & 1 deletion crates/burn-core/src/module/param/visitor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ struct ParamIdCollector<'a, M> {
phantom: PhantomData<M>,
}

impl<'a, B, M> ModuleVisitor<B> for ParamIdCollector<'a, M>
impl<B, M> ModuleVisitor<B> for ParamIdCollector<'_, M>
where
B: Backend,
M: Module<B>,
Expand Down
4 changes: 1 addition & 3 deletions crates/burn-core/src/optim/grad_accum.rs
Original file line number Diff line number Diff line change
Expand Up @@ -54,9 +54,7 @@ struct ModuleGradsAccumulator<'a, M> {
phantom: PhantomData<M>,
}

impl<'a, B: AutodiffBackend, M: AutodiffModule<B>> ModuleVisitor<B>
for ModuleGradsAccumulator<'a, M>
{
impl<B: AutodiffBackend, M: AutodiffModule<B>> ModuleVisitor<B> for ModuleGradsAccumulator<'_, M> {
fn visit_float<const D: usize>(&mut self, id: ParamId, _tensor: &Tensor<B, D>) {
let grad_updated = match self.grads_new.remove::<B::InnerBackend, D>(id) {
Some(new) => match self.grads.remove::<B::InnerBackend, D>(id) {
Expand Down
2 changes: 1 addition & 1 deletion crates/burn-core/src/optim/simple/adaptor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ where
grad_clipping: Option<&'a GradientClipping>,
}

impl<'a, M, B, O> ModuleMapper<B> for SimpleOptimizerMapper<'a, M, B, O>
impl<M, B, O> ModuleMapper<B> for SimpleOptimizerMapper<'_, M, B, O>
where
M: AutodiffModule<B>,
B: AutodiffBackend,
Expand Down
4 changes: 2 additions & 2 deletions crates/burn-core/src/optim/visitor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ pub struct GradientsParamsChangeDevice<'a, M: AutodiffModule<B>, B: AutodiffBack
phatom: PhantomData<M>,
}

impl<'a, B, M> ModuleVisitor<B> for GradientsParamsConverter<'a, M, B>
impl<B, M> ModuleVisitor<B> for GradientsParamsConverter<'_, M, B>
where
B: AutodiffBackend,
M: AutodiffModule<B>,
Expand All @@ -37,7 +37,7 @@ where
}
}

impl<'a, B, M> ModuleVisitor<B> for GradientsParamsChangeDevice<'a, M, B>
impl<B, M> ModuleVisitor<B> for GradientsParamsChangeDevice<'_, M, B>
where
B: AutodiffBackend,
M: AutodiffModule<B>,
Expand Down
2 changes: 1 addition & 1 deletion crates/burn-dataset/src/dataset/iterator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ impl<'a, I> DatasetIterator<'a, I> {
}
}

impl<'a, I> Iterator for DatasetIterator<'a, I> {
impl<I> Iterator for DatasetIterator<'_, I> {
type Item = I;

fn next(&mut self) -> Option<I> {
Expand Down
4 changes: 2 additions & 2 deletions crates/burn-dataset/src/transform/window.rs
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ impl<'a, I> WindowsIterator<'a, I> {
}
}

impl<'a, I> Iterator for WindowsIterator<'a, I> {
impl<I> Iterator for WindowsIterator<'_, I> {
type Item = Vec<I>;

fn next(&mut self) -> Option<Vec<I>> {
Expand All @@ -88,7 +88,7 @@ impl<'a, I> Iterator for WindowsIterator<'a, I> {
}
}

impl<'a, I> Clone for WindowsIterator<'a, I> {
impl<I> Clone for WindowsIterator<'_, I> {
fn clone(&self) -> Self {
WindowsIterator {
size: self.size,
Expand Down
4 changes: 2 additions & 2 deletions crates/burn-fusion/src/stream/execution/validator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,7 @@ pub(crate) enum TriggerProgress {
/// trigger.
pub(crate) type TriggerId = usize;

impl<'b, O> OperationsStore for TriggerOperationsStore<'b, O> {
impl<O> OperationsStore for TriggerOperationsStore<'_, O> {
type Id = TriggerId;

fn get(&self, id: Self::Id) -> &[OperationDescription] {
Expand All @@ -131,7 +131,7 @@ pub(crate) struct ExecutionPlanOperationsStore<'a, O> {
store: &'a ExecutionPlanStore<O>,
}

impl<'b, O> OperationsStore for ExecutionPlanOperationsStore<'b, O> {
impl<O> OperationsStore for ExecutionPlanOperationsStore<'_, O> {
type Id = ExecutionPlanId;

fn get(&self, id: Self::Id) -> &[OperationDescription] {
Expand Down
2 changes: 1 addition & 1 deletion crates/burn-fusion/src/stream/multi.rs
Original file line number Diff line number Diff line change
Expand Up @@ -153,7 +153,7 @@ struct Segment<'a, R: FusionRuntime> {
handles: &'a mut HandleContainer<R::FusionHandle>,
}

impl<'i, R: FusionRuntime> StreamSegment<R::Optimization> for Segment<'i, R> {
impl<R: FusionRuntime> StreamSegment<R::Optimization> for Segment<'_, R> {
fn operations(&self) -> &[OperationDescription] {
&self.queue.relative
}
Expand Down
4 changes: 2 additions & 2 deletions crates/burn-import/src/burn/graph.rs
Original file line number Diff line number Diff line change
Expand Up @@ -589,7 +589,7 @@ struct BurnGraphState<'a, PS: PrecisionSettings> {
/// Instead, they use the `StructTuple` serialization strategy (to avoid memory overhead presumably).
struct StructMap<'a, PS: PrecisionSettings>(BurnGraphState<'a, PS>);

impl<'a, PS: PrecisionSettings> Serialize for StructMap<'a, PS> {
impl<PS: PrecisionSettings> Serialize for StructMap<'_, PS> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
Expand Down Expand Up @@ -623,7 +623,7 @@ impl<'a, PS: PrecisionSettings> Serialize for StructMap<'a, PS> {
/// serializing tuples. Instead, they use the `StructMap` serialization strategy.
struct StructTuple<'a, PS: PrecisionSettings>(BurnGraphState<'a, PS>);

impl<'a, PS: PrecisionSettings> Serialize for StructTuple<'a, PS> {
impl<PS: PrecisionSettings> Serialize for StructTuple<'_, PS> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
Expand Down
1 change: 1 addition & 0 deletions crates/burn-jit/src/kernel/pool/adaptive_avg_pool2d.rs
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@ fn start_index(output_size_index: u32, output_size: u32, input_size: u32) -> u32
(output_size_index * input_size) / output_size
}

#[allow(clippy::manual_div_ceil)]
#[cube]
fn end_index(output_size_index: u32, output_size: u32, input_size: u32) -> u32 {
let index = (output_size_index + 1) * input_size;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,7 @@ fn start_index(output_size_index: u32, output_size: u32, input_size: u32) -> u32
(output_size_index * input_size) / output_size
}

#[allow(clippy::manual_div_ceil)]
#[cube]
fn end_index(output_size_index: u32, output_size: u32, input_size: u32) -> u32 {
let index = (output_size_index + 1) * input_size;
Expand Down
2 changes: 1 addition & 1 deletion crates/burn-ndarray/src/sharing.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ pub(crate) struct UnsafeSharedRef<'a, T> {
cell: UnsafeCell<&'a mut T>,
}

unsafe impl<'a, T> Sync for UnsafeSharedRef<'a, T> {}
unsafe impl<T> Sync for UnsafeSharedRef<'_, T> {}

impl<'a, T> UnsafeSharedRef<'a, T> {
pub fn new(data: &'a mut T) -> Self {
Expand Down
2 changes: 1 addition & 1 deletion crates/burn-tensor/src/tensor/distribution.rs
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ where
Normal(rand_distr::Normal<f64>),
}

impl<'a, E, R> DistributionSampler<'a, E, R>
impl<E, R> DistributionSampler<'_, E, R>
where
Standard: rand::distributions::Distribution<E>,
E: rand::distributions::uniform::SampleUniform,
Expand Down
2 changes: 0 additions & 2 deletions crates/burn-tensor/src/tensor/ops/bool_tensor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -339,7 +339,6 @@ pub trait BoolTensorOps<B: Backend> {
/// A boolean tensor `Tensor<B, D, Bool>` with the same size as input `tensor`, except in the `dim` axis
/// where the size is 1. The elem in the `dim` axis is True if any element along this dim in the input
/// evaluates to True, False otherwise.
fn bool_any_dim(tensor: BoolTensor<B>, dim: usize) -> BoolTensor<B> {
let sum = B::int_sum_dim(B::bool_into_int(tensor), dim);
B::int_greater_elem(sum, 0.elem())
Expand Down Expand Up @@ -373,7 +372,6 @@ pub trait BoolTensorOps<B: Backend> {
/// A boolean tensor `Tensor<B, D, Bool>` with the same size as input `tensor`, except in the `dim` axis
/// where the size is 1. The elem in the `dim` axis is True if all elements along this dim in the input
/// evaluates to True, False otherwise.
fn bool_all_dim(tensor: BoolTensor<B>, dim: usize) -> BoolTensor<B> {
let num_elems = tensor.shape().dims[dim];
let sum = B::int_sum_dim(B::bool_into_int(tensor), dim);
Expand Down
1 change: 0 additions & 1 deletion crates/burn-tensor/src/tensor/report.rs
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,6 @@ use colored::*;
///
/// Closeness check complete.
/// ```
pub fn check_closeness<B: Backend, const D: usize>(output: &Tensor<B, D>, expected: &Tensor<B, D>) {
println!("{}", "Tensor Closeness Check Results:".bold());
println!("===============================");
Expand Down
2 changes: 1 addition & 1 deletion crates/burn-train/src/renderer/tui/base.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ pub(crate) struct MetricsView<'a> {
status: StatusView,
}

impl<'a> MetricsView<'a> {
impl MetricsView<'_> {
pub(crate) fn render(self, frame: &mut TerminalFrame<'_>, size: Rect) {
let chunks = Layout::default()
.direction(Direction::Vertical)
Expand Down
2 changes: 1 addition & 1 deletion crates/burn-train/src/renderer/tui/metric_numeric.rs
Original file line number Diff line number Diff line change
Expand Up @@ -182,7 +182,7 @@ pub(crate) enum NumericMetricView<'a> {
None,
}

impl<'a> NumericMetricView<'a> {
impl NumericMetricView<'_> {
pub(crate) fn render(self, frame: &mut TerminalFrame<'_>, size: Rect) {
match self {
Self::Plots(titles, selected, chart, kind) => {
Expand Down
2 changes: 1 addition & 1 deletion crates/burn/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ license.workspace = true
name = "burn"
readme.workspace = true
repository = "https://github.com/tracel-ai/burn"
rust-version = "1.81"
rust-version = "1.82"
version.workspace = true

[features]
Expand Down

0 comments on commit 4258502

Please sign in to comment.