Make cyclic descriptors in bounce buffers work

This commit is contained in:
Jakub Hlusička 2026-02-22 16:31:12 +01:00
parent 8f55f23840
commit 735d0a48bb

View file

@ -185,6 +185,9 @@ impl DmaBounce {
Self::linear_descriptors_for_buffer(window_size, burst_config, |_| {});
let bounce_src_descs = if cyclic {
Self::bounce_descriptors_for_buffer_cyclic(
row_front_porch_bytes,
row_width_bytes,
window_size_rows,
unsafe {
(
&mut *(bounce_buffer_dst as *mut _),
@ -260,125 +263,16 @@ impl DmaBounce {
descriptors
}
/// Builds two cross-linked DMA descriptor lists — one per bounce buffer — so
/// that the hardware alternates ("ping-pongs") between the two buffers
/// indefinitely: the last descriptor of each list points at the first
/// descriptor of the other list, forming one combined cycle.
///
/// The descriptor storage is `Box::leak`ed so it lives for `'static`, which is
/// required because the DMA engine keeps raw pointers into it.
///
/// # Parameters
/// - `bounce_buffers`: the two bounce buffers; asserted to be equal in length.
/// - `burst_config`: used only to derive the maximum chunk size a single
///   descriptor may cover.
///
/// # Returns
/// The combined slice of both descriptor lists (first half drives
/// `bounce_buffers.0`, second half drives `bounce_buffers.1`).
///
/// # Panics
/// Panics if the two bounce buffers differ in length.
fn bounce_descriptors_for_buffer_cyclic(
    bounce_buffers: (&'static mut [u8], &'static mut [u8]),
    burst_config: BurstConfig,
) -> &'static mut [DmaDescriptor] {
    assert_eq!(
        bounce_buffers.0.len(),
        bounce_buffers.1.len(),
        "bounce buffers must be equal in size"
    );
    let buffer_len = bounce_buffers.0.len();
    let max_chunk_size = burst_config.max_compatible_chunk_size();
    let descriptors_len = dma::descriptor_count(
        buffer_len,
        max_chunk_size,
        // TODO: This might need to be set to true?
        // I don't know why cyclic descriptor lists must be at least 3 descriptors long.
        false,
    );
    // One contiguous, leaked allocation holding both lists back to back;
    // leaking gives the `'static` lifetime the DMA engine's raw pointers need.
    let descriptors_combined =
        Box::leak(vec![DmaDescriptor::EMPTY; 2 * descriptors_len].into_boxed_slice());
    let descriptors_pair = descriptors_combined.split_at_mut(descriptors_len);
    // Link up the descriptors.
    // Chains `descriptors` front to back, with the final descriptor pointing
    // at the first descriptor of `descriptors_other`. Walking the lists in
    // reverse lets each descriptor's `next` be set before it is itself linked.
    fn link_up_descriptors(
        descriptors: &mut [DmaDescriptor],
        descriptors_other: &mut [DmaDescriptor],
    ) {
        let mut next = descriptors_other.first_mut().unwrap();
        for desc in descriptors.iter_mut().rev() {
            desc.next = next;
            next = desc;
        }
    }
    // Cross-link both lists into a single cycle: A -> B -> A -> ...
    link_up_descriptors(descriptors_pair.0, descriptors_pair.1);
    link_up_descriptors(descriptors_pair.1, descriptors_pair.0);
    // Prepare each descriptor's buffer size.
    for (bounce_buffer, descriptors) in [
        (bounce_buffers.0, descriptors_pair.0),
        (bounce_buffers.1, descriptors_pair.1),
    ] {
        // Carve the bounce buffer into max_chunk_size pieces, one descriptor
        // each; the final (possibly shorter) chunk is flagged as last/EOF.
        let mut descriptors_it = descriptors.iter_mut();
        let mut remaining_bounce_buffer = bounce_buffer;
        while !remaining_bounce_buffer.is_empty() {
            let chunk_size = core::cmp::min(max_chunk_size, remaining_bounce_buffer.len());
            let desc = descriptors_it.next().unwrap();
            desc.buffer = remaining_bounce_buffer.as_mut_ptr();
            remaining_bounce_buffer = &mut remaining_bounce_buffer[chunk_size..];
            let is_last = remaining_bounce_buffer.is_empty();
            desc.set_size(chunk_size);
            desc.set_length(chunk_size);
            desc.reset_for_tx(is_last);
        }
    }
    descriptors_combined
}
fn bounce_descriptors_for_buffer_single(
windows_len: usize,
fn prepare_descriptors_window(
bounce_buffer: &mut [u8],
descriptors_window: &mut [DmaDescriptor],
row_front_porch_bytes: usize,
row_width_bytes: usize,
window_size_rows: usize,
bounce_buffers: (&'static mut [u8], &'static mut [u8]),
burst_config: BurstConfig,
) -> &'static mut [DmaDescriptor] {
assert_eq!(
bounce_buffers.0.len(),
bounce_buffers.1.len(),
"bounce buffers must be equal in size"
);
// If an odd number of windows were needed, two descriptor lists would be
// needed; only an even window count is supported.
assert_eq!(windows_len % 2, 0, "the number of windows must be even");
let buffer_len = bounce_buffers.0.len();
assert_eq!(
buffer_len,
row_width_bytes * window_size_rows,
"the provided bounce buffers have an invalid size"
);
warn!(
"windows_len: {windows_len}\nrow_front_porch_bytes: {row_front_porch_bytes}\nrow_width_bytes: {row_width_bytes}\nwindow_size_rows: {window_size_rows}\nbuffer_len: {buffer_len}",
);
let max_chunk_size = burst_config.max_compatible_chunk_size();
let descriptors_per_row_front_porch =
dma::descriptor_count(row_front_porch_bytes, max_chunk_size, false);
let descriptors_per_row_stored =
dma::descriptor_count(row_width_bytes, max_chunk_size, false);
let descriptors_per_row = descriptors_per_row_stored + descriptors_per_row_front_porch;
let descriptors_per_window = window_size_rows * descriptors_per_row;
let descriptors_per_frame = descriptors_per_window * windows_len;
let descriptors_frame =
Box::leak(vec![DmaDescriptor::EMPTY; descriptors_per_frame].into_boxed_slice());
// Link up the descriptors.
let mut next = core::ptr::null_mut();
for desc in descriptors_frame.iter_mut().rev() {
desc.next = next;
next = desc;
}
// Prepare each descriptor's buffer size.
let bounce_buffers = [bounce_buffers.0, bounce_buffers.1];
for (window_index, descriptors_window) in descriptors_frame
.chunks_mut(descriptors_per_window)
.enumerate()
{
let bounce_buffer_index = window_index % 2;
let bounce_buffer = &mut *bounce_buffers[bounce_buffer_index];
// let bounce_buffer_ptr = bounce_buffers[bounce_buffer_index].as_mut_ptr();
// let mut remaining_bounce_buffer = &mut *bounce_buffers[bounce_buffer_index];
max_chunk_size: usize,
descriptors_per_row: usize,
descriptors_per_row_front_porch: usize,
) {
for (row_index_in_window, descriptors_row) in descriptors_window
.chunks_mut(descriptors_per_row)
.enumerate()
@ -419,16 +313,15 @@ impl DmaBounce {
// Prepare window descriptors.
{
let mut remaining_bounce_buffer = &mut bounce_buffer
[row_index_in_window * row_width_bytes..][..row_width_bytes];
let mut remaining_bounce_buffer =
&mut bounce_buffer[row_index_in_window * row_width_bytes..][..row_width_bytes];
// if remaining_bounce_buffer.len() > row_width_bytes {
// remaining_bounce_buffer = &mut remaining_bounce_buffer[..row_width_bytes];
// }
for desc in &mut *descriptors_row_stored {
let chunk_size =
core::cmp::min(max_chunk_size, remaining_bounce_buffer.len());
let chunk_size = core::cmp::min(max_chunk_size, remaining_bounce_buffer.len());
desc.buffer = remaining_bounce_buffer.as_mut_ptr();
remaining_bounce_buffer = &mut remaining_bounce_buffer[chunk_size..];
desc.set_size(chunk_size);
@ -465,6 +358,137 @@ impl DmaBounce {
);
}
/// Builds two cross-linked, window-structured DMA descriptor lists — one per
/// bounce buffer — so the hardware ping-pongs between the two buffers
/// indefinitely: the tail of each list points back at the head of the other,
/// forming one combined cycle.
///
/// Unlike the non-cyclic variant, each list describes exactly one window
/// (`window_size_rows` rows), with per-row descriptors split between the
/// front-porch portion and the stored portion of the row.
///
/// # Parameters
/// - `row_front_porch_bytes`: bytes per row covered by front-porch
///   descriptors (counted into the allocation; the actual buffer wiring is
///   done by `prepare_descriptors_window` — NOTE(review): where front-porch
///   descriptors point is not visible here, confirm in that helper).
/// - `row_width_bytes`: bytes of stored pixel data per row.
/// - `window_size_rows`: rows per window; each bounce buffer must be exactly
///   `row_width_bytes * window_size_rows` long.
/// - `bounce_buffers`: the two bounce buffers; asserted equal in length.
/// - `burst_config`: used to derive the maximum chunk size per descriptor.
///
/// # Returns
/// The combined slice of both descriptor lists (first half drives
/// `bounce_buffers.0`, second half drives `bounce_buffers.1`).
///
/// # Panics
/// Panics if the bounce buffers differ in length, or if their length is not
/// `row_width_bytes * window_size_rows`.
fn bounce_descriptors_for_buffer_cyclic(
    row_front_porch_bytes: usize,
    row_width_bytes: usize,
    window_size_rows: usize,
    bounce_buffers: (&'static mut [u8], &'static mut [u8]),
    burst_config: BurstConfig,
) -> &'static mut [DmaDescriptor] {
    assert_eq!(
        bounce_buffers.0.len(),
        bounce_buffers.1.len(),
        "bounce buffers must be equal in size"
    );
    let buffer_len = bounce_buffers.0.len();
    assert_eq!(
        buffer_len,
        row_width_bytes * window_size_rows,
        "the provided bounce buffers have an invalid size"
    );
    let max_chunk_size = burst_config.max_compatible_chunk_size();
    // Each row needs descriptors for its front porch plus its stored data.
    let descriptors_per_row_front_porch =
        dma::descriptor_count(row_front_porch_bytes, max_chunk_size, false);
    let descriptors_per_row_stored =
        dma::descriptor_count(row_width_bytes, max_chunk_size, false);
    let descriptors_per_row = descriptors_per_row_stored + descriptors_per_row_front_porch;
    let descriptors_per_window = window_size_rows * descriptors_per_row;
    // One contiguous, leaked allocation holding both window lists back to
    // back; leaking gives the `'static` lifetime the DMA engine needs.
    let descriptors_combined =
        Box::leak(vec![DmaDescriptor::EMPTY; 2 * descriptors_per_window].into_boxed_slice());
    let descriptors_pair = descriptors_combined.split_at_mut(descriptors_per_window);
    // Link up the descriptors.
    // Chains `descriptors` front to back, with the final descriptor pointing
    // at the first descriptor of `descriptors_other`. Walking in reverse lets
    // each descriptor's `next` be set before it is itself linked.
    fn link_up_descriptors(
        descriptors: &mut [DmaDescriptor],
        descriptors_other: &mut [DmaDescriptor],
    ) {
        let mut next = descriptors_other.first_mut().unwrap();
        for desc in descriptors.iter_mut().rev() {
            desc.next = next;
            next = desc;
        }
    }
    // Cross-link both lists into a single cycle: A -> B -> A -> ...
    link_up_descriptors(descriptors_pair.0, descriptors_pair.1);
    link_up_descriptors(descriptors_pair.1, descriptors_pair.0);
    // Prepare each descriptor's buffer size.
    for (bounce_buffer, descriptors) in [
        (bounce_buffers.0, descriptors_pair.0),
        (bounce_buffers.1, descriptors_pair.1),
    ] {
        // Shared helper also used by the non-cyclic path; wires one window's
        // descriptors to the rows of its bounce buffer.
        Self::prepare_descriptors_window(
            bounce_buffer,
            descriptors,
            row_front_porch_bytes,
            row_width_bytes,
            window_size_rows,
            max_chunk_size,
            descriptors_per_row,
            descriptors_per_row_front_porch,
        );
    }
    descriptors_combined
}
fn bounce_descriptors_for_buffer_single(
windows_len: usize,
row_front_porch_bytes: usize,
row_width_bytes: usize,
window_size_rows: usize,
bounce_buffers: (&'static mut [u8], &'static mut [u8]),
burst_config: BurstConfig,
) -> &'static mut [DmaDescriptor] {
assert_eq!(
bounce_buffers.0.len(),
bounce_buffers.1.len(),
"bounce buffers must be equal in size"
);
// If an odd number of windows were needed, two descriptor lists would be
// needed; only an even window count is supported.
assert_eq!(windows_len % 2, 0, "the number of windows must be even");
let buffer_len = bounce_buffers.0.len();
assert_eq!(
buffer_len,
row_width_bytes * window_size_rows,
"the provided bounce buffers have an invalid size"
);
let max_chunk_size = burst_config.max_compatible_chunk_size();
let descriptors_per_row_front_porch =
dma::descriptor_count(row_front_porch_bytes, max_chunk_size, false);
let descriptors_per_row_stored =
dma::descriptor_count(row_width_bytes, max_chunk_size, false);
let descriptors_per_row = descriptors_per_row_stored + descriptors_per_row_front_porch;
let descriptors_per_window = window_size_rows * descriptors_per_row;
let descriptors_per_frame = descriptors_per_window * windows_len;
let descriptors_frame =
Box::leak(vec![DmaDescriptor::EMPTY; descriptors_per_frame].into_boxed_slice());
// Link up the descriptors.
let mut next = core::ptr::null_mut();
for desc in descriptors_frame.iter_mut().rev() {
desc.next = next;
next = desc;
}
// Prepare each descriptor's buffer size.
let bounce_buffers = [bounce_buffers.0, bounce_buffers.1];
for (window_index, descriptors_window) in descriptors_frame
.chunks_mut(descriptors_per_window)
.enumerate()
{
let bounce_buffer_index = window_index % 2;
let bounce_buffer = &mut *bounce_buffers[bounce_buffer_index];
Self::prepare_descriptors_window(
bounce_buffer,
descriptors_window,
row_front_porch_bytes,
row_width_bytes,
window_size_rows,
max_chunk_size,
descriptors_per_row,
descriptors_per_row_front_porch,
);
}
assert_eq!(
descriptors_frame
.iter()