2026-02-15 02:33:42 +01:00
|
|
|
|
use core::{
|
|
|
|
|
|
alloc::Layout,
|
|
|
|
|
|
pin::Pin,
|
2026-02-15 18:17:16 +01:00
|
|
|
|
sync::atomic::{self, AtomicBool, AtomicU32, AtomicUsize},
|
2026-02-15 02:33:42 +01:00
|
|
|
|
};
|
2026-02-14 20:03:32 +01:00
|
|
|
|
|
|
|
|
|
|
use alloc::{
|
|
|
|
|
|
alloc::{Allocator, Global},
|
|
|
|
|
|
boxed::Box,
|
|
|
|
|
|
vec,
|
|
|
|
|
|
};
|
2026-02-15 18:17:16 +01:00
|
|
|
|
use embassy_sync::{
|
|
|
|
|
|
channel::{Channel, TrySendError},
|
|
|
|
|
|
signal::Signal,
|
|
|
|
|
|
};
|
2026-02-18 05:03:05 +01:00
|
|
|
|
use embassy_time::{Duration, Instant, Timer, WithTimeout};
|
2026-02-14 20:03:32 +01:00
|
|
|
|
use esp_alloc::MemoryCapability;
|
|
|
|
|
|
use esp_hal::{
|
|
|
|
|
|
Blocking,
|
|
|
|
|
|
dma::{
|
|
|
|
|
|
self, AnyGdmaChannel, BufView, BurstConfig, DmaChannel, DmaChannelConvert, DmaDescriptor,
|
|
|
|
|
|
DmaDescriptorFlags, DmaEligible, DmaRxStreamBuf, DmaTxBuf, DmaTxBuffer, DmaTxInterrupt,
|
2026-02-22 00:59:01 +01:00
|
|
|
|
ExternalBurstConfig, InternalBurstConfig, Mem2Mem, SimpleMem2MemTransfer,
|
2026-02-14 20:03:32 +01:00
|
|
|
|
},
|
2026-02-15 02:33:42 +01:00
|
|
|
|
dma_descriptors, handler,
|
|
|
|
|
|
interrupt::{self, Priority},
|
|
|
|
|
|
lcd_cam::lcd::dpi::{Dpi, DpiTransfer},
|
2026-02-18 05:03:05 +01:00
|
|
|
|
peripherals::{DMA, DMA_CH0, Interrupt, Peripherals, SPI2},
|
2026-02-14 20:03:32 +01:00
|
|
|
|
ram,
|
|
|
|
|
|
spi::master::AnySpi,
|
|
|
|
|
|
};
|
|
|
|
|
|
use esp_sync::RawMutex;
|
2026-02-22 16:09:20 +01:00
|
|
|
|
use i_slint_core::software_renderer::{Rgb565Pixel, TargetPixel};
|
2026-02-14 20:03:32 +01:00
|
|
|
|
use indoc::{formatdoc, indoc};
|
2026-02-15 02:33:42 +01:00
|
|
|
|
use log::{error, info, warn};
|
2026-02-15 18:17:16 +01:00
|
|
|
|
use rmk::{
|
|
|
|
|
|
futures::{FutureExt, pin_mut},
|
|
|
|
|
|
join_all,
|
|
|
|
|
|
};
|
2026-02-14 20:03:32 +01:00
|
|
|
|
|
2026-02-22 00:59:01 +01:00
|
|
|
|
use crate::{PSRAM_ALLOCATOR, peripherals::st7701s::St7701s, util::DurationExt};
|
2026-02-14 20:03:32 +01:00
|
|
|
|
|
2026-02-15 02:33:42 +01:00
|
|
|
|
/// THIS IS TAKEN FROM https://github.com/esp-rs/esp-hal/blob/main/esp-hal/src/soc/esp32s3/mod.rs
/// Write back a specific range of data in the cache.
///
/// Calls the ESP32-S3 ROM routine `Cache_WriteBack_Addr` for the given range,
/// with DCache autoload suspended around the call (see comment below).
///
/// # Safety
/// `addr`/`size` must describe a valid, cache-backed address range; this calls
/// raw ROM functions with no validation. Placed in `.rwtext` so it stays
/// executable while the flash cache itself is being manipulated.
#[doc(hidden)]
#[unsafe(link_section = ".rwtext")]
pub unsafe fn cache_writeback_addr(addr: u32, size: u32) {
    // ROM-provided cache maintenance routines (resolved by the linker script).
    unsafe extern "C" {
        fn rom_Cache_WriteBack_Addr(addr: u32, size: u32);
        fn Cache_Suspend_DCache_Autoload() -> u32;
        fn Cache_Resume_DCache_Autoload(value: u32);
    }

    // suspend autoload, avoid load cachelines being written back
    unsafe {
        let autoload = Cache_Suspend_DCache_Autoload();
        rom_Cache_WriteBack_Addr(addr, size);
        // Restore the autoload state captured above.
        Cache_Resume_DCache_Autoload(autoload);
    }
}
|
|
|
|
|
|
|
|
|
|
|
|
/// THIS IS TAKEN FROM https://github.com/esp-rs/esp-hal/blob/main/esp-hal/src/soc/esp32s3/mod.rs
/// Invalidate a specific range of addresses in the cache.
///
/// Thin wrapper over the ROM routine `Cache_Invalidate_Addr`.
///
/// # Safety
/// `addr`/`size` must describe a valid, cache-backed address range; any dirty
/// cache lines in the range are discarded, not written back.
#[doc(hidden)]
#[unsafe(link_section = ".rwtext")]
pub unsafe fn cache_invalidate_addr(addr: u32, size: u32) {
    // ROM-provided cache maintenance routine (resolved by the linker script).
    unsafe extern "C" {
        fn Cache_Invalidate_Addr(addr: u32, size: u32);
    }

    unsafe {
        Cache_Invalidate_Addr(addr, size);
    }
}
|
|
|
|
|
|
|
2026-02-18 05:03:05 +01:00
|
|
|
|
// const DMA_CHANNEL_INBOUND: usize = 0;
// const INTERRUPT_INBOUND: Interrupt = Interrupt::DMA_IN_CH0;
/// Index of the GDMA channel used for the outbound (bounce buffer → peripheral) transfer.
const DMA_CHANNEL_OUTBOUND: usize = 2;
/// Interrupt line of the outbound DMA channel.
/// NOTE: must stay consistent with `DMA_CHANNEL_OUTBOUND` (channel 2 ⇒ `DMA_OUT_CH2`).
const INTERRUPT_OUTBOUND: Interrupt = Interrupt::DMA_OUT_CH2;
|
|
|
|
|
|
|
2026-02-15 02:33:42 +01:00
|
|
|
|
/// Double-buffered DMA "bounce" pipeline: windows of a (typically external-memory)
/// source buffer are copied via `Mem2Mem` into two small internal bounce buffers,
/// which a destination peripheral (DPI) streams out via descriptor lists.
pub struct DmaBounce {
    // TODO: Make these generic.
    // They currently cannot be generic, because they lack a `reborrow` method.
    channel: DMA_CH0<'static>,
    // This can also be more generic, see `DmaEligible` in `Mem2Mem::new`.
    peripheral_src: AnySpi<'static>,
    // This can also be more generic, see `DmaEligible` in `Mem2Mem::new`.
    // `Option` so the driver can be `take`n out for the duration of a transfer
    // and put back when the transfer completes.
    peripheral_dst: Option<Dpi<'static, Blocking>>,

    // TODO: Consider having a separate burst config for the two transfers.
    burst_config: BurstConfig,
    // Experimental: use a cyclic (self-repeating) descriptor list for the
    // bounce-buffer → peripheral transfer instead of a per-frame list.
    cyclic: bool,
    /// The size of each window.
    window_size: usize,
    /// The number of windows.
    windows_len: usize,
    // The full source buffer; `windows_len * window_size` bytes.
    buffer_src: &'static mut [u8],
    // Two buffers of size `window_size`,
    // one of which is being written to, while the other is being read from.
    bounce_buffer_dst: &'static mut [u8],
    bounce_buffer_src: &'static mut [u8],
    // A descriptor list that spans a buffer of size `window_size`.
    // The buffer pointers need to be updated before each transmission to point to the correct window in the source buffer `src_buffer`.
    src_descs: &'static mut [DmaDescriptor],
    // A descriptor list that spans a buffer of size `window_size`.
    // The buffer pointers need to be updated before each transmission to point to the correct bounce buffer.
    bounce_dst_descs: &'static mut [DmaDescriptor],
    // A cyclic descriptor list that spans the buffers `bounce_buffer_dst` and `bounce_buffer_src`.
    bounce_src_descs: &'static mut [DmaDescriptor],
    // The index of the next window about to be received into the destination bounce buffer.
    window_index_next: usize,
    // Counter of fully emitted frames; advanced whenever `window_index_next` wraps.
    frame_index_next: usize,
}
|
|
|
|
|
|
|
2026-02-15 02:33:42 +01:00
|
|
|
|
impl DmaBounce {
|
2026-02-22 16:09:20 +01:00
|
|
|
|
    /// Constructs a new [`DmaBounce`], allocating the two bounce buffers and
    /// pre-building all descriptor lists. Panics on invalid sizing/alignment.
    ///
    /// * `allocator` - The allocator used to allocate the bounce buffers.
    /// * `channel` - The DMA channel used to transfer data from the source buffer to the bounce buffers.
    /// * `peripheral_src` - The peripheral to transfer data from the source buffer to the bounce buffers.
    /// * `peripheral_dst` - The peripheral to transfer data to, from the bounce buffers.
    /// * `buffer_src` - The source buffer, typically allocated in external memory.
    /// * `row_front_porch_bytes` - The number of arbitrary-valued bytes to be sent in front of each row to the destination peripheral.
    /// * `row_width_bytes` - The width of a row, in bytes.
    /// * `window_size_rows` - The size of a single bounce buffer, in rows.
    /// * `burst_config` - The burst config to use for memory transfers (both in and out). TODO: This could be split.
    /// * `cyclic` - Experimental! Whether to use a cyclic descriptor list for transfer from the bounce buffers to the destination peripheral.
    pub fn new(
        allocator: impl Allocator + Copy + 'static,
        channel: DMA_CH0<'static>,
        peripheral_src: AnySpi<'static>,
        peripheral_dst: Dpi<'static, Blocking>,
        buffer_src: &'static mut [u8],
        row_front_porch_bytes: usize,
        row_width_bytes: usize,
        window_size_rows: usize,
        burst_config: BurstConfig,
        cyclic: bool,
    ) -> Self {
        let window_size = row_width_bytes * window_size_rows;

        assert_eq!(
            buffer_src.len() % window_size,
            0,
            "the size of a source buffer must be a multiple of the window size ({window_size} bytes), but it is {len} bytes large",
            len = buffer_src.len()
        );

        // Conservative alignment. Maximum of the cartesian product of [tx, rx] × [internal, external].
        let alignment = burst_config.min_compatible_alignment();

        assert_eq!(
            buffer_src.as_ptr() as usize % alignment,
            0,
            "the source buffer must be sufficiently aligned to {alignment} bytes for the burst config",
        );
        assert_eq!(
            row_width_bytes % alignment,
            0,
            "the size of a row in bytes must be sufficiently aligned to {alignment} bytes for the burst config",
        );
        assert_eq!(
            row_front_porch_bytes % alignment,
            0,
            "the size of a row's front porch in bytes must be sufficiently aligned to {alignment} bytes for the burst config",
        );
        // We need to make the destination peripheral read the front porch data from somewhere,
        // and that somewhere is currently the bounce buffer.
        // Therefore the front porch must be in bounds.
        assert!(
            row_front_porch_bytes <= window_size,
            "front porch too large"
        );

        let windows_len = buffer_src.len() / window_size;
        // TODO: Figure out a way to avoid `leak`ing memory.
        // We probably want to store the `Box`es and then unsafely extend the lifetime at sites of usage.
        let bounce_buffer_dst =
            Box::leak(allocate_dma_buffer_in(window_size, burst_config, allocator));
        let bounce_buffer_src =
            Box::leak(allocate_dma_buffer_in(window_size, burst_config, allocator));
        let src_descs = Self::linear_descriptors_for_buffer(window_size, burst_config, |desc| {
            desc.reset_for_tx(desc.next.is_null());
            // Length for TX buffers must be set in software.
            // In RX buffers, it is set by hardware.
            desc.set_length(desc.size());
        });
        let bounce_dst_descs =
            Self::linear_descriptors_for_buffer(window_size, burst_config, |_| {});
        let bounce_src_descs = if cyclic {
            Self::bounce_descriptors_for_buffer_cyclic(
                row_front_porch_bytes,
                row_width_bytes,
                window_size_rows,
                // SAFETY(review): this creates second `&'static mut` aliases of the
                // leaked bounce buffers so the descriptor builder can record raw
                // pointers into them — confirm these aliases never coexist with
                // live mutable use of the originals.
                unsafe {
                    (
                        &mut *(bounce_buffer_dst as *mut _),
                        &mut *(bounce_buffer_src as *mut _),
                    )
                },
                burst_config,
            )
        } else {
            Self::bounce_descriptors_for_buffer_single(
                windows_len,
                row_front_porch_bytes,
                row_width_bytes,
                window_size_rows,
                // SAFETY(review): same aliasing caveat as the cyclic branch above.
                unsafe {
                    (
                        &mut *(bounce_buffer_dst as *mut _),
                        &mut *(bounce_buffer_src as *mut _),
                    )
                },
                burst_config,
            )
        };

        Self {
            channel,
            peripheral_src,
            peripheral_dst: Some(peripheral_dst),
            burst_config,
            cyclic,
            window_size,
            windows_len,
            buffer_src,
            bounce_buffer_dst,
            bounce_buffer_src,
            src_descs,
            bounce_dst_descs,
            bounce_src_descs,
            window_index_next: 0,
            frame_index_next: 0,
        }
    }
|
|
|
|
|
|
|
|
|
|
|
|
fn linear_descriptors_for_buffer(
|
|
|
|
|
|
buffer_len: usize,
|
2026-02-15 02:33:42 +01:00
|
|
|
|
burst_config: BurstConfig,
|
2026-02-14 20:03:32 +01:00
|
|
|
|
mut setup_desc: impl FnMut(&mut DmaDescriptor),
|
|
|
|
|
|
) -> &'static mut [DmaDescriptor] {
|
|
|
|
|
|
let max_chunk_size = burst_config.max_compatible_chunk_size();
|
2026-02-15 02:33:42 +01:00
|
|
|
|
let descriptors_len = dma::descriptor_count(buffer_len, max_chunk_size, false);
|
2026-02-14 20:03:32 +01:00
|
|
|
|
// TODO: This leaks memory. Ensure it's only called during setup.
|
|
|
|
|
|
let descriptors = Box::leak(vec![DmaDescriptor::EMPTY; descriptors_len].into_boxed_slice());
|
|
|
|
|
|
|
|
|
|
|
|
// Link up the descriptors.
|
|
|
|
|
|
let mut next = core::ptr::null_mut();
|
|
|
|
|
|
for desc in descriptors.iter_mut().rev() {
|
|
|
|
|
|
desc.next = next;
|
|
|
|
|
|
next = desc;
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Prepare each descriptor's buffer size.
|
|
|
|
|
|
let mut descriptors_it = descriptors.iter_mut();
|
|
|
|
|
|
let mut remaining_len = buffer_len;
|
2026-02-15 02:33:42 +01:00
|
|
|
|
|
2026-02-14 20:03:32 +01:00
|
|
|
|
while remaining_len > 0 {
|
|
|
|
|
|
let chunk_size = core::cmp::min(max_chunk_size, remaining_len);
|
|
|
|
|
|
let desc = descriptors_it.next().unwrap();
|
|
|
|
|
|
desc.set_size(chunk_size);
|
|
|
|
|
|
(setup_desc)(desc);
|
|
|
|
|
|
remaining_len -= chunk_size;
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
descriptors
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-02-22 16:31:12 +01:00
|
|
|
|
    /// Fills one window's worth of TX descriptors so they stream, per row,
    /// `row_front_porch_bytes` of throwaway data followed by `row_width_bytes`
    /// of pixel data out of `bounce_buffer`. The window's final descriptor gets
    /// its EOF flag set so completion can be observed per window.
    ///
    /// * `bounce_buffer` - backing storage; front-porch descriptors point at its
    ///   start (arbitrary bytes), row descriptors at the row's slice within it.
    /// * `descriptors_window` - exactly `window_size_rows * descriptors_per_row`
    ///   descriptors, already linked; only buffer pointers/sizes/flags are set here.
    /// * `max_chunk_size` - largest per-descriptor chunk allowed by the burst config.
    /// * `descriptors_per_row` / `descriptors_per_row_front_porch` - precomputed
    ///   descriptor counts; must match `dma::descriptor_count` for the same sizes.
    fn prepare_descriptors_window(
        bounce_buffer: &mut [u8],
        descriptors_window: &mut [DmaDescriptor],
        row_front_porch_bytes: usize,
        row_width_bytes: usize,
        window_size_rows: usize,
        max_chunk_size: usize,
        descriptors_per_row: usize,
        descriptors_per_row_front_porch: usize,
    ) {
        for (row_index_in_window, descriptors_row) in descriptors_window
            .chunks_mut(descriptors_per_row)
            .enumerate()
        {
            // let row_index = row_index_in_window + window_index * window_size_rows;
            // Leading descriptors of each row carry the front porch; the rest carry pixel data.
            let (descriptors_row_front_porch, descriptors_row_stored) =
                descriptors_row.split_at_mut(descriptors_per_row_front_porch);

            // Prepare front porch descriptors.
            {
                let mut descriptors_it = descriptors_row_front_porch.iter_mut();
                let mut remaining_front_porch = row_front_porch_bytes;

                while remaining_front_porch > 0 {
                    let desc = descriptors_it.next().unwrap();
                    let chunk_size = core::cmp::min(max_chunk_size, remaining_front_porch);
                    remaining_front_porch -= chunk_size;
                    // Just make it point at a bounce buffer.
                    // It is guaranteed to have enough bytes by `DmaBounce::new`.
                    desc.buffer = bounce_buffer.as_mut_ptr();
                    desc.set_size(chunk_size);
                    desc.set_length(chunk_size);
                    desc.reset_for_tx(false);
                }

                assert!(
                    descriptors_it.next().is_none(),
                    "front porch descriptors must be used up"
                );
                // Sanity check: descriptor sizes cover the front porch exactly.
                assert_eq!(
                    descriptors_row_front_porch
                        .iter()
                        .map(|desc| desc.size())
                        .sum::<usize>(),
                    row_front_porch_bytes
                );
            }

            // Prepare window descriptors.
            {
                // The slice of the bounce buffer holding this row's pixel data.
                let mut remaining_bounce_buffer =
                    &mut bounce_buffer[row_index_in_window * row_width_bytes..][..row_width_bytes];

                // if remaining_bounce_buffer.len() > row_width_bytes {
                //     remaining_bounce_buffer = &mut remaining_bounce_buffer[..row_width_bytes];
                // }

                for desc in &mut *descriptors_row_stored {
                    let chunk_size = core::cmp::min(max_chunk_size, remaining_bounce_buffer.len());
                    desc.buffer = remaining_bounce_buffer.as_mut_ptr();
                    remaining_bounce_buffer = &mut remaining_bounce_buffer[chunk_size..];
                    desc.set_size(chunk_size);
                    desc.set_length(chunk_size);
                    desc.reset_for_tx(false);
                }

                assert!(
                    remaining_bounce_buffer.is_empty(),
                    "bounce buffer must be used up"
                );
                // Sanity check: descriptor sizes cover the row exactly.
                assert_eq!(
                    descriptors_row_stored
                        .iter()
                        .map(|desc| desc.size())
                        .sum::<usize>(),
                    row_width_bytes
                );
            }
        }

        // Set EOF bit on the last descriptor of the window, to signal
        // that the bounce buffer is done being read from.
        if let Some(last_desc) = descriptors_window.last_mut() {
            last_desc.reset_for_tx(true);
        }

        // Sanity check: the whole window covers porch + pixels for every row.
        assert_eq!(
            descriptors_window
                .iter()
                .map(|desc| desc.size())
                .sum::<usize>(),
            window_size_rows * (row_front_porch_bytes + row_width_bytes)
        );
    }
|
|
|
|
|
|
|
2026-02-17 00:51:02 +01:00
|
|
|
|
    /// Builds a single *cyclic* TX descriptor chain over both bounce buffers:
    /// list A's tail links to list B's head and vice versa, so the hardware
    /// alternates between the two buffers indefinitely without software restarts.
    ///
    /// Both bounce buffers must be exactly `row_width_bytes * window_size_rows` long.
    /// Returns the (leaked) combined descriptor storage; the first half describes
    /// `bounce_buffers.0`, the second half `bounce_buffers.1`.
    fn bounce_descriptors_for_buffer_cyclic(
        row_front_porch_bytes: usize,
        row_width_bytes: usize,
        window_size_rows: usize,
        bounce_buffers: (&'static mut [u8], &'static mut [u8]),
        burst_config: BurstConfig,
    ) -> &'static mut [DmaDescriptor] {
        assert_eq!(
            bounce_buffers.0.len(),
            bounce_buffers.1.len(),
            "bounce buffers must be equal in size"
        );

        let buffer_len = bounce_buffers.0.len();

        assert_eq!(
            buffer_len,
            row_width_bytes * window_size_rows,
            "the provided bounce buffers have an invalid size"
        );

        let max_chunk_size = burst_config.max_compatible_chunk_size();
        let descriptors_per_row_front_porch =
            dma::descriptor_count(row_front_porch_bytes, max_chunk_size, false);
        let descriptors_per_row_stored =
            dma::descriptor_count(row_width_bytes, max_chunk_size, false);
        let descriptors_per_row = descriptors_per_row_stored + descriptors_per_row_front_porch;
        let descriptors_per_window = window_size_rows * descriptors_per_row;
        // One contiguous allocation holding both windows' descriptors. Leaked (setup only).
        let descriptors_combined =
            Box::leak(vec![DmaDescriptor::EMPTY; 2 * descriptors_per_window].into_boxed_slice());
        let descriptors_pair = descriptors_combined.split_at_mut(descriptors_per_window);

        // Link up the descriptors.
        // Chains `descriptors` internally and makes its tail point at the head
        // of `descriptors_other` — applied both ways, this closes the cycle.
        fn link_up_descriptors(
            descriptors: &mut [DmaDescriptor],
            descriptors_other: &mut [DmaDescriptor],
        ) {
            let mut next = descriptors_other.first_mut().unwrap();
            for desc in descriptors.iter_mut().rev() {
                desc.next = next;
                next = desc;
            }
        }

        link_up_descriptors(descriptors_pair.0, descriptors_pair.1);
        link_up_descriptors(descriptors_pair.1, descriptors_pair.0);

        // Prepare each descriptor's buffer size.
        for (bounce_buffer, descriptors) in [
            (bounce_buffers.0, descriptors_pair.0),
            (bounce_buffers.1, descriptors_pair.1),
        ] {
            Self::prepare_descriptors_window(
                bounce_buffer,
                descriptors,
                row_front_porch_bytes,
                row_width_bytes,
                window_size_rows,
                max_chunk_size,
                descriptors_per_row,
                descriptors_per_row_front_porch,
            );
        }

        descriptors_combined
    }
|
|
|
|
|
|
|
2026-02-17 00:51:02 +01:00
|
|
|
|
    /// Builds one *linear* (null-terminated) TX descriptor chain covering a whole
    /// frame of `windows_len` windows, where consecutive windows alternate between
    /// the two bounce buffers (even windows → buffer 0, odd windows → buffer 1).
    ///
    /// `windows_len` must be even — otherwise consecutive frames would start on
    /// different buffers and two distinct descriptor lists would be required.
    /// Both bounce buffers must be exactly `row_width_bytes * window_size_rows` long.
    /// Returns the (leaked) descriptor storage for the full frame.
    fn bounce_descriptors_for_buffer_single(
        windows_len: usize,
        row_front_porch_bytes: usize,
        row_width_bytes: usize,
        window_size_rows: usize,
        bounce_buffers: (&'static mut [u8], &'static mut [u8]),
        burst_config: BurstConfig,
    ) -> &'static mut [DmaDescriptor] {
        assert_eq!(
            bounce_buffers.0.len(),
            bounce_buffers.1.len(),
            "bounce buffers must be equal in size"
        );
        // If an odd number of windows were needed, two descriptor lists would be needed,
        assert_eq!(windows_len % 2, 0, "the number of windows must be even");

        let buffer_len = bounce_buffers.0.len();

        assert_eq!(
            buffer_len,
            row_width_bytes * window_size_rows,
            "the provided bounce buffers have an invalid size"
        );

        let max_chunk_size = burst_config.max_compatible_chunk_size();
        let descriptors_per_row_front_porch =
            dma::descriptor_count(row_front_porch_bytes, max_chunk_size, false);
        let descriptors_per_row_stored =
            dma::descriptor_count(row_width_bytes, max_chunk_size, false);
        let descriptors_per_row = descriptors_per_row_stored + descriptors_per_row_front_porch;
        let descriptors_per_window = window_size_rows * descriptors_per_row;
        let descriptors_per_frame = descriptors_per_window * windows_len;
        // Leaked (setup only); covers every window of the frame.
        let descriptors_frame =
            Box::leak(vec![DmaDescriptor::EMPTY; descriptors_per_frame].into_boxed_slice());

        // Link up the descriptors.
        // Walk backwards so the last descriptor terminates with a null `next`.
        let mut next = core::ptr::null_mut();
        for desc in descriptors_frame.iter_mut().rev() {
            desc.next = next;
            next = desc;
        }

        // Prepare each descriptor's buffer size.
        let bounce_buffers = [bounce_buffers.0, bounce_buffers.1];

        for (window_index, descriptors_window) in descriptors_frame
            .chunks_mut(descriptors_per_window)
            .enumerate()
        {
            // Alternate bounce buffers per window (even → 0, odd → 1).
            let bounce_buffer_index = window_index % 2;
            let bounce_buffer = &mut *bounce_buffers[bounce_buffer_index];

            Self::prepare_descriptors_window(
                bounce_buffer,
                descriptors_window,
                row_front_porch_bytes,
                row_width_bytes,
                window_size_rows,
                max_chunk_size,
                descriptors_per_row,
                descriptors_per_row_front_porch,
            );
        }

        // Sanity check: the frame's descriptors cover every row's porch + pixels.
        assert_eq!(
            descriptors_frame
                .iter()
                .map(|desc| desc.size())
                .sum::<usize>(),
            windows_len * window_size_rows * (row_front_porch_bytes + row_width_bytes)
        );

        descriptors_frame
    }
|
|
|
|
|
|
|
2026-02-15 02:33:42 +01:00
|
|
|
|
fn linear_descriptors_prepare(
|
2026-02-14 20:03:32 +01:00
|
|
|
|
descriptors: &mut [DmaDescriptor],
|
2026-02-15 02:33:42 +01:00
|
|
|
|
mut buffer: Option<&mut [u8]>,
|
2026-02-14 20:03:32 +01:00
|
|
|
|
mut setup_desc: impl FnMut(&mut DmaDescriptor),
|
|
|
|
|
|
) {
|
|
|
|
|
|
for descriptor in descriptors.iter_mut() {
|
2026-02-15 02:33:42 +01:00
|
|
|
|
if let Some(inner_buffer) = buffer {
|
|
|
|
|
|
descriptor.buffer = inner_buffer.as_mut_ptr();
|
|
|
|
|
|
buffer = Some(&mut inner_buffer[descriptor.size()..]);
|
|
|
|
|
|
}
|
2026-02-14 20:03:32 +01:00
|
|
|
|
(setup_desc)(descriptor);
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-02-15 02:33:42 +01:00
|
|
|
|
if let Some(buffer) = buffer {
|
|
|
|
|
|
assert!(
|
|
|
|
|
|
buffer.is_empty(),
|
2026-02-22 00:59:01 +01:00
|
|
|
|
"a buffer of an incompatible length was assigned to a descriptor set"
|
2026-02-15 02:33:42 +01:00
|
|
|
|
);
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
2026-02-14 20:03:32 +01:00
|
|
|
|
|
2026-02-15 02:33:42 +01:00
|
|
|
|
    /// Enables and binds the outbound DMA channel's interrupt, then unmasks
    /// the `OUT_EOF` interrupt in the GDMA registers so each window's EOF
    /// descriptor fires `dma_outbound_interrupt_handler`.
    ///
    /// NOTE(review): relies on `dma_outbound_interrupt_handler` defined elsewhere
    /// in this file; idempotent-looking, but confirm re-entrancy if called twice.
    fn enable_interrupts() {
        // Enable interrupts for the peripheral
        // interrupt::enable(INTERRUPT_INBOUND, dma_inbound_interrupt_handler.priority()).unwrap();
        interrupt::enable(
            INTERRUPT_OUTBOUND,
            dma_outbound_interrupt_handler.priority(),
        )
        .unwrap();

        // Bind the handler
        unsafe {
            // interrupt::bind_interrupt(INTERRUPT_INBOUND, dma_inbound_interrupt_handler.handler());
            interrupt::bind_interrupt(INTERRUPT_OUTBOUND, dma_outbound_interrupt_handler.handler());
        }

        // Enable interrupts in the peripheral.
        // DMA::regs()
        //     .ch(DMA_CHANNEL_INBOUND)
        //     .in_int()
        //     .ena()
        //     .modify(|_, w| w.in_done().bit(true));
        DMA::regs()
            .ch(DMA_CHANNEL_OUTBOUND)
            .out_int()
            .ena()
            .modify(|_, w| w.out_eof().bit(true));
    }
|
|
|
|
|
|
|
|
|
|
|
|
    /// Receive a window of bytes into the current dst bounce buffer.
    /// Finally, swaps the bounce buffers.
    ///
    /// Performs one `Mem2Mem` copy of the window at `window_index_next` from
    /// `buffer_src` into `bounce_buffer_dst`, waiting asynchronously for the
    /// copy to complete before advancing the window counter.
    async fn receive_window(&mut self) {
        // Descriptors are initialized by `DmaTxBuf::new`.
        // The slice of the source buffer corresponding to the next window.
        let buffer_src_window =
            &mut self.buffer_src[self.window_index_next * self.window_size..][..self.window_size];

        // Point the source (TX side of mem2mem) descriptors at this window.
        Self::linear_descriptors_prepare(self.src_descs, Some(buffer_src_window), |_desc| {
            // No need to call `DmaDescriptor::reset_for_tx`, because
            // 1. we don't rely on the ownership flag;
            // 2. the EOF flag is already set during the construction of this buffer.
        });
        // TODO: Precompute a descriptor list for each buffer, then use `None` instead of `Some(&mut *self.bounce_buffer_dst)`.
        // Point the destination (RX side) descriptors at the current dst bounce buffer.
        Self::linear_descriptors_prepare(
            self.bounce_dst_descs,
            Some(&mut *self.bounce_buffer_dst),
            |desc| {
                desc.reset_for_rx();
            },
        );

        {
            // Extend the lifetime to 'static because it is required by Mem2Mem.
            //
            // Safety:
            // Pointees are done being used by the driver before this scope ends,
            // this is because we `SimpleMem2MemTransfer::wait()` on the transfer to finish.
            let bounce_dst_descs: &'static mut [DmaDescriptor] =
                unsafe { &mut *(self.bounce_dst_descs as *mut _) };
            let src_descs: &'static mut [DmaDescriptor] =
                unsafe { &mut *(self.src_descs as *mut _) };

            // Drive the copy through the source peripheral's DMA channel.
            let mut mem2mem = Mem2Mem::new(self.channel.reborrow(), self.peripheral_src.reborrow())
                .into_async()
                .with_descriptors(bounce_dst_descs, src_descs, self.burst_config)
                .unwrap();
            let transfer = mem2mem
                .start_transfer(&mut self.bounce_buffer_dst, buffer_src_window)
                .unwrap();

            // Block (async) until the copy completes — upholds the safety comment above.
            transfer.wait_async().await.unwrap();
        }

        // Advance by one window; this also swaps dst/src bounce buffers.
        self.increase_window_counter(1);
    }
|
|
|
|
|
|
|
|
|
|
|
|
fn increase_window_counter(&mut self, windows: usize) {
|
|
|
|
|
|
if windows % 2 == 1 {
|
|
|
|
|
|
core::mem::swap(&mut self.bounce_buffer_dst, &mut self.bounce_buffer_src);
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-02-18 05:03:05 +01:00
|
|
|
|
self.window_index_next += windows;
|
2026-02-15 18:17:16 +01:00
|
|
|
|
self.frame_index_next += self.window_index_next / self.windows_len;
|
|
|
|
|
|
self.window_index_next = self.window_index_next % self.windows_len;
|
2026-02-15 02:33:42 +01:00
|
|
|
|
}
|
|
|
|
|
|
|
2026-02-22 00:59:01 +01:00
|
|
|
|
pub async fn send(&mut self) {
|
2026-02-15 02:33:42 +01:00
|
|
|
|
Self::enable_interrupts();
|
|
|
|
|
|
|
2026-02-15 18:17:16 +01:00
|
|
|
|
// Receive the first window, so that the outbound transfer can read valid data.
|
2026-02-18 05:03:05 +01:00
|
|
|
|
self.receive_window().await;
|
2026-02-15 18:17:16 +01:00
|
|
|
|
|
2026-02-15 02:33:42 +01:00
|
|
|
|
let mut dma_tx_buffer = self.get_dma_tx_buffer();
|
2026-02-17 00:51:02 +01:00
|
|
|
|
let mut transfer = self
|
2026-02-15 02:33:42 +01:00
|
|
|
|
.peripheral_dst
|
|
|
|
|
|
.take()
|
|
|
|
|
|
.unwrap()
|
2026-02-17 00:51:02 +01:00
|
|
|
|
.send(self.cyclic /* Send perpetually */, dma_tx_buffer)
|
|
|
|
|
|
.unwrap_or_else(|(error, _, _)| {
|
|
|
|
|
|
panic!("failed to begin the transmission of the first frame: {error:?}");
|
|
|
|
|
|
});
|
2026-02-14 20:03:32 +01:00
|
|
|
|
|
2026-02-15 18:17:16 +01:00
|
|
|
|
let mut windows_skipped_total = 0;
|
2026-02-14 20:03:32 +01:00
|
|
|
|
|
2026-02-15 18:17:16 +01:00
|
|
|
|
loop {
|
2026-02-22 00:59:01 +01:00
|
|
|
|
// warn!(
|
|
|
|
|
|
// "Receiving window: {} {}",
|
|
|
|
|
|
// self.window_index_next, self.frame_index_next
|
|
|
|
|
|
// );
|
2026-02-18 05:03:05 +01:00
|
|
|
|
self.receive_window().await;
|
2026-02-22 00:59:01 +01:00
|
|
|
|
// warn!(
|
|
|
|
|
|
// "Window received: {} {}",
|
|
|
|
|
|
// self.window_index_next, self.frame_index_next
|
|
|
|
|
|
// );
|
2026-02-18 05:03:05 +01:00
|
|
|
|
let windows_skipped = WINDOWS_SKIPPED
|
|
|
|
|
|
.wait()
|
2026-02-22 00:59:01 +01:00
|
|
|
|
.with_timeout(Duration::from_millis(100))
|
|
|
|
|
|
.await
|
|
|
|
|
|
.unwrap_or_else(|_| {
|
2026-02-22 16:09:20 +01:00
|
|
|
|
error!("Timed out when waiting for skipped windows.");
|
2026-02-22 00:59:01 +01:00
|
|
|
|
0 // TODO: This should be -1 to repeat the same window.
|
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
|
|
// let windows_skipped = match windows_skipped {
|
|
|
|
|
|
// Ok(windows_skipped) => windows_skipped,
|
|
|
|
|
|
// Err(_) => {
|
|
|
|
|
|
// warn!(
|
|
|
|
|
|
// "Waiting for skipped windows timed out. Transfer done: {}",
|
|
|
|
|
|
// transfer.is_done()
|
|
|
|
|
|
// );
|
|
|
|
|
|
// if transfer.is_done() {
|
|
|
|
|
|
// let (result, _, _) = transfer.wait();
|
|
|
|
|
|
// panic!("Transfer result: {result:?}");
|
|
|
|
|
|
// }
|
|
|
|
|
|
// 0
|
|
|
|
|
|
// }
|
|
|
|
|
|
// };
|
2026-02-15 18:17:16 +01:00
|
|
|
|
|
|
|
|
|
|
if windows_skipped > 0 {
|
|
|
|
|
|
self.increase_window_counter(windows_skipped);
|
|
|
|
|
|
windows_skipped_total += windows_skipped;
|
2026-02-22 00:59:01 +01:00
|
|
|
|
// error!(
|
|
|
|
|
|
// "Skipped {windows_skipped} windows. Windows skipped per frame: {:.2}%",
|
2026-02-22 16:09:20 +01:00
|
|
|
|
// 100.0 * windows_skipped_total as f32
|
|
|
|
|
|
// / (self.windows_len * (self.frame_index_next + 1)) as f32
|
2026-02-22 00:59:01 +01:00
|
|
|
|
// );
|
2026-02-15 02:33:42 +01:00
|
|
|
|
}
|
2026-02-17 00:51:02 +01:00
|
|
|
|
|
2026-02-22 00:59:01 +01:00
|
|
|
|
// warn!(
|
|
|
|
|
|
// "X: {} {} {}",
|
|
|
|
|
|
// windows_skipped, self.window_index_next, self.frame_index_next
|
|
|
|
|
|
// );
|
|
|
|
|
|
|
|
|
|
|
|
if !self.cyclic && (self.window_index_next == 1 || transfer.is_done()) {
|
|
|
|
|
|
if self.window_index_next > 1 {
|
|
|
|
|
|
self.increase_window_counter(self.windows_len - self.window_index_next + 1);
|
|
|
|
|
|
} else if self.window_index_next == 0 {
|
|
|
|
|
|
self.increase_window_counter(1);
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-02-17 00:51:02 +01:00
|
|
|
|
// TODO: Investigate why the DPI transfer isn't done at this point.
|
|
|
|
|
|
// The `DpiTransfer::wait()` below takes 0.001039 s.
|
|
|
|
|
|
// Perhaps it's the minimum screen refresh period?
|
|
|
|
|
|
//
|
|
|
|
|
|
// assert!(transfer.is_done());
|
|
|
|
|
|
// if !transfer.is_done() {
|
|
|
|
|
|
// error!(
|
|
|
|
|
|
// "transfer is not done yet. {} {}",
|
|
|
|
|
|
// self.frame_index_next, self.window_index_next
|
|
|
|
|
|
// );
|
|
|
|
|
|
// }
|
|
|
|
|
|
|
|
|
|
|
|
let result;
|
|
|
|
|
|
let peripheral_dst;
|
|
|
|
|
|
// let start = Instant::now();
|
|
|
|
|
|
(result, peripheral_dst, dma_tx_buffer) = transfer.wait();
|
|
|
|
|
|
// let duration = Instant::now().duration_since(start);
|
|
|
|
|
|
// warn!("Waited for {} seconds", duration.display_as_secs());
|
|
|
|
|
|
|
|
|
|
|
|
if let Err(error) = result {
|
|
|
|
|
|
error!("DPI error during sending: {error:?}");
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
transfer =
|
|
|
|
|
|
peripheral_dst
|
|
|
|
|
|
.send(false, dma_tx_buffer)
|
|
|
|
|
|
.unwrap_or_else(|(error, _, _)| {
|
|
|
|
|
|
panic!("failed to begin the transmission of a frame: {error:?}");
|
|
|
|
|
|
});
|
2026-02-22 00:59:01 +01:00
|
|
|
|
|
|
|
|
|
|
FRAMES_SKIPPED.signal(
|
|
|
|
|
|
FRAMES_SKIPPED
|
|
|
|
|
|
.try_take()
|
|
|
|
|
|
.map(|frames_skipped| frames_skipped + 1)
|
|
|
|
|
|
.unwrap_or_default(),
|
|
|
|
|
|
);
|
2026-02-17 00:51:02 +01:00
|
|
|
|
}
|
2026-02-15 02:33:42 +01:00
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
fn get_dma_tx_buffer(&mut self) -> DmaTxBounceBuf {
|
|
|
|
|
|
DmaTxBounceBuf {
|
|
|
|
|
|
preparation: dma::Preparation {
|
|
|
|
|
|
start: self.bounce_src_descs.first_mut().unwrap(),
|
|
|
|
|
|
direction: dma::TransferDirection::Out,
|
|
|
|
|
|
accesses_psram: false,
|
|
|
|
|
|
burst_transfer: self.burst_config,
|
2026-02-22 16:09:20 +01:00
|
|
|
|
check_owner: Some(false), // Possibly want to set this to false
|
|
|
|
|
|
auto_write_back: false, // Possibly true
|
2026-02-15 02:33:42 +01:00
|
|
|
|
},
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub struct DmaTxBounceBuf {
|
|
|
|
|
|
preparation: dma::Preparation,
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
unsafe impl DmaTxBuffer for DmaTxBounceBuf {
|
|
|
|
|
|
type View = Self;
|
|
|
|
|
|
type Final = Self;
|
|
|
|
|
|
|
|
|
|
|
|
fn prepare(&mut self) -> dma::Preparation {
|
|
|
|
|
|
dma::Preparation {
|
|
|
|
|
|
start: self.preparation.start,
|
|
|
|
|
|
direction: self.preparation.direction,
|
|
|
|
|
|
accesses_psram: self.preparation.accesses_psram,
|
|
|
|
|
|
burst_transfer: self.preparation.burst_transfer,
|
|
|
|
|
|
check_owner: self.preparation.check_owner,
|
|
|
|
|
|
auto_write_back: self.preparation.auto_write_back,
|
2026-02-14 20:03:32 +01:00
|
|
|
|
}
|
|
|
|
|
|
}
|
2026-02-15 02:33:42 +01:00
|
|
|
|
|
|
|
|
|
|
fn into_view(self) -> Self::View {
|
|
|
|
|
|
self
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
fn from_view(view: Self::View) -> Self::Final {
|
|
|
|
|
|
view
|
|
|
|
|
|
}
|
2026-02-14 20:03:32 +01:00
|
|
|
|
}
|
|
|
|
|
|
|
2026-02-22 00:59:01 +01:00
|
|
|
|
/// Intended to be listened on by the renderer, to synchronize the refresh frequency with.
|
|
|
|
|
|
pub static FRAMES_SKIPPED: Signal<RawMutex, usize> = Signal::new();
|
2026-02-15 18:17:16 +01:00
|
|
|
|
static WINDOWS_SKIPPED: Signal<RawMutex, usize> = Signal::new();
|
2026-02-18 05:03:05 +01:00
|
|
|
|
// static INBOUND_TRANSFER_FINISHED: Signal<RawMutex, ()> = Signal::new();
|
2026-02-14 20:03:32 +01:00
|
|
|
|
|
2026-02-15 02:33:42 +01:00
|
|
|
|
#[handler(priority = Priority::Priority3)]
|
2026-02-14 20:03:32 +01:00
|
|
|
|
#[ram] // Improves performance.
|
2026-02-18 05:03:05 +01:00
|
|
|
|
fn dma_outbound_interrupt_handler() {
|
|
|
|
|
|
let interrupt = DMA::regs().ch(DMA_CHANNEL_OUTBOUND).out_int();
|
2026-02-15 18:17:16 +01:00
|
|
|
|
let bounce_buffer_processed = interrupt.st().read().out_eof().bit_is_set();
|
|
|
|
|
|
if bounce_buffer_processed {
|
|
|
|
|
|
// Clear the bit by writing 1 to the clear bits.
|
|
|
|
|
|
interrupt.clr().write(|w| w.out_eof().bit(true));
|
|
|
|
|
|
|
2026-02-22 00:59:01 +01:00
|
|
|
|
WINDOWS_SKIPPED.signal(
|
|
|
|
|
|
WINDOWS_SKIPPED
|
|
|
|
|
|
.try_take()
|
|
|
|
|
|
.map(|windows_skipped| windows_skipped + 1)
|
|
|
|
|
|
.unwrap_or_default(),
|
|
|
|
|
|
);
|
2026-02-15 18:17:16 +01:00
|
|
|
|
}
|
2026-02-14 20:03:32 +01:00
|
|
|
|
}
|
|
|
|
|
|
|
2026-02-18 05:03:05 +01:00
|
|
|
|
// #[handler(priority = Priority::Priority3)]
|
|
|
|
|
|
// #[ram] // Improves performance.
|
|
|
|
|
|
// fn dma_inbound_interrupt_handler() {
|
|
|
|
|
|
// warn!("Inbound");
|
|
|
|
|
|
|
|
|
|
|
|
// let interrupt = DMA::regs().ch(DMA_CHANNEL_INBOUND).in_int();
|
|
|
|
|
|
// let bounce_buffer_processed = interrupt.st().read().in_done().bit_is_set();
|
|
|
|
|
|
// if bounce_buffer_processed {
|
|
|
|
|
|
// // Clear the bit by writing 1 to the clear bits.
|
|
|
|
|
|
// interrupt.clr().write(|w| w.in_done().bit(true));
|
|
|
|
|
|
|
|
|
|
|
|
// assert!(
|
|
|
|
|
|
// !INBOUND_TRANSFER_FINISHED.signaled(),
|
|
|
|
|
|
// "inbound transfer already signalled"
|
|
|
|
|
|
// );
|
|
|
|
|
|
// INBOUND_TRANSFER_FINISHED.signal(());
|
|
|
|
|
|
// }
|
|
|
|
|
|
// }
|
|
|
|
|
|
|
2026-02-22 00:59:01 +01:00
|
|
|
|
// pub async fn run_lcd(
|
|
|
|
|
|
// mut st7701s: St7701s<'static, Blocking>,
|
|
|
|
|
|
// framebuffer: &'static mut Framebuffer,
|
|
|
|
|
|
// ) {
|
|
|
|
|
|
// loop {
|
|
|
|
|
|
// // Timer::after(Duration::from_millis(100)).await;
|
|
|
|
|
|
// // yield_now().await;
|
|
|
|
|
|
// SIGNAL_LCD_SUBMIT.wait().await;
|
|
|
|
|
|
|
|
|
|
|
|
// // TODO: Use bounce buffers:
|
|
|
|
|
|
// // https://docs.espressif.com/projects/esp-idf/en/v5.0/esp32s3/api-reference/peripherals/lcd.html#bounce-buffer-with-single-psram-frame-buffer
|
|
|
|
|
|
// // This can be implemented as a `DmaTxBuffer`.
|
|
|
|
|
|
// let transfer = match st7701s.dpi.send(false, framebuffer.dma_buf.take().unwrap()) {
|
|
|
|
|
|
// Err((error, result_dpi, result_dma_buf)) => {
|
|
|
|
|
|
// error!(
|
|
|
|
|
|
// "An error occurred while initiating transfer of the framebuffer to the LCD display: {error:?}"
|
|
|
|
|
|
// );
|
|
|
|
|
|
// st7701s.dpi = result_dpi;
|
|
|
|
|
|
// framebuffer.dma_buf = Some(result_dma_buf);
|
|
|
|
|
|
// continue;
|
|
|
|
|
|
// }
|
|
|
|
|
|
// Ok(transfer) => transfer,
|
|
|
|
|
|
// };
|
|
|
|
|
|
|
|
|
|
|
|
// // This could be used to allow other tasks to be executed on the first core, but that causes
|
|
|
|
|
|
// // the flash to be accessed, which interferes with the framebuffer transfer.
|
|
|
|
|
|
// // For that reason, it is disabled, and this task blocks the first core, until the transfer
|
|
|
|
|
|
// // is complete.
|
|
|
|
|
|
// #[cfg(not(feature = "limit-fps"))]
|
|
|
|
|
|
// while !transfer.is_done() {
|
|
|
|
|
|
// // Timer::after_millis(1).await;
|
|
|
|
|
|
// rmk::embassy_futures::yield_now().await;
|
|
|
|
|
|
// }
|
|
|
|
|
|
|
|
|
|
|
|
// let result;
|
|
|
|
|
|
// let dma_buf;
|
|
|
|
|
|
// (result, st7701s.dpi, dma_buf) = transfer.wait();
|
|
|
|
|
|
// framebuffer.dma_buf = Some(dma_buf);
|
|
|
|
|
|
|
|
|
|
|
|
// SIGNAL_UI_RENDER.signal(());
|
|
|
|
|
|
|
|
|
|
|
|
// if let Err(error) = result {
|
|
|
|
|
|
// error!(
|
|
|
|
|
|
// "An error occurred while transferring framebuffer to the LCD display: {error:?}"
|
|
|
|
|
|
// );
|
|
|
|
|
|
// }
|
|
|
|
|
|
// }
|
|
|
|
|
|
// }
|
2026-02-14 20:03:32 +01:00
|
|
|
|
|
|
|
|
|
|
/// Allocates a buffer appropriately aligned for use with DMA.
|
2026-02-22 00:59:01 +01:00
|
|
|
|
pub fn allocate_dma_buffer_in<A: Allocator>(
|
|
|
|
|
|
len: usize,
|
|
|
|
|
|
burst_config: BurstConfig,
|
|
|
|
|
|
alloc: A,
|
|
|
|
|
|
) -> Box<[u8], A> {
|
|
|
|
|
|
// Conservative alignment. Maxiumum of the cartesian product of [tx, rx] × [internal, external].
|
|
|
|
|
|
let alignment = burst_config.min_compatible_alignment();
|
2026-02-14 20:03:32 +01:00
|
|
|
|
|
|
|
|
|
|
assert_eq!(
|
2026-02-22 00:59:01 +01:00
|
|
|
|
len % alignment,
|
2026-02-14 20:03:32 +01:00
|
|
|
|
0,
|
2026-02-22 00:59:01 +01:00
|
|
|
|
"the size of a DMA buffer must be a multiple of {alignment} bytes, but it is {len} bytes large"
|
2026-02-14 20:03:32 +01:00
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
|
|
// ⚠️ Note: For chips that support DMA to/from PSRAM (ESP32-S3) DMA transfers to/from PSRAM
|
|
|
|
|
|
// have extra alignment requirements. The address and size of the buffer pointed to by each
|
|
|
|
|
|
// descriptor must be a multiple of the cache line (block) size. This is 32 bytes on ESP32-S3.
|
|
|
|
|
|
// That is ensured by the `assert_eq` preceding this block.
|
|
|
|
|
|
unsafe {
|
|
|
|
|
|
let raw = alloc
|
2026-02-22 00:59:01 +01:00
|
|
|
|
.allocate_zeroed(Layout::from_size_align(len, alignment).unwrap())
|
2026-02-14 20:03:32 +01:00
|
|
|
|
.expect("failed to allocate a DMA buffer");
|
|
|
|
|
|
Box::from_raw_in(raw.as_ptr(), alloc)
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-02-22 00:59:01 +01:00
|
|
|
|
pub struct Framebuffer {
|
|
|
|
|
|
pub width: u32,
|
|
|
|
|
|
pub height: u32,
|
|
|
|
|
|
pub bounce_buffers: DmaBounce,
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-02-14 20:03:32 +01:00
|
|
|
|
impl Framebuffer {
|
2026-02-22 00:59:01 +01:00
|
|
|
|
pub fn new(
|
|
|
|
|
|
channel: DMA_CH0<'static>,
|
|
|
|
|
|
peripheral_src: AnySpi<'static>,
|
|
|
|
|
|
peripheral_dst: Dpi<'static, Blocking>,
|
2026-02-22 16:09:20 +01:00
|
|
|
|
front_porch_pixels: u32,
|
|
|
|
|
|
width_pixels: u32,
|
|
|
|
|
|
height_pixels: u32,
|
2026-02-22 00:59:01 +01:00
|
|
|
|
rows_per_window: usize,
|
|
|
|
|
|
cyclic: bool,
|
|
|
|
|
|
) -> Self {
|
2026-02-22 16:09:20 +01:00
|
|
|
|
const BYTES_PER_PIXEL: usize = core::mem::size_of::<u16>();
|
|
|
|
|
|
let buffer_size = width_pixels as usize * height_pixels as usize * BYTES_PER_PIXEL;
|
2026-02-22 00:59:01 +01:00
|
|
|
|
let burst_config = BurstConfig {
|
|
|
|
|
|
internal_memory: InternalBurstConfig::Enabled,
|
|
|
|
|
|
external_memory: ExternalBurstConfig::Size64,
|
|
|
|
|
|
};
|
|
|
|
|
|
let psram_buffer = Box::leak(allocate_dma_buffer_in(
|
|
|
|
|
|
buffer_size,
|
|
|
|
|
|
burst_config,
|
|
|
|
|
|
&PSRAM_ALLOCATOR,
|
|
|
|
|
|
));
|
|
|
|
|
|
let bounce_buffers = DmaBounce::new(
|
|
|
|
|
|
Global,
|
|
|
|
|
|
channel,
|
|
|
|
|
|
peripheral_src,
|
|
|
|
|
|
peripheral_dst,
|
|
|
|
|
|
psram_buffer,
|
2026-02-22 16:09:20 +01:00
|
|
|
|
front_porch_pixels as usize * BYTES_PER_PIXEL,
|
|
|
|
|
|
width_pixels as usize * BYTES_PER_PIXEL,
|
|
|
|
|
|
rows_per_window,
|
2026-02-22 00:59:01 +01:00
|
|
|
|
burst_config,
|
|
|
|
|
|
cyclic,
|
2026-02-14 20:03:32 +01:00
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
|
|
Self {
|
2026-02-22 16:09:20 +01:00
|
|
|
|
width: width_pixels,
|
|
|
|
|
|
height: height_pixels,
|
2026-02-22 00:59:01 +01:00
|
|
|
|
bounce_buffers,
|
2026-02-14 20:03:32 +01:00
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn as_target_pixels(&mut self) -> &mut [Rgb565Pixel] {
|
2026-02-22 00:59:01 +01:00
|
|
|
|
bytemuck::cast_slice_mut::<_, Rgb565Pixel>(self.bounce_buffers.buffer_src)
|
2026-02-14 20:03:32 +01:00
|
|
|
|
}
|
|
|
|
|
|
}
|