virtq: access SplitVq rings through volatile references
These fields are used to send data to and receive data from the host. Rust cannot reason about changes made by the host, nor does it know that our writes to this memory are needed for their side effects, so plain accesses may be elided or reordered. As such, we need to access the rings through volatile references.
cagatay-y committed May 2, 2024
1 parent 880bf76 commit b79e219
Showing 2 changed files with 104 additions and 60 deletions.
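The core of the change is the access pattern itself, which the diff applies to the descriptor table, the available ring, and the used ring. As an illustration only (not part of the commit), here is a minimal sketch of that pattern with a hypothetical `Ring` type, using the same `volatile` crate APIs (`VolatileRef`, `VolatilePtr`, `map_field!`) that the new code imports:

```rust
use core::ptr::NonNull;

use volatile::{map_field, VolatileRef};

// Hypothetical stand-in for the statically sized head of a ring.
#[repr(C)]
struct Ring {
    flags: u16,
    index: u16,
}

// `ring` is assumed to point at memory that the device also observes.
fn publish_one_entry(ring: NonNull<Ring>) {
    // SAFETY: the caller guarantees `ring` is valid for reads and writes.
    let mut ring_ref = unsafe { VolatileRef::new(ring) };
    let ring_ptr = ring_ref.as_mut_ptr();

    // A plain `index += 1` through an ordinary reference could be merged,
    // reordered, or elided; this volatile read-modify-write cannot.
    map_field!(ring_ptr.index).update(|index| index.wrapping_add(1));
}
```

The diff below applies the same pattern: each ring is wrapped in a `VolatileRef` once, and every access goes through `map_field!` projections on the derived `VolatilePtr`.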
4 changes: 2 additions & 2 deletions Cargo.toml
@@ -71,7 +71,7 @@ shell = ["simple-shell"]

[dependencies]
hermit-macro = { path = "hermit-macro" }
virtio-def = { path = "virtio-def", features = ["zerocopy"] }
virtio-def = { path = "virtio-def" }
ahash = { version = "0.8", default-features = false }
align-address = "0.1"
bit_field = "0.10"
@@ -101,7 +101,7 @@ build-time = "0.1.3"
async-trait = "0.1.80"
async-lock = { version = "3.3.0", default-features = false }
simple-shell = { version = "0.0.1", optional = true }
volatile = "0.5.4"
volatile = { version = "0.5.4", features = ["unstable"] }

[dependencies.smoltcp]
version = "0.11"
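The newly enabled `unstable` feature gates the slice operations on `VolatilePtr` (`len`, `index`, `split_at`) that the split.rs changes below rely on for addressing individual ring entries. A small sketch of those operations (illustration only, not part of the commit), using a plain buffer in place of the shared ring memory:

```rust
use core::ptr::NonNull;

use volatile::VolatileRef;

// Illustration only: a plain buffer stands in for the shared ring memory.
fn sum_all_but_last(buf: &mut [u16; 4]) -> u16 {
    // `VolatileRef`/`VolatilePtr` also work for unsized targets such as slices.
    // SAFETY: `buf` is valid for reads and writes and outlives the reference.
    let mut buf_ref = unsafe { VolatileRef::new(NonNull::from(&mut buf[..])) };
    let buf_ptr = buf_ref.as_mut_ptr();

    // `len`, `split_at`, and `index` project into the slice while keeping every
    // access volatile; this mirrors how ring entries are addressed in split.rs.
    let data_ptr = buf_ptr.split_at(buf_ptr.len() - 1).0;

    let mut sum = 0u16;
    for i in 0..data_ptr.len() {
        sum = sum.wrapping_add(data_ptr.index(i).read());
    }
    sum
}
```

The `ring_ptr`/`event_ptr` helpers in split.rs use exactly these projections to carve the trailing event field off the shared `ring_and_event` allocation.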
160 changes: 102 additions & 58 deletions src/drivers/virtio/virtqueue/split.rs
@@ -8,11 +8,14 @@ use alloc::rc::Rc;
use alloc::vec::Vec;
use core::alloc::{Allocator, Layout};
use core::cell::RefCell;
use core::marker::PhantomPinned;
use core::mem::{size_of, MaybeUninit};
use core::ptr;
use core::pin::Pin;
use core::ptr::{self, NonNull};

use virtio_def::num::{le16, le32, le64};
use zerocopy::{FromBytes, FromZeroes};
use volatile::access::ReadOnly;
use volatile::{map_field, VolatilePtr, VolatileRef};

#[cfg(not(feature = "pci"))]
use super::super::transport::mmio::{ComCfg, NotifCfg, NotifCtrl};
@@ -47,13 +50,19 @@ impl Descriptor {
}
}

struct DescrTable {
_pin: PhantomPinned,
descr_table: [MaybeUninit<Descriptor>],
}

// The generic structure eases the creation of the layout for the statically
// sized portion of [AvailRing] and [UsedRing]. This way, to be allocated they
// only need to be extended with the dynamic portion.
#[repr(C)]
struct GenericRing<T: ?Sized> {
flags: le16,
index: le16,
_pin: PhantomPinned,

// Rust does not allow a field other than the last one to be unsized.
// Unfortunately, this is not the case with the layout in the specification.
@@ -67,26 +76,18 @@ const RING_AND_EVENT_ERROR: &str = "ring_and_event should have at least enough e
type AvailRing = GenericRing<[MaybeUninit<le16>]>;

impl AvailRing {
fn ring_ref(&self) -> &[MaybeUninit<le16>] {
self.ring_and_event
.split_last()
.expect(RING_AND_EVENT_ERROR)
.1
}

fn ring_mut(&mut self) -> &mut [MaybeUninit<le16>] {
self.ring_and_event
.split_last_mut()
.expect(RING_AND_EVENT_ERROR)
.1
fn ring_ptr<A: volatile::access::Access>(
volatile_self: VolatilePtr<'_, Self, A>,
) -> VolatilePtr<'_, [MaybeUninit<le16>], A> {
let ring_and_event_ptr = map_field!(volatile_self.ring_and_event);
ring_and_event_ptr.split_at(ring_and_event_ptr.len() - 1).0
}

fn event_ref(&self) -> &MaybeUninit<le16> {
self.ring_and_event.last().expect(RING_AND_EVENT_ERROR)
}

fn event_mut(&mut self) -> &MaybeUninit<le16> {
self.ring_and_event.last_mut().expect(RING_AND_EVENT_ERROR)
fn event_ptr<A: volatile::access::Access>(
volatile_self: VolatilePtr<'_, Self, A>,
) -> VolatilePtr<'_, MaybeUninit<le16>, A> {
let ring_and_event_ptr = map_field!(volatile_self.ring_and_event);
ring_and_event_ptr.index(ring_and_event_ptr.len() - 1)
}
}

@@ -96,32 +97,53 @@ type UsedRing = GenericRing<[u8]>;

// Used ring is not supposed to be modified by the driver. Thus, we only have _ref methods (and not _mut methods).
impl UsedRing {
fn ring_ref(&self) -> &[UsedElem] {
// The last two bytes belong to the event field
UsedElem::slice_from(
&self.ring_and_event[..(self.ring_and_event.len() - size_of::<le16>())],
)
.expect(RING_AND_EVENT_ERROR)
fn ring_ptr<A: volatile::access::Access>(
volatile_self: VolatilePtr<'_, Self, A>,
) -> VolatilePtr<'_, [UsedElem], A> {
let ring_and_event_ptr = map_field!(volatile_self.ring_and_event);
let ring_len = (ring_and_event_ptr.len() - size_of::<le16>()) / size_of::<UsedElem>();

unsafe {
ring_and_event_ptr.map(|ring_and_event_ptr| {
NonNull::slice_from_raw_parts(ring_and_event_ptr.cast::<UsedElem>(), ring_len)
})
}
}

fn event_ref(&self) -> &le16 {
le16::ref_from_suffix(&self.ring_and_event).expect(RING_AND_EVENT_ERROR)
fn event_ptr<A: volatile::access::Access>(
volatile_self: VolatilePtr<'_, Self, A>,
) -> VolatilePtr<'_, le16, A> {
let ring_and_event_ptr = map_field!(volatile_self.ring_and_event);
let ring_and_event_len = ring_and_event_ptr.len();
let event_bytes_ptr = ring_and_event_ptr
.split_at(ring_and_event_len - size_of::<le16>())
.1;

unsafe { event_bytes_ptr.map(|event_bytes| event_bytes.cast::<le16>()) }
}
}

#[repr(C)]
#[derive(Copy, Clone, FromZeroes, FromBytes)]
#[derive(Copy, Clone)]
struct UsedElem {
id: le32,
len: le32,
}

struct DescrRing {
read_idx: u16,
descr_table: Box<[MaybeUninit<Descriptor>], DeviceAlloc>,
ref_ring: Box<[Option<Box<TransferToken>>]>,
avail_ring: Box<AvailRing, DeviceAlloc>,
used_ring: Box<UsedRing, DeviceAlloc>,
token_ring: Box<[Option<Box<TransferToken>>]>,
descr_table_ref: VolatileRef<'static, DescrTable>,
avail_ring_ref: VolatileRef<'static, AvailRing>,
used_ring_ref: VolatileRef<'static, UsedRing, ReadOnly>,

// The following fields should not be used directly, as they do not ensure volatile access.
#[deprecated = "_pinned_* fields are only for keeping the objects around. All access should be done through the respective *_ref fields."]
_pinned_descr_table: Pin<Box<DescrTable, DeviceAlloc>>,
#[deprecated = "_pinned_* fields are only for keeping the objects around. All access should be done through the respective *_ref fields."]
_pinned_avail_ring: Pin<Box<AvailRing, DeviceAlloc>>,
#[deprecated = "_pinned_* fields are only for keeping the objects around. All access should be done through the respective *_ref fields."]
_pinned_used_ring: Pin<Box<UsedRing, DeviceAlloc>>,
}

impl DescrRing {
@@ -228,32 +250,37 @@ impl DescrRing {
)
};

self.descr_table[write_indx] = MaybeUninit::new(descriptor);
let descr_table = self.descr_table_ref.as_mut_ptr();
map_field!(descr_table.descr_table)
.index(write_indx)
.write(MaybeUninit::new(descriptor));

desc_cnt += 1;
len -= 1;
}

self.ref_ring[index] = Some(Box::new(tkn));
let idx = self.avail_ring.index.get();
let len = self.avail_ring.ring_ref().len();
self.avail_ring.ring_mut()[idx as usize % len] = MaybeUninit::new((index as u16).into());
self.token_ring[index] = Some(Box::new(tkn));

let avail_ring = self.avail_ring_ref.as_mut_ptr();
let idx = map_field!(avail_ring.index).read().get();
let len = self.token_ring.len();
AvailRing::ring_ptr(avail_ring)
.index(idx as usize % len)
.write(MaybeUninit::new((index as u16).into()));

memory_barrier();
self.avail_ring.index = (self.avail_ring.index.get().wrapping_add(1)).into();
map_field!(avail_ring.index).update(|val| (val.get().wrapping_add(1)).into());

(0, 0)
}

fn poll(&mut self) {
while self.read_idx
!= unsafe { ptr::read_volatile(ptr::addr_of!(self.used_ring.index)).get() }
{
let cur_ring_index = self.read_idx as usize % self.used_ring.ring_ref().len();
let used_elem =
unsafe { ptr::read_volatile(&self.used_ring.ring_ref()[cur_ring_index]) };

let mut tkn = self.ref_ring[used_elem.id.get() as usize].take().expect(
let used_ring = self.used_ring_ref.as_ptr();
while self.read_idx != map_field!(used_ring.index).read().get() {
let cur_ring_index = self.read_idx as usize % self.token_ring.len();
let used_elem = UsedRing::ring_ptr(used_ring).index(cur_ring_index).read();

let mut tkn = self.token_ring[used_elem.id.get() as usize].take().expect(
"The buff_id is incorrect or the reference to the TransferToken was misplaced.",
);

@@ -276,15 +303,18 @@ impl DescrRing {
}

fn drv_enable_notif(&mut self) {
self.avail_ring.flags = 0.into();
let avail_ring = self.avail_ring_ref.as_mut_ptr();
map_field!(avail_ring.flags).write(0.into());
}

fn drv_disable_notif(&mut self) {
self.avail_ring.flags = 1.into();
let avail_ring = self.avail_ring_ref.as_mut_ptr();
map_field!(avail_ring.flags).write(1.into());
}

fn dev_is_notif(&self) -> bool {
self.used_ring.flags.get() & 1 == 0
let used_ring = self.used_ring_ref.as_ptr();
map_field!(used_ring.flags).read().get() & 1 == 0
}
}

@@ -374,10 +404,15 @@ impl Virtq for SplitVq {
let size = vq_handler.set_vq_size(size.0);
const ALLOCATOR: DeviceAlloc = DeviceAlloc;

// Allocate heap memory via a vec, leak and cast
let descr_table = Box::new_uninit_slice_in(size.into(), ALLOCATOR);
let mut descr_table = unsafe {
core::mem::transmute::<
Box<[MaybeUninit<Descriptor>], DeviceAlloc>,
Box<DescrTable, DeviceAlloc>,
>(Box::new_uninit_slice_in(size.into(), ALLOCATOR))
};
let descr_table_ref = unsafe { VolatileRef::new(NonNull::from(descr_table.as_mut())) };

let avail_ring = {
let mut avail_ring = {
let ring_and_event_len = usize::from(size) + 1;
let allocation = ALLOCATOR
.allocate(
@@ -396,6 +431,7 @@ impl Virtq for SplitVq {
)
}
};
let avail_ring_ref = unsafe { VolatileRef::new(NonNull::from(avail_ring.as_mut())) };

let used_ring = {
let ring_and_event_layout = Layout::array::<UsedElem>(size.into())
@@ -422,10 +458,12 @@ impl Virtq for SplitVq {
)
}
};
let used_ring_ref =
unsafe { VolatileRef::new_read_only(NonNull::from(used_ring.as_ref())) };

// Provide memory areas of the queues data structures to the device
vq_handler.set_ring_addr(paging::virt_to_phys(VirtAddr::from(
descr_table.as_ptr().expose_provenance(),
ptr::from_mut(descr_table.as_mut()).expose_provenance(),
)));
// As usize is safe here, as the *mut EventSuppr raw pointer is a thin pointer of size usize
vq_handler.set_drv_ctrl_addr(paging::virt_to_phys(VirtAddr::from(
@@ -435,15 +473,21 @@ impl Virtq for SplitVq {
ptr::from_ref(used_ring.as_ref()).expose_provenance(),
)));

// We have to set the deprecated fields.
#[allow(deprecated)]
let descr_ring = DescrRing {
read_idx: 0,
ref_ring: core::iter::repeat_with(|| None)
token_ring: core::iter::repeat_with(|| None)
.take(size.into())
.collect::<Vec<_>>()
.into_boxed_slice(),
descr_table,
avail_ring,
used_ring,
descr_table_ref,
avail_ring_ref,
used_ring_ref,

_pinned_descr_table: Box::into_pin(descr_table),
_pinned_avail_ring: Box::into_pin(avail_ring),
_pinned_used_ring: Box::into_pin(used_ring),
};

let notif_ctrl = NotifCtrl::new(ptr::with_exposed_provenance_mut(
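One design choice worth calling out: the used ring is device-written, so it is wrapped with `VolatileRef::new_read_only`, which encodes the access mode in the type and turns accidental driver writes into compile errors. A minimal sketch (not part of the commit) with a hypothetical `UsedRingHeader` type:

```rust
use core::ptr::NonNull;

use volatile::{map_field, VolatileRef};

// Hypothetical stand-in for the statically sized head of the used ring.
#[repr(C)]
struct UsedRingHeader {
    flags: u16,
    index: u16,
}

// `header` is assumed to point at memory that only the device writes.
fn device_index(header: NonNull<UsedRingHeader>) -> u16 {
    // SAFETY: the caller guarantees `header` is valid for reads.
    let header_ref = unsafe { VolatileRef::new_read_only(header) };
    let header_ptr = header_ref.as_ptr();

    // map_field!(header_ptr.index).write(0); // does not compile: `ReadOnly` access
    map_field!(header_ptr.index).read()
}
```

`dev_is_notif` and `poll` in the diff read `flags`, `index`, and the ring elements through exactly this kind of read-only pointer.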
