// vmbus_channel/gpadl_ring.rs
use crate::ChannelClosed;
7use crate::RawAsyncChannel;
8use crate::SignalVmbusChannel;
9use crate::bus::OpenData;
10use crate::bus::OpenRequest;
11use crate::channel::DeviceResources;
12use crate::gpadl::GpadlMapView;
13use crate::gpadl::GpadlView;
14use crate::gpadl::UnknownGpadlId;
15use guestmem::GuestMemory;
16use guestmem::GuestMemoryError;
17use guestmem::LockedPages;
18use pal_async::driver::Driver;
19use ring::IncomingRing;
20use ring::OutgoingRing;
21use std::fmt::Debug;
22use std::sync::atomic::AtomicU8;
23use std::sync::atomic::AtomicU32;
24use vmbus_ring as ring;
25use vmcore::interrupt::Interrupt;
26use vmcore::notify::PolledNotify;
27
/// A GPADL view restricted to a single page-aligned range whose length is a
/// whole number of pages, as required for ring buffer mappings.
#[derive(Clone)]
pub struct AlignedGpadlView {
    // The underlying GPADL view; kept so the mapping stays alive and so the
    // page list can be re-read in `gpns`.
    gpadl: GpadlView,
    // Starting page index of this view within the GPADL's page list.
    offset: u32,
    // Number of pages in this view.
    len: u32,
}
35
36impl AlignedGpadlView {
37 pub fn new(gpadl: GpadlView) -> Result<Self, GpadlView> {
39 if gpadl.range_count() != 1 {
40 return Err(gpadl);
41 }
42 let range = gpadl.first().unwrap();
43 if !range.len().is_multiple_of(ring::PAGE_SIZE) || range.offset() != 0 {
44 return Err(gpadl);
45 }
46 let count = range.gpns().len() as u32;
47 Ok(AlignedGpadlView {
48 gpadl,
49 offset: 0,
50 len: count,
51 })
52 }
53
54 pub fn split(
56 self,
57 offset: u32,
58 ) -> Result<(AlignedGpadlView, AlignedGpadlView), AlignedGpadlView> {
59 if offset == 0 || self.len <= offset {
60 return Err(self);
61 }
62 let left = AlignedGpadlView {
63 gpadl: self.gpadl.clone(),
64 offset: 0,
65 len: offset,
66 };
67 let right = AlignedGpadlView {
68 gpadl: self.gpadl,
69 offset,
70 len: self.len - offset,
71 };
72 Ok((left, right))
73 }
74
75 pub fn gpns(&self) -> &[u64] {
77 &self.gpadl.first().unwrap().gpns()
78 [self.offset as usize..self.offset as usize + self.len as usize]
79 }
80}
81
/// Locked guest pages backing a ring buffer.
struct GpadlPagedMemory {
    // Held only to keep the GPADL mapping alive while the pages are locked.
    _gpadl: AlignedGpadlView,
    // The locked pages: the control page followed by the data pages locked
    // twice in sequence (see `GpadlPagedMemory::new`).
    pages: LockedPages,
}
86
87impl GpadlPagedMemory {
88 fn new(gpadl: AlignedGpadlView, mem: &GuestMemory) -> Result<Self, GuestMemoryError> {
89 let gpns: Vec<u64> = gpadl
91 .gpns()
92 .iter()
93 .chain(gpadl.gpns().iter().skip(1))
94 .copied()
95 .collect();
96 let pages = mem.lock_gpns(false, &gpns)?;
97 Ok(Self {
98 _gpadl: gpadl,
99 pages,
100 })
101 }
102}
103
// Exposes the locked pages as ring memory. Page 0 is the control page; the
// remaining pages are the data pages, which were locked twice in sequence
// (see `GpadlPagedMemory::new`), so `data` can be indexed past the logical
// page count to read across the ring's wrap point.
impl ring::PagedMemory for GpadlPagedMemory {
    fn control(&self) -> &[AtomicU8; ring::PAGE_SIZE] {
        self.pages.pages()[0]
    }

    #[inline]
    fn data(&self, page: usize) -> &[AtomicU8; ring::PAGE_SIZE] {
        // +1 skips the control page. `page` may index into the second copy
        // of the data pages.
        self.pages.pages()[page + 1]
    }

    fn data_page_count(&self) -> usize {
        // One control page plus two copies of the data pages were locked, so
        // the logical data page count is half the remainder.
        (self.pages.pages().len() - 1) / 2
    }
}
118
119impl Debug for GpadlPagedMemory {
120 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
121 f.debug_struct("GpadlPagedMemory").finish()
122 }
123}
124
/// Ring buffer memory backed by a GPADL-described guest memory region.
#[derive(Debug)]
pub struct GpadlRingMem {
    // The paged ring over the locked (double-mapped) guest pages.
    ring: ring::PagedRingMem<GpadlPagedMemory>,
}
131
132impl GpadlRingMem {
133 pub fn new(gpadl: AlignedGpadlView, mem: &GuestMemory) -> Result<Self, GuestMemoryError> {
135 Ok(Self {
136 ring: ring::PagedRingMem::new(GpadlPagedMemory::new(gpadl, mem)?),
137 })
138 }
139}
140
// Forward all ring memory operations to the underlying paged ring; the
// methods are thin delegations, marked `#[inline]` since they sit on the
// ring I/O hot path.
impl ring::RingMem for GpadlRingMem {
    #[inline]
    fn len(&self) -> usize {
        self.ring.len()
    }

    #[inline]
    fn read_at(&self, addr: usize, data: &mut [u8]) {
        self.ring.read_at(addr, data)
    }

    #[inline]
    fn write_at(&self, addr: usize, data: &[u8]) {
        self.ring.write_at(addr, data)
    }

    #[inline]
    fn read_aligned(&self, addr: usize, data: &mut [u8]) {
        self.ring.read_aligned(addr, data)
    }

    #[inline]
    fn write_aligned(&self, addr: usize, data: &[u8]) {
        self.ring.write_aligned(addr, data)
    }

    #[inline]
    fn control(&self) -> &[AtomicU32; vmbus_ring::CONTROL_WORD_COUNT] {
        self.ring.control()
    }
}
172
/// Errors that can occur while mapping GPADL ring buffers or opening a
/// GPADL-backed channel.
#[derive(Debug, thiserror::Error)]
pub enum Error {
    /// The ring GPADL was not a single page-aligned range, or could not be
    /// split at the requested ring offset.
    #[error("invalid ring buffer gpadl")]
    InvalidRingGpadl,
    /// The GPADL ID in the open request was not found in the GPADL map.
    #[error(transparent)]
    UnknownGpadlId(#[from] UnknownGpadlId),
    /// Guest memory could not be locked or accessed.
    #[error(transparent)]
    Memory(#[from] GuestMemoryError),
    /// The ring buffer contents were invalid.
    #[error(transparent)]
    Ring(#[from] ring::Error),
    /// The channel event could not be made pollable on the I/O driver.
    #[error("io driver error")]
    Driver(#[source] std::io::Error),
}
192
193pub fn make_rings(
195 mem: &GuestMemory,
196 gpadl_map: &GpadlMapView,
197 open_data: &OpenData,
198) -> Result<(IncomingRing<GpadlRingMem>, OutgoingRing<GpadlRingMem>), Error> {
199 let gpadl = AlignedGpadlView::new(gpadl_map.map(open_data.ring_gpadl_id)?)
200 .map_err(|_| Error::InvalidRingGpadl)?;
201 let (in_gpadl, out_gpadl) = gpadl
202 .split(open_data.ring_offset)
203 .map_err(|_| Error::InvalidRingGpadl)?;
204 Ok((
205 IncomingRing::new(GpadlRingMem::new(in_gpadl, mem)?)?,
206 OutgoingRing::new(GpadlRingMem::new(out_gpadl, mem)?)?,
207 ))
208}
209
210pub fn gpadl_channel(
212 driver: &(impl Driver + ?Sized),
213 resources: &DeviceResources,
214 open_request: &OpenRequest,
215 channel_idx: u16,
216) -> Result<RawAsyncChannel<GpadlRingMem>, Error> {
217 let (in_ring, out_ring) = make_rings(
218 resources.offer_resources.ring_memory(open_request),
219 &resources.gpadl_map,
220 &open_request.open_data,
221 )?;
222
223 let event = Box::new(GpadlChannelSignal {
224 event: resources.channels[channel_idx as usize]
225 .event
226 .clone()
227 .pollable(driver)
228 .map_err(Error::Driver)?,
229
230 interrupt: open_request.interrupt.clone(),
231 });
232
233 Ok(RawAsyncChannel {
234 in_ring,
235 out_ring,
236 signal: event,
237 })
238}
239
/// Channel signaling state pairing the pollable wait-for-signal event with
/// the interrupt used to signal the remote endpoint.
struct GpadlChannelSignal {
    // Waited on in `poll_for_signal`.
    event: PolledNotify,
    // Delivered in `signal_remote`.
    interrupt: Interrupt,
}
244
245impl SignalVmbusChannel for GpadlChannelSignal {
246 fn signal_remote(&self) {
247 self.interrupt.deliver();
248 }
249
250 fn poll_for_signal(
251 &self,
252 cx: &mut std::task::Context<'_>,
253 ) -> std::task::Poll<Result<(), ChannelClosed>> {
254 self.event.poll_wait(cx).map(Ok)
260 }
261}