L4Re Operating System Framework
Interface and Usage Documentation
Loading...
Searching...
No Matches
region_mapping
Go to the documentation of this file.
1// -*- Mode: C++ -*-
2// vim:ft=cpp
7/*
8 * (c) 2008-2009 Adam Lackorzynski <adam@os.inf.tu-dresden.de>,
9 * Alexander Warg <warg@os.inf.tu-dresden.de>,
10 * Björn Döbel <doebel@os.inf.tu-dresden.de>
11 * economic rights: Technische Universität Dresden (Germany)
12 *
13 * License: see LICENSE.spdx (in this directory or the directories above)
14 */
15
16#pragma once
17
18#include <l4/cxx/avl_map>
19#include <l4/sys/types.h>
20#include <l4/re/rm>
21
22namespace L4Re { namespace Util {
23
24class Region
25{
26private:
27 l4_addr_t _start, _end;
28#ifdef CONFIG_L4RE_REGION_INFO
29 char _dbg_name[40]; // Not a 0-terminating string
30 unsigned char _dbg_name_len = 0;
31 static_assert(sizeof(_dbg_name) < 256);
32 Rm::Offset _dbg_backing_offset = 0;
33#endif
34
35public:
36 Region() noexcept : _start(~0UL), _end(~0UL) {}
37 Region(l4_addr_t addr) noexcept : _start(addr), _end(addr) {}
38 Region(l4_addr_t start, l4_addr_t end) noexcept
39 : _start(start), _end(end) {}
40 Region(l4_addr_t start, l4_addr_t end,
41 char const *name, unsigned name_len,
42 Rm::Offset backing_offset) noexcept
43 : _start(start), _end(end)
44 {
45#ifdef CONFIG_L4RE_REGION_INFO
46 _dbg_name_len = name_len > sizeof(_dbg_name)
47 ? sizeof(_dbg_name) : name_len;
48 for (unsigned i = 0; i < _dbg_name_len; ++i)
49 _dbg_name[i] = name[i];
50
51 _dbg_backing_offset = backing_offset;
52#else
53 (void)name;
54 (void)name_len;
55 (void)backing_offset;
56#endif
57 }
58 l4_addr_t start() const noexcept { return _start; }
59 l4_addr_t end() const noexcept { return _end; }
60 unsigned long size() const noexcept { return end() - start() + 1; }
61 bool invalid() const noexcept { return _start == ~0UL && _end == ~0UL; }
62 bool operator < (Region const &o) const noexcept
63 { return end() < o.start(); }
64 bool contains(Region const &o) const noexcept
65 { return o.start() >= start() && o.end() <= end(); }
66 bool operator == (Region const &o) const noexcept
67 { return o.start() == start() && o.end() == end(); }
68 ~Region() noexcept {}
69
70#ifdef CONFIG_L4RE_REGION_INFO
71 char const *name() const { return _dbg_name; }
72 unsigned char name_len() const { return _dbg_name_len; }
73 Rm::Offset backing_offset() const { return _dbg_backing_offset; }
74#else
75 char const *name() const { return "N/A"; }
76 unsigned char name_len() const { return 3; }
77 Rm::Offset backing_offset() const { return 0; }
78#endif
79};
80
81template<typename DS, typename OPS>
82class Region_handler
83{
84private:
85 L4Re::Rm::Offset _offs;
86 DS _mem;
87 l4_cap_idx_t _client_cap = L4_INVALID_CAP;
88 L4Re::Rm::Region_flags _flags;
89
90public:
91 typedef DS Dataspace;
92 typedef OPS Ops;
93 typedef typename OPS::Map_result Map_result;
94
95 Region_handler() noexcept : _offs(0), _mem(), _flags() {}
96
97 Region_handler(Dataspace const &mem, l4_cap_idx_t client_cap,
98 L4Re::Rm::Offset offset = 0,
99 L4Re::Rm::Region_flags flags = L4Re::Rm::Region_flags(0)) noexcept
100 : _offs(offset), _mem(mem), _client_cap(client_cap), _flags(flags)
101 {}
102
103 Dataspace const &memory() const noexcept
104 { return _mem; }
105
106 l4_cap_idx_t client_cap_idx() const noexcept
107 { return _client_cap; }
108
109 L4Re::Rm::Offset offset() const noexcept
110 { return _offs; }
111
112 constexpr bool is_ro() const noexcept
113 { return !(_flags & L4Re::Rm::F::W); }
114
115 L4Re::Rm::Region_flags caching() const noexcept
116 { return _flags & L4Re::Rm::F::Caching_mask; }
117
118 L4Re::Rm::Region_flags flags() const noexcept
119 { return _flags; }
120
121 Region_handler operator + (l4_int64_t offset) const noexcept
122 { Region_handler n = *this; n._offs += offset; return n; }
123
124 void free(l4_addr_t start, unsigned long size) const noexcept
125 { Ops::free(this, start, size); }
126
127 int map(l4_addr_t addr, Region const &r, bool writable,
128 Map_result *result) const
129 { return Ops::map(this, addr, r, writable, result); }
130
131 int map_info(l4_addr_t *start_addr, l4_addr_t *end_addr) const
132 { return Ops::map_info(this, start_addr, end_addr); }
133};
134
135template<typename Hdlr, template<typename T> class Alloc>
136class Region_map
137{
138protected:
139 typedef cxx::Avl_map<Region, Hdlr, cxx::Lt_functor, Alloc> Tree;
140
141 Tree _rm;
142 Tree _am;
143
144private:
145 l4_addr_t _start;
146 l4_addr_t _end;
147
148protected:
149 void set_limits(l4_addr_t start, l4_addr_t end) noexcept
150 {
151 _start = start;
152 _end = end;
153 }
154
155public:
156 typedef typename Tree::Item_type Item;
157 typedef typename Tree::Node Node;
158 typedef typename Tree::Key_type Key_type;
159 typedef Hdlr Region_handler;
160
161 typedef typename Tree::Iterator Iterator;
162 typedef typename Tree::Const_iterator Const_iterator;
163 typedef typename Tree::Rev_iterator Rev_iterator;
164 typedef typename Tree::Const_rev_iterator Const_rev_iterator;
165
166 Iterator begin() noexcept { return _rm.begin(); }
167 Const_iterator begin() const noexcept { return _rm.begin(); }
168 Iterator end() noexcept { return _rm.end(); }
169 Const_iterator end() const noexcept { return _rm.end(); }
170
171 Iterator area_begin() noexcept { return _am.begin(); }
172 Const_iterator area_begin() const noexcept { return _am.begin(); }
173 Iterator area_end() noexcept { return _am.end(); }
174 Const_iterator area_end() const noexcept { return _am.end(); }
175 Node area_find(Key_type const &c) const noexcept { return _am.find_node(c); }
176
177 l4_addr_t min_addr() const noexcept { return _start; }
178 l4_addr_t max_addr() const noexcept { return _end; }
179
180 Region_map(l4_addr_t start, l4_addr_t end) noexcept : _start(start), _end(end) {}
181
182 Node find(Key_type const &key) const noexcept
183 {
184 Node n = _rm.find_node(key);
185 if (!n)
186 return Node();
187
188 // 'find' should find any region overlapping with the searched one, the
189 // caller should check for further requirements
190 if (0)
191 if (!n->first.contains(key))
192 return Node();
193
194 return n;
195 }
196
197 Node lower_bound(Key_type const &key) const noexcept
198 {
199 Node n = _rm.lower_bound_node(key);
200 return n;
201 }
202
203 Node lower_bound_area(Key_type const &key) const noexcept
204 {
205 Node n = _am.lower_bound_node(key);
206 return n;
207 }
208
209 l4_addr_t attach_area(l4_addr_t addr, unsigned long size,
210 L4Re::Rm::Flags flags = L4Re::Rm::Flags(0),
211 unsigned char align = L4_PAGESHIFT) noexcept
212 {
213 if (size < 2)
214 return L4_INVALID_ADDR;
215
216 Region c;
217
218 if (!(flags & L4Re::Rm::F::Search_addr))
219 {
220 c = Region(addr, addr + size - 1);
221 Node r = _am.find_node(c);
222 if (r)
223 return L4_INVALID_ADDR;
224 }
225
226 while (flags & L4Re::Rm::F::Search_addr)
227 {
228 if (addr < min_addr() || (addr + size - 1) > max_addr())
229 addr = min_addr();
230
231 addr = find_free(addr, max_addr(), size, align, flags);
232 if (addr == L4_INVALID_ADDR)
233 return L4_INVALID_ADDR;
234
235 c = Region(addr, addr + size - 1);
236 Node r = _am.find_node(c);
237 if (!r)
238 break;
239
240 if (r->first.end() >= max_addr())
241 return L4_INVALID_ADDR;
242
243 addr = r->first.end() + 1;
244 }
245
246 if (_am.insert(c, Hdlr(typename Hdlr::Dataspace(), 0, 0, flags.region_flags())).second == 0)
247 return addr;
248
249 return L4_INVALID_ADDR;
250 }
251
252 bool detach_area(l4_addr_t addr) noexcept
253 {
254 if (_am.remove(addr))
255 return false;
256
257 return true;
258 }
259
260 void *attach(void *addr, unsigned long size, Hdlr const &hdlr,
261 L4Re::Rm::Flags attach_flags = L4Re::Rm::Flags(0),
262 unsigned char align = L4_PAGESHIFT,
263 char const *name = nullptr, unsigned name_len = 0,
264 L4Re::Rm::Offset backing_offset = 0) noexcept
265 {
266 if (size < 2)
267 return L4_INVALID_PTR;
268
269 l4_addr_t beg, end;
270 int err = hdlr.map_info(&beg, &end);
271 if (err > 0)
272 {
273 // Mapping address determined by underlying dataspace. Make sure we
274 // prevent any additional alignment. We already know the place!
275 beg += hdlr.offset();
276 end = beg + size - 1U;
277 align = L4_PAGESHIFT;
278
279 // In case of exact mappings, the supplied address must match because
280 // we cannot remap.
281 if (!(attach_flags & L4Re::Rm::F::Search_addr)
282 && reinterpret_cast<l4_addr_t>(addr) != beg)
283 return L4_INVALID_PTR;
284
285 // When searching for a suitable address, the start must cover the
286 // dataspace beginning to "find" the right spot.
287 if ((attach_flags & L4Re::Rm::F::Search_addr)
288 && reinterpret_cast<l4_addr_t>(addr) > beg)
289 return L4_INVALID_PTR;
290 }
291 else if (err == 0)
292 {
293 beg = reinterpret_cast<l4_addr_t>(addr);
294 end = max_addr();
295 }
296 else if (err < 0)
297 return L4_INVALID_PTR;
298
299 if (attach_flags & L4Re::Rm::F::In_area)
300 {
301 Node r = _am.find_node(Region(beg, beg + size - 1));
302 if (!r || (r->second.flags() & L4Re::Rm::F::Reserved))
303 return L4_INVALID_PTR;
304
305 end = r->first.end();
306 }
307
308 if (attach_flags & L4Re::Rm::F::Search_addr)
309 {
310 beg = find_free(beg, end, size, align, attach_flags);
311 if (beg == L4_INVALID_ADDR)
312 return L4_INVALID_PTR;
313 }
314
315 if (!(attach_flags & (L4Re::Rm::F::Search_addr | L4Re::Rm::F::In_area))
316 && _am.find_node(Region(beg, beg + size - 1)))
317 return L4_INVALID_PTR;
318
319 if (beg < min_addr() || beg + size - 1 > end)
320 return L4_INVALID_PTR;
321
322 if (_rm.insert(Region(beg, beg + size - 1,
323 name, name_len, backing_offset), hdlr).second
324 == 0)
325 return reinterpret_cast<void*>(beg);
326
327 return L4_INVALID_PTR;
328 }
329
330 int detach(void *addr, unsigned long sz, unsigned flags,
331 Region *reg, Hdlr *hdlr) noexcept
332 {
333 l4_addr_t a = reinterpret_cast<l4_addr_t>(addr);
334 Region dr(a, a + sz - 1);
335 Region res(~0UL, 0);
336
337 Node r = find(dr);
338 if (!r)
339 return -L4_ENOENT;
340
341 Region g = r->first;
342 Hdlr const &h = r->second;
343
344 if (flags & L4Re::Rm::Detach_overlap || dr.contains(g))
345 {
346 // Successful removal of the AVL tree item also frees the node.
347 Hdlr h_copy = h;
348
349 if (_rm.remove(g))
350 return -L4_ENOENT;
351
352 if (!(flags & L4Re::Rm::Detach_keep) && (h_copy.flags() & L4Re::Rm::F::Detach_free))
353 h_copy.free(0, g.size());
354
355 if (hdlr)
356 *hdlr = h_copy;
357 if (reg)
358 *reg = g;
359
360 if (find(dr))
362 else
363 return Rm::Detached_ds;
364 }
365 else if (dr.start() <= g.start())
366 {
367 // Move the start of a region.
368
369 if (!(flags & L4Re::Rm::Detach_keep) && (h.flags() & L4Re::Rm::F::Detach_free))
370 h.free(0, dr.end() + 1 - g.start());
371
372 unsigned long sz = dr.end() + 1 - g.start();
373 Item &cn = const_cast<Item &>(*r);
374 cn.first = Region(dr.end() + 1, g.end(), g.name(), g.name_len(),
375 g.backing_offset() + sz);
376 cn.second = cn.second + sz;
377 if (hdlr)
378 *hdlr = Hdlr();
379 if (reg)
380 *reg = Region(g.start(), dr.end());
381 if (find(dr))
383 else
384 return Rm::Kept_ds;
385 }
386 else if (dr.end() >= g.end())
387 {
388 // Move the end of a region.
389
390 if (!(flags & L4Re::Rm::Detach_keep)
391 && (h.flags() & L4Re::Rm::F::Detach_free))
392 h.free(dr.start() - g.start(), g.end() + 1 - dr.start());
393
394 Item &cn = const_cast<Item &>(*r);
395 cn.first = Region(g.start(), dr.start() - 1, g.name(), g.name_len(),
396 g.backing_offset());
397 if (hdlr)
398 *hdlr = Hdlr();
399 if (reg)
400 *reg = Region(dr.start(), g.end());
401
402 if (find(dr))
404 else
405 return Rm::Kept_ds;
406 }
407 else if (g.contains(dr))
408 {
409 // Split a single region that contains the new region.
410
411 if (!(flags & L4Re::Rm::Detach_keep) && (h.flags() & L4Re::Rm::F::Detach_free))
412 h.free(dr.start() - g.start(), dr.size());
413
414 // First move the end off the existing region before the new one.
415 Item &cn = const_cast<Item &>(*r);
416 cn.first = Region(g.start(), dr.start()-1, g.name(), g.name_len(),
417 g.backing_offset());
418
419 int err;
420
421 // Insert a second region for the remaining tail of
422 // the old existing region.
423 auto tail_sz = dr.end() + 1 - g.start();
424 err = _rm.insert(Region(dr.end() + 1, g.end(), g.name(), g.name_len(),
425 g.backing_offset() + tail_sz),
426 h + tail_sz).second;
427
428 if (err)
429 return err;
430
431 if (hdlr)
432 *hdlr = h;
433 if (reg)
434 *reg = dr;
435 return Rm::Split_ds;
436 }
437
438 return -L4_ENOENT;
439 }
440
441 l4_addr_t find_free(l4_addr_t start, l4_addr_t end, l4_addr_t size,
442 unsigned char align, L4Re::Rm::Flags attach_flags) const noexcept;
443};
444
445template<typename Hdlr, template<typename T> class Alloc>
447Region_map<Hdlr, Alloc>::find_free(l4_addr_t start, l4_addr_t end,
448 unsigned long size, unsigned char align, L4Re::Rm::Flags attach_flags) const noexcept
449{
450 l4_addr_t addr = start;
451
452 if (addr == ~0UL || addr < min_addr() || addr >= end)
453 addr = min_addr();
454
455 addr = l4_round_size(addr, align);
456 Node r;
457
458 for (;;)
459 {
460 if (addr > 0 && addr - 1 > end - size)
461 return L4_INVALID_ADDR;
462
463 Region c(addr, addr + size - 1);
464 r = _rm.find_node(c);
465
466 if (!r)
467 {
468 if (!(attach_flags & L4Re::Rm::F::In_area) && (r = _am.find_node(c)))
469 {
470 if (r->first.end() > end - size)
471 return L4_INVALID_ADDR;
472
473 addr = l4_round_size(r->first.end() + 1, align);
474 continue;
475 }
476
477 break;
478 }
479 else if (r->first.end() > end - size)
480 return L4_INVALID_ADDR;
481
482 addr = l4_round_size(r->first.end() + 1, align);
483 }
484
485 if (!r)
486 return addr;
487
488 return L4_INVALID_ADDR;
489}
490
491}}
AVL map.
@ Detached_ds
Detached data space.
Definition rm:91
@ Detach_again
Detached data space, more to do.
Definition rm:96
@ Split_ds
Split data space, and done.
Definition rm:93
@ Kept_ds
Kept data space.
Definition rm:92
@ Detach_overlap
Do an unmap of all overlapping regions.
Definition rm:237
@ Detach_keep
Do not free the detached data space, ignore the F::Detach_free.
Definition rm:246
ITEM_TYPE Item_type
Type for the items stored in the set.
Definition avl_set:147
unsigned long l4_addr_t
Address type.
Definition l4int.h:34
signed long long l4_int64_t
Signed 64bit value.
Definition l4int.h:30
unsigned long l4_cap_idx_t
Capability selector type.
Definition types.h:336
@ L4_INVALID_CAP
Invalid capability selector.
Definition consts.h:153
@ L4_ENOENT
No such entity.
Definition err.h:34
#define L4_INVALID_PTR
Invalid address as pointer type.
Definition consts.h:512
#define L4_PAGESHIFT
Size of a page, log2-based.
Definition consts.h:26
l4_addr_t l4_round_size(l4_addr_t value, unsigned char bits) L4_NOTHROW
Round value up to the next alignment with bits size.
Definition consts.h:484
@ L4_INVALID_ADDR
Invalid address.
Definition consts.h:505
Common L4 ABI Data Types.
Documentation of the L4 Runtime Environment utility functionality in C++.
Definition l4re.dox:21
L4Re C++ Interfaces.
Definition cmd_control:14
Region mapper interface.
@ Reserved
Region is reserved (blocked).
Definition rm:152
@ Detach_free
Free the portion of the data space after detach.
Definition rm:148
@ W
Writable region.
Definition rm:135
@ Caching_mask
Mask of all Rm cache bits.
Definition rm:156
@ Search_addr
Search for a suitable address range.
Definition rm:113
@ In_area
Search only in area, or map into area.
Definition rm:115