// L4Re utility namespace: region-mapping support classes.
namespace L4Re {
namespace Util {
/// Construct an invalid region: both bounds carry the ~0UL sentinel
/// that invalid() tests for.
Region() throw() : _start(~0UL), _end(~0UL) {}
/// Construct a one-address region covering exactly @a addr
/// (start == end == addr).
Region(l4_addr_t addr) throw() : _start(addr), _end(addr) {}
// NOTE(review): initializer list of the two-argument constructor; its
// signature line (presumably `Region(l4_addr_t start, l4_addr_t end)
// throw()`) is not visible in this extract — confirm against the
// original header.
: _start(start), _end(end) {}
/// \return First (lowest) address of the region.
l4_addr_t start() const throw() { return _start; }
/// \return Last (highest) address of the region; bounds are inclusive
/// (see size(), which adds 1).
l4_addr_t end() const throw() { return _end; }
/// \return Size of the region in bytes; the +1 accounts for the
/// inclusive end bound. NOTE(review): for an invalid region this
/// arithmetic yields 1 — callers should check invalid() first.
unsigned long size() const throw() { return end() - start() + 1; }
/// \return True if both bounds still hold the ~0UL sentinel set by the
/// default constructor.
bool invalid() const throw() { return _start == ~0UL && _end == ~0UL; }
/// Strict ordering used for tree lookups: this region is "less" only
/// if it ends before @a o starts. Consequently, overlapping regions are
/// mutually not-less, i.e. they compare as equivalent keys.
bool operator < (Region const &o) const throw()
{ return end() < o.start(); }
51 bool contains(Region
const &o)
const throw()
52 {
return o.start() >= start() && o.end() <= end(); }
53 bool operator == (Region
const &o)
const throw()
54 {
return o.start() == start() && o.end() == end(); }
/// Handler for a single attached region, parameterized over the
/// dataspace type and the operations backend.
/// NOTE(review): the class head and several members between these
/// declarations are missing from this extract.
template< typename DS, typename OPS >
unsigned short _flags;  // region flags as returned by flags()
/// Result type of a map operation, forwarded from the OPS backend.
typedef typename OPS::Map_result Map_result;
/// Default handler: zero offset, default-constructed dataspace, no
/// flags. NOTE(review): `_client_cap` is absent from this initializer
/// list and is therefore left uninitialized — consider initializing it
/// to the invalid capability selector (needs confirming upstream).
Region_handler() throw() : _offs(0), _mem(), _flags() {}
/// Construct a handler for dataspace @a mem on behalf of a client.
/// \param mem         backing dataspace
/// \param client_cap  capability index of the owning client
/// \param offset      offset into the dataspace (default 0)
/// \param flags       region flags (default 0)
/// NOTE(review): `flags` is `unsigned` but is stored in the
/// `unsigned short _flags` member — upper bits are silently truncated;
/// confirm callers never pass wider flag values. The (empty) body of
/// this constructor is not visible in this extract.
Region_handler(Dataspace const &mem, l4_cap_idx_t client_cap,
               l4_addr_t offset = 0, unsigned flags = 0) throw()
: _offs(offset), _mem(mem), _client_cap(client_cap), _flags(flags)
/// \return Reference to the dataspace backing this region.
Dataspace const &memory() const throw() { return _mem; }
/// \return Capability index of the client this region belongs to.
l4_cap_idx_t client_cap_idx() const throw() { return _client_cap; }
/// \return Offset into the backing dataspace at which the region starts.
l4_addr_t offset() const throw() { return _offs; }
/// \return The region flags stored for this handler.
unsigned flags() const throw() { return _flags; }
84 Region_handler operator + (
long offset)
const throw()
85 { Region_handler n = *
this; n._offs += offset;
return n; }
// Forward the unmap request for the given range to the operations
// backend. NOTE(review): the signature line of this method is missing
// from this extract; only the body is visible.
{ Ops::unmap(this, va, ds_offs, size); }
/// Ask the backend to free @a size bytes of backing store starting at
/// dataspace-relative offset @a start.
void free(l4_addr_t start, unsigned long size) const throw()
{ Ops::free(this, start, size); }
/// Forward a take() to the operations backend (presumably acquires a
/// reference on the handler's resources — confirm in OPS).
void take() const { Ops::take(this); }
/// Forward a release() to the operations backend (presumably drops the
/// reference acquired by take() — confirm in OPS).
void release() const { Ops::release(this); }
/// Forward a map request to the operations backend.
/// \param addr      address to establish the mapping at
/// \param r         region being mapped
/// \param writable  whether a writable mapping is requested
/// \param result    out: backend-specific map result
/// \return Return code of Ops::map().
int map(l4_addr_t addr, Region const &r, bool writable,
        Map_result *result) const
{ return Ops::map(this, addr, r, writable, result); }
/// Region map, parameterized over the region-handler type and an
/// allocator template. NOTE(review): the class head and the `Tree`
/// member declarations are missing from this extract.
template< typename Hdlr, template< typename T > class Alloc >
typedef typename Tree::Item_type Item;  // tree item (key/value pair) type
typedef typename Tree::Node Node;       // tree node handle type
typedef typename Tree::Key_type Key_type;  // lookup key type
typedef Hdlr Region_handler;            // handler type in use by this map
typedef typename Tree::Iterator Iterator;            // mutable forward iterator
typedef typename Tree::Const_iterator Const_iterator;  // const forward iterator
typedef typename Tree::Rev_iterator Rev_iterator;    // mutable reverse iterator
typedef typename Tree::Const_rev_iterator Const_rev_iterator;  // const reverse iterator
/// Forward iterators over the attached regions (the `_rm` tree).
Iterator begin() throw() { return _rm.begin(); }
Const_iterator begin() const throw() { return _rm.begin(); }
Iterator end() throw() { return _rm.end(); }
Const_iterator end() const throw() { return _rm.end(); }
/// Forward iterators over the reserved areas (the `_am` tree).
Iterator area_begin() throw() { return _am.begin(); }
Const_iterator area_begin() const throw() { return _am.begin(); }
Iterator area_end() throw() { return _am.end(); }
Const_iterator area_end() const throw() { return _am.end(); }
/// Look up the area node matching key @a c in the area tree.
Node area_find(Key_type const &c) const throw() { return _am.find_node(c); }
/// \return Lowest address managed by this region map.
l4_addr_t min_addr() const throw() { return _start; }
/// \return Highest address managed by this region map.
l4_addr_t max_addr() const throw() { return _end; }
/// Find the region node that fully contains @a key. The tree lookup
/// can return a node that merely overlaps the key, hence the explicit
/// contains() check. NOTE(review): lines between the lookup and the
/// check (and the return paths) are missing from this extract.
Node find(Key_type const &key) const throw()
Node n = _rm.find_node(key);
// reject nodes that overlap but do not fully contain the key
if (!n->first.contains(key))
/// Find the first region node not ordered below @a key.
/// NOTE(review): remainder of the body is missing from this extract.
Node lower_bound(Key_type const &key) const throw()
Node n = _rm.lower_bound_node(key);
/// Find the first area node not ordered below @a key (area tree).
/// NOTE(review): remainder of the body is missing from this extract.
Node lower_bound_area(Key_type const &key) const throw()
Node n = _am.lower_bound_node(key);
// --- attach_area (fragmentary in this extract) ---------------------------
// Reserve an area of the requested size. Without the Search flag the
// caller-supplied address range is probed directly; with Search set,
// the loop below scans for a range free of existing areas.
unsigned flags = None,
// Fixed-address case: probe exactly the requested range for a collision.
if (!(flags & Search))
c = Region(addr, addr + size - 1);
Node r = _am.find_node(c);
// Search case: iterate until a collision-free candidate is found.
while (flags & Search)
// give up once the candidate range leaves the managed window
if (addr < min_addr() || (addr + size - 1) > max_addr())
addr = find_free(addr, max_addr(), size, align, flags);
c = Region(addr, addr + size - 1);
Node r = _am.find_node(c);
// collision: skip just past the blocking area and retry
if (r->first.end() >= max_addr())
addr = r->first.end() + 1;
// record the area, using a handler with an empty dataspace as marker
if (_am.insert(c, Hdlr(typename Hdlr::Dataspace(), 0, flags)).second == 0)
// --- detach_area (fragmentary) -------------------------------------------
// Remove the reserved area keyed by @a addr from the area tree.
if (_am.remove(addr))
// --- attach (fragmentary in this extract) --------------------------------
// Attach a region of @a size bytes handled by @a hdlr, optionally
// searching for a free range (Search) or placing inside a reserved
// area (In_area); alignment defaults to page granularity.
void *attach(void *addr, unsigned long size, Hdlr const &hdlr,
             unsigned flags = None,
             unsigned char align = L4_PAGESHIFT) throw()
// In_area case: bound the placement window by the enclosing area.
Node r = _am.find_node(Region(beg, beg + size - 1));
end = r->first.end();
// search for a free range inside [beg, end]
beg = find_free(beg, end, size, align, flags);
// fixed placement must not fall into a reserved area
if (!(flags & (Search | In_area)) && _am.find_node(Region(beg, beg + size - 1)))
// reject ranges outside the managed window
if (beg < min_addr() || beg + size -1 > end)
// record the region; a failed insert means an overlap slipped through
if (_rm.insert(Region(beg, beg + size -1), hdlr).second == 0)
// --- detach (fragmentary in this extract) --------------------------------
// Detach the range [addr, addr+sz) from the region it hits, handling
// three overlap shapes: head overlap (shrink from the front), tail
// overlap (shrink from the back), and interior overlap (split in two).
// `dr` is presumably the detach range and `g` the region found —
// confirm against the missing lines.
int detach(void *addr, unsigned long sz, unsigned flags,
           Region *reg, Hdlr *hdlr) throw()
Hdlr const &h = r->second;
// Head overlap: the detach range covers the front of the region.
else if (dr.start() <= g.start())
// free the backing store for the detached front part
h.free(0, dr.end() + 1 - g.start());
unsigned long sz = dr.end() + 1 - g.start();
// NOTE(review): C-style cast combined with const_cast to mutate the
// tree item in place — prefer a single named cast if ever reworked.
Item *cn = const_cast<Item*>((Item const *)r);
cn->first = Region(dr.end() + 1, g.end());
// advance the handler's dataspace offset past the detached part
cn->second = cn->second + sz;
if (hdlr) *hdlr = Hdlr();
if (reg) *reg = Region(g.start(), dr.end());
// Tail overlap: the detach range covers the back of the region.
else if (dr.end() >= g.end())
h.free(dr.start() - g.start(), g.end() + 1 - dr.start());
Item *cn = const_cast<Item*>((Item const*)r);
cn->first = Region(g.start(), dr.start() -1);
if (hdlr) *hdlr = Hdlr();
if (reg) *reg = Region(dr.start(), g.end());
// Interior overlap: split the region into two around the detach range.
else if (g.contains(dr))
h.free(dr.start() - g.start(), dr.size());
// shrink the existing node to the front part ...
const_cast<Item*>((Item const *)r)->first = Region(g.start(), dr.start()-1);
// ... and insert the tail part with the handler offset advanced
err = _rm.insert(Region(dr.end() + 1, g.end()), h + (dr.end() + 1 - g.start())).second;
// Declaration tail of find_free(); the leading parameters (start
// address, search end, size) are missing from this extract. Definition
// follows below.
unsigned char align, unsigned flags) const throw();
// --- find_free definition (fragmentary in this extract) ------------------
// Scan for a free, suitably aligned range of @a size bytes, skipping
// both attached regions (_rm) and — unless In_area is set — reserved
// areas (_am).
template< typename Hdlr, template< typename T > class Alloc >
unsigned long size, unsigned char align, unsigned flags) const throw()
// candidate exhausted the managed window (~0UL is the alignment-
// overflow sentinel)
if (addr == ~0UL || addr < min_addr() || addr >= end)
// presumably an overflow-safe form of `addr + size - 1 > end` — confirm
if (addr > 0 && addr - 1 > end - size)
Region c(addr, addr + size - 1);
r = _rm.find_node(c);
// also reject collisions with reserved areas unless mapping into one
if (!(flags & In_area) && (r = _am.find_node(c)))
// blocking region/area leaves no room before `end`
if (r->first.end() > end - size)
else if (r->first.end() > end - size)
Do not free the detached data space, ignore the Detach_free.
Invalid capability selector.
Common L4 ABI Data Types.
l4_addr_t l4_round_size(l4_umword_t value, unsigned char bits) L4_NOTHROW
Round value up to the next alignment boundary of size 2^bits.
Split data space, and done.
unsigned long l4_cap_idx_t
L4 Capability selector Type.
Search for a suitable address range.
Do an unmap of all overlapping regions.
Detached data space, more to do.
#define L4_INVALID_PTR
Invalid address as pointer type.
Free the portion of the data space after detach.
Const_iterator begin() const
Get the constant forward iterator for the first element in the set.
Region is reserved (blocked)
#define L4_PAGESHIFT
Size of a page, log2-based.
unsigned long l4_addr_t
Address type.
Mask of all Rm cache bits.
Search only in area, or map into area.