migration/rdma: Plug memory leaks in qemu_rdma_registration_stop()
[qemu/armbru.git] / disas / libvixl / vixl / a64 / cpu-a64.h
blobcdf09a6af1741ff99fd2e021bdbd9e5a0677c0ef
1 // Copyright 2014, ARM Limited
2 // All rights reserved.
3 //
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions are met:
6 //
7 // * Redistributions of source code must retain the above copyright notice,
8 // this list of conditions and the following disclaimer.
9 // * Redistributions in binary form must reproduce the above copyright notice,
10 // this list of conditions and the following disclaimer in the documentation
11 // and/or other materials provided with the distribution.
12 // * Neither the name of ARM Limited nor the names of its contributors may be
13 // used to endorse or promote products derived from this software without
14 // specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
17 // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18 // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19 // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
20 // FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21 // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
22 // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
23 // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24 // OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
25 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 #ifndef VIXL_CPU_A64_H
28 #define VIXL_CPU_A64_H
30 #include "vixl/globals.h"
31 #include "vixl/a64/instructions-a64.h"
33 namespace vixl {
35 class CPU {
36 public:
37 // Initialise CPU support.
38 static void SetUp();
40 // Ensures the data at a given address and with a given size is the same for
41 // the I and D caches. I and D caches are not automatically coherent on ARM
42 // so this operation is required before any dynamically generated code can
43 // safely run.
44 static void EnsureIAndDCacheCoherency(void *address, size_t length);
46 // Handle tagged pointers.
47 template <typename T>
48 static T SetPointerTag(T pointer, uint64_t tag) {
49 VIXL_ASSERT(is_uintn(kAddressTagWidth, tag));
51 // Use C-style casts to get static_cast behaviour for integral types (T),
52 // and reinterpret_cast behaviour for other types.
54 uint64_t raw = (uint64_t)pointer;
55 VIXL_STATIC_ASSERT(sizeof(pointer) == sizeof(raw));
57 raw = (raw & ~kAddressTagMask) | (tag << kAddressTagOffset);
58 return (T)raw;
61 template <typename T>
62 static uint64_t GetPointerTag(T pointer) {
63 // Use C-style casts to get static_cast behaviour for integral types (T),
64 // and reinterpret_cast behaviour for other types.
66 uint64_t raw = (uint64_t)pointer;
67 VIXL_STATIC_ASSERT(sizeof(pointer) == sizeof(raw));
69 return (raw & kAddressTagMask) >> kAddressTagOffset;
72 private:
73 // Return the content of the cache type register.
74 static uint32_t GetCacheType();
76 // I and D cache line size in bytes.
77 static unsigned icache_line_size_;
78 static unsigned dcache_line_size_;
81 } // namespace vixl
83 #endif // VIXL_CPU_A64_H