//===-- tsan_platform.h -----------------------------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
// Platform-specific code.
//===----------------------------------------------------------------------===//

#ifndef TSAN_PLATFORM_H
#define TSAN_PLATFORM_H

#if !defined(__LP64__) && !defined(_WIN64)
# error "Only 64-bit is supported"
#endif

#include "tsan_defs.h"
#include "tsan_trace.h"

namespace __tsan {

#if !SANITIZER_GO

#if defined(__x86_64__)
/*
C/C++ on linux/x86_64 and freebsd/x86_64
0000 0000 1000 - 0080 0000 0000: main binary and/or MAP_32BIT mappings (512GB)
0080 0000 0000 - 0100 0000 0000: -
0100 0000 0000 - 2000 0000 0000: shadow
2000 0000 0000 - 3000 0000 0000: -
3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
4000 0000 0000 - 5500 0000 0000: -
5500 0000 0000 - 5680 0000 0000: pie binaries without ASLR or on 4.1+ kernels
5680 0000 0000 - 6000 0000 0000: -
6000 0000 0000 - 6200 0000 0000: traces
6200 0000 0000 - 7b00 0000 0000: -
7b00 0000 0000 - 7c00 0000 0000: heap
7c00 0000 0000 - 7e80 0000 0000: -
7e80 0000 0000 - 8000 0000 0000: modules and main thread stack

C/C++ on netbsd/amd64 can reuse the same mapping:
 * The address space starts at 0x1000 (optionally at 0x0) and ends at
   0x7f7ffffff000.
 * The LoAppMem-kHeapMemEnd range can be reused as is.
 * No VDSO support.
 * No MidAppMem region.
 * No additional HeapMem region.
 * HiAppMem contains the stack, loader, shared libraries and heap.
 * The stack on NetBSD/amd64 has 128MB prereserved.
 * The heap grows downwards (top-down).
 * ASLR must be disabled per-process or globally.

*/
struct Mapping {
  static const uptr kMetaShadowBeg = 0x300000000000ull;
  static const uptr kMetaShadowEnd = 0x340000000000ull;
  static const uptr kTraceMemBeg   = 0x600000000000ull;
  static const uptr kTraceMemEnd   = 0x620000000000ull;
  static const uptr kShadowBeg     = 0x010000000000ull;
  static const uptr kShadowEnd     = 0x200000000000ull;
  static const uptr kHeapMemBeg    = 0x7b0000000000ull;
  static const uptr kHeapMemEnd    = 0x7c0000000000ull;
  static const uptr kLoAppMemBeg   = 0x000000001000ull;
  static const uptr kLoAppMemEnd   = 0x008000000000ull;
  static const uptr kMidAppMemBeg  = 0x550000000000ull;
  static const uptr kMidAppMemEnd  = 0x568000000000ull;
  static const uptr kHiAppMemBeg   = 0x7e8000000000ull;
  static const uptr kHiAppMemEnd   = 0x800000000000ull;
  static const uptr kAppMemMsk     = 0x780000000000ull;
  static const uptr kAppMemXor     = 0x040000000000ull;
  static const uptr kVdsoBeg       = 0xf000000000000000ull;
};
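
// Illustrative sketch (not part of the runtime): how an application address
// is translated to its shadow address with the constants above, assuming
// kShadowCell = 8 and kShadowCnt = 4 from tsan_defs.h:
//   uptr app    = 0x7b0000001000ull;  // some heap address
//   uptr shadow = ((app & ~(Mapping::kAppMemMsk | (kShadowCell - 1)))
//                  ^ Mapping::kAppMemXor) * kShadowCnt;
//   // app & ~msk -> 0x030000001000, ^ xor -> 0x070000001000,
//   // * kShadowCnt -> 0x1c0000004000, which lies in [kShadowBeg, kShadowEnd).
// See MemToShadowImpl() below for the actual implementation.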

#define TSAN_MID_APP_RANGE 1
#elif defined(__mips64)
/*
C/C++ on linux/mips64 (40-bit VMA)
0000 0000 00 - 0100 0000 00: -                                           (4 GB)
0100 0000 00 - 0200 0000 00: main binary                                 (4 GB)
0200 0000 00 - 2000 0000 00: -                                         (120 GB)
2000 0000 00 - 4000 0000 00: shadow                                    (128 GB)
4000 0000 00 - 5000 0000 00: metainfo (memory blocks and sync objects)  (64 GB)
5000 0000 00 - aa00 0000 00: -                                         (360 GB)
aa00 0000 00 - ab00 0000 00: main binary (PIE)                           (4 GB)
ab00 0000 00 - b000 0000 00: -                                          (20 GB)
b000 0000 00 - b200 0000 00: traces                                      (8 GB)
b200 0000 00 - fe00 0000 00: -                                         (304 GB)
fe00 0000 00 - ff00 0000 00: heap                                        (4 GB)
ff00 0000 00 - ff80 0000 00: -                                           (2 GB)
ff80 0000 00 - ffff ffff ff: modules and main thread stack              (<2 GB)
*/
struct Mapping {
  static const uptr kMetaShadowBeg = 0x4000000000ull;
  static const uptr kMetaShadowEnd = 0x5000000000ull;
  static const uptr kTraceMemBeg   = 0xb000000000ull;
  static const uptr kTraceMemEnd   = 0xb200000000ull;
  static const uptr kShadowBeg     = 0x2000000000ull;
  static const uptr kShadowEnd     = 0x4000000000ull;
  static const uptr kHeapMemBeg    = 0xfe00000000ull;
  static const uptr kHeapMemEnd    = 0xff00000000ull;
  static const uptr kLoAppMemBeg   = 0x0100000000ull;
  static const uptr kLoAppMemEnd   = 0x0200000000ull;
  static const uptr kMidAppMemBeg  = 0xaa00000000ull;
  static const uptr kMidAppMemEnd  = 0xab00000000ull;
  static const uptr kHiAppMemBeg   = 0xff80000000ull;
  static const uptr kHiAppMemEnd   = 0xffffffffffull;
  static const uptr kAppMemMsk     = 0xf800000000ull;
  static const uptr kAppMemXor     = 0x0800000000ull;
  static const uptr kVdsoBeg       = 0xfffff00000ull;
};

#define TSAN_MID_APP_RANGE 1
#elif defined(__aarch64__) && defined(__APPLE__)
/*
C/C++ on Darwin/iOS/ARM64 (36-bit VMA, 64 GB VM)
0000 0000 00 - 0100 0000 00: -                                    (4 GB)
0100 0000 00 - 0200 0000 00: main binary, modules, thread stacks  (4 GB)
0200 0000 00 - 0300 0000 00: heap                                 (4 GB)
0300 0000 00 - 0400 0000 00: -                                    (4 GB)
0400 0000 00 - 0c00 0000 00: shadow memory                       (32 GB)
0c00 0000 00 - 0d00 0000 00: -                                    (4 GB)
0d00 0000 00 - 0e00 0000 00: metainfo                             (4 GB)
0e00 0000 00 - 0f00 0000 00: -                                    (4 GB)
0f00 0000 00 - 0fc0 0000 00: traces                               (3 GB)
0fc0 0000 00 - 1000 0000 00: -
*/
struct Mapping {
  static const uptr kLoAppMemBeg   = 0x0100000000ull;
  static const uptr kLoAppMemEnd   = 0x0200000000ull;
  static const uptr kHeapMemBeg    = 0x0200000000ull;
  static const uptr kHeapMemEnd    = 0x0300000000ull;
  static const uptr kShadowBeg     = 0x0400000000ull;
  static const uptr kShadowEnd     = 0x0c00000000ull;
  static const uptr kMetaShadowBeg = 0x0d00000000ull;
  static const uptr kMetaShadowEnd = 0x0e00000000ull;
  static const uptr kTraceMemBeg   = 0x0f00000000ull;
  static const uptr kTraceMemEnd   = 0x0fc0000000ull;
  static const uptr kHiAppMemBeg   = 0x0fc0000000ull;
  static const uptr kHiAppMemEnd   = 0x0fc0000000ull;
  static const uptr kAppMemMsk     =          0x0ull;
  static const uptr kAppMemXor     =          0x0ull;
  static const uptr kVdsoBeg       = 0x7000000000000000ull;
};

#elif defined(__aarch64__)
// AArch64 supports multiple VMA sizes, which leads to multiple address
// transformation functions.  To support these multiple VMA transformations
// and mappings, the TSan runtime for AArch64 uses an external memory read
// (vmaSize) to select which mapping to use.  Although slower, it makes the
// same instrumented binary run on multiple kernels.
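
// A sketch of how vmaSize can be obtained (the real detection lives in the
// platform-specific .cc files, not in this header): the initial thread's
// stack sits near the top of the address space, so its position reveals the
// VMA width. GET_CURRENT_FRAME/MostSignificantSetBitIndex are assumed to be
// the usual sanitizer_common helpers.
//   vmaSize = MostSignificantSetBitIndex(GET_CURRENT_FRAME()) + 1;
//   // expected to be 39, 42 or 48 here; other values are rejected early.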

/*
C/C++ on linux/aarch64 (39-bit VMA)
0000 0010 00 - 0100 0000 00: main binary
0100 0000 00 - 0800 0000 00: -
0800 0000 00 - 2000 0000 00: shadow memory
2000 0000 00 - 3100 0000 00: -
3100 0000 00 - 3400 0000 00: metainfo
3400 0000 00 - 5500 0000 00: -
5500 0000 00 - 5600 0000 00: main binary (PIE)
5600 0000 00 - 6000 0000 00: -
6000 0000 00 - 6200 0000 00: traces
6200 0000 00 - 7c00 0000 00: -
7c00 0000 00 - 7d00 0000 00: heap
7d00 0000 00 - 7fff ffff ff: modules and main thread stack
*/
struct Mapping39 {
  static const uptr kLoAppMemBeg   = 0x0000001000ull;
  static const uptr kLoAppMemEnd   = 0x0100000000ull;
  static const uptr kShadowBeg     = 0x0800000000ull;
  static const uptr kShadowEnd     = 0x2000000000ull;
  static const uptr kMetaShadowBeg = 0x3100000000ull;
  static const uptr kMetaShadowEnd = 0x3400000000ull;
  static const uptr kMidAppMemBeg  = 0x5500000000ull;
  static const uptr kMidAppMemEnd  = 0x5600000000ull;
  static const uptr kTraceMemBeg   = 0x6000000000ull;
  static const uptr kTraceMemEnd   = 0x6200000000ull;
  static const uptr kHeapMemBeg    = 0x7c00000000ull;
  static const uptr kHeapMemEnd    = 0x7d00000000ull;
  static const uptr kHiAppMemBeg   = 0x7e00000000ull;
  static const uptr kHiAppMemEnd   = 0x7fffffffffull;
  static const uptr kAppMemMsk     = 0x7800000000ull;
  static const uptr kAppMemXor     = 0x0200000000ull;
  static const uptr kVdsoBeg       = 0x7f00000000ull;
};

/*
C/C++ on linux/aarch64 (42-bit VMA)
00000 0010 00 - 01000 0000 00: main binary
01000 0000 00 - 10000 0000 00: -
10000 0000 00 - 20000 0000 00: shadow memory
20000 0000 00 - 26000 0000 00: -
26000 0000 00 - 28000 0000 00: metainfo
28000 0000 00 - 2aa00 0000 00: -
2aa00 0000 00 - 2ab00 0000 00: main binary (PIE)
2ab00 0000 00 - 36200 0000 00: -
36200 0000 00 - 36400 0000 00: traces
36400 0000 00 - 3e000 0000 00: -
3e000 0000 00 - 3f000 0000 00: heap
3f000 0000 00 - 3ffff ffff ff: modules and main thread stack
*/
struct Mapping42 {
  static const uptr kLoAppMemBeg   = 0x00000001000ull;
  static const uptr kLoAppMemEnd   = 0x01000000000ull;
  static const uptr kShadowBeg     = 0x10000000000ull;
  static const uptr kShadowEnd     = 0x20000000000ull;
  static const uptr kMetaShadowBeg = 0x26000000000ull;
  static const uptr kMetaShadowEnd = 0x28000000000ull;
  static const uptr kMidAppMemBeg  = 0x2aa00000000ull;
  static const uptr kMidAppMemEnd  = 0x2ab00000000ull;
  static const uptr kTraceMemBeg   = 0x36200000000ull;
  static const uptr kTraceMemEnd   = 0x36400000000ull;
  static const uptr kHeapMemBeg    = 0x3e000000000ull;
  static const uptr kHeapMemEnd    = 0x3f000000000ull;
  static const uptr kHiAppMemBeg   = 0x3f000000000ull;
  static const uptr kHiAppMemEnd   = 0x3ffffffffffull;
  static const uptr kAppMemMsk     = 0x3c000000000ull;
  static const uptr kAppMemXor     = 0x04000000000ull;
  static const uptr kVdsoBeg       = 0x37f00000000ull;
};

struct Mapping48 {
  static const uptr kLoAppMemBeg   = 0x0000000001000ull;
  static const uptr kLoAppMemEnd   = 0x0000200000000ull;
  static const uptr kShadowBeg     = 0x0002000000000ull;
  static const uptr kShadowEnd     = 0x0004000000000ull;
  static const uptr kMetaShadowBeg = 0x0005000000000ull;
  static const uptr kMetaShadowEnd = 0x0006000000000ull;
  static const uptr kMidAppMemBeg  = 0x0aaaa00000000ull;
  static const uptr kMidAppMemEnd  = 0x0aaaf00000000ull;
  static const uptr kTraceMemBeg   = 0x0f06000000000ull;
  static const uptr kTraceMemEnd   = 0x0f06200000000ull;
  static const uptr kHeapMemBeg    = 0x0ffff00000000ull;
  static const uptr kHeapMemEnd    = 0x0ffff00000000ull;
  static const uptr kHiAppMemBeg   = 0x0ffff00000000ull;
  static const uptr kHiAppMemEnd   = 0x1000000000000ull;
  static const uptr kAppMemMsk     = 0x0fff800000000ull;
  static const uptr kAppMemXor     = 0x0000800000000ull;
  static const uptr kVdsoBeg       = 0xffff000000000ull;
};

// Indicates the runtime will define the memory regions at runtime.
#define TSAN_RUNTIME_VMA 1
// Indicates that mapping defines a mid range memory segment.
#define TSAN_MID_APP_RANGE 1
#elif defined(__powerpc64__)
// PPC64 supports multiple VMA sizes, which leads to multiple address
// transformation functions.  To support these multiple VMA transformations
// and mappings, the TSan runtime for PPC64 uses an external memory read
// (vmaSize) to select which mapping to use.  Although slower, it makes the
// same instrumented binary run on multiple kernels.

/*
C/C++ on linux/powerpc64 (44-bit VMA)
0000 0000 0100 - 0001 0000 0000: main binary
0001 0000 0000 - 0001 0000 0000: -
0001 0000 0000 - 0b00 0000 0000: shadow
0b00 0000 0000 - 0b00 0000 0000: -
0b00 0000 0000 - 0d00 0000 0000: metainfo (memory blocks and sync objects)
0d00 0000 0000 - 0d00 0000 0000: -
0d00 0000 0000 - 0f00 0000 0000: traces
0f00 0000 0000 - 0f00 0000 0000: -
0f00 0000 0000 - 0f50 0000 0000: heap
0f50 0000 0000 - 0f60 0000 0000: -
0f60 0000 0000 - 1000 0000 0000: modules and main thread stack
*/
struct Mapping44 {
  static const uptr kMetaShadowBeg = 0x0b0000000000ull;
  static const uptr kMetaShadowEnd = 0x0d0000000000ull;
  static const uptr kTraceMemBeg   = 0x0d0000000000ull;
  static const uptr kTraceMemEnd   = 0x0f0000000000ull;
  static const uptr kShadowBeg     = 0x000100000000ull;
  static const uptr kShadowEnd     = 0x0b0000000000ull;
  static const uptr kLoAppMemBeg   = 0x000000000100ull;
  static const uptr kLoAppMemEnd   = 0x000100000000ull;
  static const uptr kHeapMemBeg    = 0x0f0000000000ull;
  static const uptr kHeapMemEnd    = 0x0f5000000000ull;
  static const uptr kHiAppMemBeg   = 0x0f6000000000ull;
  static const uptr kHiAppMemEnd   = 0x100000000000ull; // 44 bits
  static const uptr kAppMemMsk     = 0x0f0000000000ull;
  static const uptr kAppMemXor     = 0x002100000000ull;
  static const uptr kVdsoBeg       = 0x3c0000000000000ull;
};

/*
C/C++ on linux/powerpc64 (46-bit VMA)
0000 0000 1000 - 0100 0000 0000: main binary
0100 0000 0000 - 1000 0000 0000: shadow
1000 0000 0000 - 1000 0000 0000: -
1000 0000 0000 - 2000 0000 0000: metainfo (memory blocks and sync objects)
2000 0000 0000 - 2000 0000 0000: -
2000 0000 0000 - 2200 0000 0000: traces
2200 0000 0000 - 3d00 0000 0000: -
3d00 0000 0000 - 3e00 0000 0000: heap
3e00 0000 0000 - 3e80 0000 0000: -
3e80 0000 0000 - 4000 0000 0000: modules and main thread stack
*/
struct Mapping46 {
  static const uptr kMetaShadowBeg = 0x100000000000ull;
  static const uptr kMetaShadowEnd = 0x200000000000ull;
  static const uptr kTraceMemBeg   = 0x200000000000ull;
  static const uptr kTraceMemEnd   = 0x220000000000ull;
  static const uptr kShadowBeg     = 0x010000000000ull;
  static const uptr kShadowEnd     = 0x100000000000ull;
  static const uptr kHeapMemBeg    = 0x3d0000000000ull;
  static const uptr kHeapMemEnd    = 0x3e0000000000ull;
  static const uptr kLoAppMemBeg   = 0x000000001000ull;
  static const uptr kLoAppMemEnd   = 0x010000000000ull;
  static const uptr kHiAppMemBeg   = 0x3e8000000000ull;
  static const uptr kHiAppMemEnd   = 0x400000000000ull; // 46 bits
  static const uptr kAppMemMsk     = 0x3c0000000000ull;
  static const uptr kAppMemXor     = 0x020000000000ull;
  static const uptr kVdsoBeg       = 0x7800000000000000ull;
};

/*
C/C++ on linux/powerpc64 (47-bit VMA)
0000 0000 1000 - 0100 0000 0000: main binary
0100 0000 0000 - 1000 0000 0000: shadow
1000 0000 0000 - 1000 0000 0000: -
1000 0000 0000 - 2000 0000 0000: metainfo (memory blocks and sync objects)
2000 0000 0000 - 2000 0000 0000: -
2000 0000 0000 - 2200 0000 0000: traces
2200 0000 0000 - 7d00 0000 0000: -
7d00 0000 0000 - 7e00 0000 0000: heap
7e00 0000 0000 - 7e80 0000 0000: -
7e80 0000 0000 - 8000 0000 0000: modules and main thread stack
*/
struct Mapping47 {
  static const uptr kMetaShadowBeg = 0x100000000000ull;
  static const uptr kMetaShadowEnd = 0x200000000000ull;
  static const uptr kTraceMemBeg   = 0x200000000000ull;
  static const uptr kTraceMemEnd   = 0x220000000000ull;
  static const uptr kShadowBeg     = 0x010000000000ull;
  static const uptr kShadowEnd     = 0x100000000000ull;
  static const uptr kHeapMemBeg    = 0x7d0000000000ull;
  static const uptr kHeapMemEnd    = 0x7e0000000000ull;
  static const uptr kLoAppMemBeg   = 0x000000001000ull;
  static const uptr kLoAppMemEnd   = 0x010000000000ull;
  static const uptr kHiAppMemBeg   = 0x7e8000000000ull;
  static const uptr kHiAppMemEnd   = 0x800000000000ull; // 47 bits
  static const uptr kAppMemMsk     = 0x7c0000000000ull;
  static const uptr kAppMemXor     = 0x020000000000ull;
  static const uptr kVdsoBeg       = 0x7800000000000000ull;
};

// Indicates the runtime will define the memory regions at runtime.
#define TSAN_RUNTIME_VMA 1
#endif

#elif SANITIZER_GO && !SANITIZER_WINDOWS && defined(__x86_64__)

/* Go on linux, darwin and freebsd on x86_64
0000 0000 1000 - 0000 1000 0000: executable
0000 1000 0000 - 00c0 0000 0000: -
00c0 0000 0000 - 00e0 0000 0000: heap
00e0 0000 0000 - 2000 0000 0000: -
2000 0000 0000 - 2380 0000 0000: shadow
2380 0000 0000 - 3000 0000 0000: -
3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
4000 0000 0000 - 6000 0000 0000: -
6000 0000 0000 - 6200 0000 0000: traces
6200 0000 0000 - 8000 0000 0000: -
*/

struct Mapping {
  static const uptr kMetaShadowBeg = 0x300000000000ull;
  static const uptr kMetaShadowEnd = 0x400000000000ull;
  static const uptr kTraceMemBeg   = 0x600000000000ull;
  static const uptr kTraceMemEnd   = 0x620000000000ull;
  static const uptr kShadowBeg     = 0x200000000000ull;
  static const uptr kShadowEnd     = 0x238000000000ull;
  static const uptr kAppMemBeg     = 0x000000001000ull;
  static const uptr kAppMemEnd     = 0x00e000000000ull;
};
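
// In the Go build the application range is a single contiguous block and the
// shadow mapping is linear. An illustrative sketch of what MemToShadowImpl()
// computes with this Mapping, assuming kShadowCell = 8 and kShadowCnt = 4
// from tsan_defs.h:
//   uptr app    = 0x00c000000020ull;  // some heap address
//   uptr shadow = ((app & ~(kShadowCell - 1)) * kShadowCnt)
//                 | Mapping::kShadowBeg;          // -> 0x230000000080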

#elif SANITIZER_GO && SANITIZER_WINDOWS

/* Go on windows
0000 0000 1000 - 0000 1000 0000: executable
0000 1000 0000 - 00c0 0000 0000: -
00c0 0000 0000 - 00e0 0000 0000: heap
00e0 0000 0000 - 0100 0000 0000: -
0100 0000 0000 - 0500 0000 0000: shadow
0500 0000 0000 - 0560 0000 0000: -
0560 0000 0000 - 0760 0000 0000: traces
0760 0000 0000 - 07d0 0000 0000: metainfo (memory blocks and sync objects)
07d0 0000 0000 - 8000 0000 0000: -
*/

struct Mapping {
  static const uptr kMetaShadowBeg = 0x076000000000ull;
  static const uptr kMetaShadowEnd = 0x07d000000000ull;
  static const uptr kTraceMemBeg   = 0x056000000000ull;
  static const uptr kTraceMemEnd   = 0x076000000000ull;
  static const uptr kShadowBeg     = 0x010000000000ull;
  static const uptr kShadowEnd     = 0x050000000000ull;
  static const uptr kAppMemBeg     = 0x000000001000ull;
  static const uptr kAppMemEnd     = 0x00e000000000ull;
};

#elif SANITIZER_GO && defined(__powerpc64__)

/* Only Mapping46 and Mapping47 are currently supported for powerpc64 on Go. */

/* Go on linux/powerpc64 (46-bit VMA)
0000 0000 1000 - 0000 1000 0000: executable
0000 1000 0000 - 00c0 0000 0000: -
00c0 0000 0000 - 00e0 0000 0000: heap
00e0 0000 0000 - 2000 0000 0000: -
2000 0000 0000 - 2380 0000 0000: shadow
2380 0000 0000 - 2400 0000 0000: -
2400 0000 0000 - 3400 0000 0000: metainfo (memory blocks and sync objects)
3400 0000 0000 - 3600 0000 0000: -
3600 0000 0000 - 3800 0000 0000: traces
3800 0000 0000 - 4000 0000 0000: -
*/

struct Mapping46 {
  static const uptr kMetaShadowBeg = 0x240000000000ull;
  static const uptr kMetaShadowEnd = 0x340000000000ull;
  static const uptr kTraceMemBeg   = 0x360000000000ull;
  static const uptr kTraceMemEnd   = 0x380000000000ull;
  static const uptr kShadowBeg     = 0x200000000000ull;
  static const uptr kShadowEnd     = 0x238000000000ull;
  static const uptr kAppMemBeg     = 0x000000001000ull;
  static const uptr kAppMemEnd     = 0x00e000000000ull;
};

/* Go on linux/powerpc64 (47-bit VMA)
0000 0000 1000 - 0000 1000 0000: executable
0000 1000 0000 - 00c0 0000 0000: -
00c0 0000 0000 - 00e0 0000 0000: heap
00e0 0000 0000 - 2000 0000 0000: -
2000 0000 0000 - 3000 0000 0000: shadow
3000 0000 0000 - 3000 0000 0000: -
3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
4000 0000 0000 - 6000 0000 0000: -
6000 0000 0000 - 6200 0000 0000: traces
6200 0000 0000 - 8000 0000 0000: -
*/

struct Mapping47 {
  static const uptr kMetaShadowBeg = 0x300000000000ull;
  static const uptr kMetaShadowEnd = 0x400000000000ull;
  static const uptr kTraceMemBeg   = 0x600000000000ull;
  static const uptr kTraceMemEnd   = 0x620000000000ull;
  static const uptr kShadowBeg     = 0x200000000000ull;
  static const uptr kShadowEnd     = 0x300000000000ull;
  static const uptr kAppMemBeg     = 0x000000001000ull;
  static const uptr kAppMemEnd     = 0x00e000000000ull;
};

#elif SANITIZER_GO && defined(__aarch64__)

/* Go on linux/aarch64 (48-bit VMA)
0000 0000 1000 - 0000 1000 0000: executable
0000 1000 0000 - 00c0 0000 0000: -
00c0 0000 0000 - 00e0 0000 0000: heap
00e0 0000 0000 - 2000 0000 0000: -
2000 0000 0000 - 3000 0000 0000: shadow
3000 0000 0000 - 3000 0000 0000: -
3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
4000 0000 0000 - 6000 0000 0000: -
6000 0000 0000 - 6200 0000 0000: traces
6200 0000 0000 - 8000 0000 0000: -
*/

struct Mapping {
  static const uptr kMetaShadowBeg = 0x300000000000ull;
  static const uptr kMetaShadowEnd = 0x400000000000ull;
  static const uptr kTraceMemBeg   = 0x600000000000ull;
  static const uptr kTraceMemEnd   = 0x620000000000ull;
  static const uptr kShadowBeg     = 0x200000000000ull;
  static const uptr kShadowEnd     = 0x300000000000ull;
  static const uptr kAppMemBeg     = 0x000000001000ull;
  static const uptr kAppMemEnd     = 0x00e000000000ull;
};

// Indicates the runtime will define the memory regions at runtime.
#define TSAN_RUNTIME_VMA 1

#else
# error "Unknown platform"
#endif


#ifdef TSAN_RUNTIME_VMA
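// Virtual address space width detected at startup (presumably in
// InitializePlatformEarly() in the platform-specific .cc files); used by the
// dispatchers below to pick one of the per-VMA mappings.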
extern uptr vmaSize;
#endif


enum MappingType {
  MAPPING_LO_APP_BEG,
  MAPPING_LO_APP_END,
  MAPPING_HI_APP_BEG,
  MAPPING_HI_APP_END,
#ifdef TSAN_MID_APP_RANGE
  MAPPING_MID_APP_BEG,
  MAPPING_MID_APP_END,
#endif
  MAPPING_HEAP_BEG,
  MAPPING_HEAP_END,
  MAPPING_APP_BEG,
  MAPPING_APP_END,
  MAPPING_SHADOW_BEG,
  MAPPING_SHADOW_END,
  MAPPING_META_SHADOW_BEG,
  MAPPING_META_SHADOW_END,
  MAPPING_TRACE_BEG,
  MAPPING_TRACE_END,
  MAPPING_VDSO_BEG,
};

template<typename Mapping, int Type>
uptr MappingImpl(void) {
  switch (Type) {
#if !SANITIZER_GO
    case MAPPING_LO_APP_BEG: return Mapping::kLoAppMemBeg;
    case MAPPING_LO_APP_END: return Mapping::kLoAppMemEnd;
# ifdef TSAN_MID_APP_RANGE
    case MAPPING_MID_APP_BEG: return Mapping::kMidAppMemBeg;
    case MAPPING_MID_APP_END: return Mapping::kMidAppMemEnd;
# endif
    case MAPPING_HI_APP_BEG: return Mapping::kHiAppMemBeg;
    case MAPPING_HI_APP_END: return Mapping::kHiAppMemEnd;
    case MAPPING_HEAP_BEG: return Mapping::kHeapMemBeg;
    case MAPPING_HEAP_END: return Mapping::kHeapMemEnd;
    case MAPPING_VDSO_BEG: return Mapping::kVdsoBeg;
#else
    case MAPPING_APP_BEG: return Mapping::kAppMemBeg;
    case MAPPING_APP_END: return Mapping::kAppMemEnd;
#endif
    case MAPPING_SHADOW_BEG: return Mapping::kShadowBeg;
    case MAPPING_SHADOW_END: return Mapping::kShadowEnd;
    case MAPPING_META_SHADOW_BEG: return Mapping::kMetaShadowBeg;
    case MAPPING_META_SHADOW_END: return Mapping::kMetaShadowEnd;
    case MAPPING_TRACE_BEG: return Mapping::kTraceMemBeg;
    case MAPPING_TRACE_END: return Mapping::kTraceMemEnd;
  }
}

template<int Type>
uptr MappingArchImpl(void) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return MappingImpl<Mapping39, Type>();
    case 42: return MappingImpl<Mapping42, Type>();
    case 48: return MappingImpl<Mapping48, Type>();
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return MappingImpl<Mapping44, Type>();
#endif
    case 46: return MappingImpl<Mapping46, Type>();
    case 47: return MappingImpl<Mapping47, Type>();
  }
  DCHECK(0);
  return 0;
#else
  return MappingImpl<Mapping, Type>();
#endif
}
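
// For example, ShadowBeg() below is MappingArchImpl<MAPPING_SHADOW_BEG>():
// on targets with a single Mapping this folds to the constant
// Mapping::kShadowBeg, while on TSAN_RUNTIME_VMA targets it becomes a small
// switch over vmaSize.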

#if !SANITIZER_GO
ALWAYS_INLINE
uptr LoAppMemBeg(void) {
  return MappingArchImpl<MAPPING_LO_APP_BEG>();
}
ALWAYS_INLINE
uptr LoAppMemEnd(void) {
  return MappingArchImpl<MAPPING_LO_APP_END>();
}

#ifdef TSAN_MID_APP_RANGE
ALWAYS_INLINE
uptr MidAppMemBeg(void) {
  return MappingArchImpl<MAPPING_MID_APP_BEG>();
}
ALWAYS_INLINE
uptr MidAppMemEnd(void) {
  return MappingArchImpl<MAPPING_MID_APP_END>();
}
#endif

ALWAYS_INLINE
uptr HeapMemBeg(void) {
  return MappingArchImpl<MAPPING_HEAP_BEG>();
}
ALWAYS_INLINE
uptr HeapMemEnd(void) {
  return MappingArchImpl<MAPPING_HEAP_END>();
}

ALWAYS_INLINE
uptr HiAppMemBeg(void) {
  return MappingArchImpl<MAPPING_HI_APP_BEG>();
}
ALWAYS_INLINE
uptr HiAppMemEnd(void) {
  return MappingArchImpl<MAPPING_HI_APP_END>();
}

ALWAYS_INLINE
uptr VdsoBeg(void) {
  return MappingArchImpl<MAPPING_VDSO_BEG>();
}

#else

ALWAYS_INLINE
uptr AppMemBeg(void) {
  return MappingArchImpl<MAPPING_APP_BEG>();
}
ALWAYS_INLINE
uptr AppMemEnd(void) {
  return MappingArchImpl<MAPPING_APP_END>();
}

#endif

static inline
bool GetUserRegion(int i, uptr *start, uptr *end) {
  switch (i) {
  default:
    return false;
#if !SANITIZER_GO
  case 0:
    *start = LoAppMemBeg();
    *end = LoAppMemEnd();
    return true;
  case 1:
    *start = HiAppMemBeg();
    *end = HiAppMemEnd();
    return true;
  case 2:
    *start = HeapMemBeg();
    *end = HeapMemEnd();
    return true;
# ifdef TSAN_MID_APP_RANGE
  case 3:
    *start = MidAppMemBeg();
    *end = MidAppMemEnd();
    return true;
# endif
#else
  case 0:
    *start = AppMemBeg();
    *end = AppMemEnd();
    return true;
#endif
  }
}
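
// Typical usage is to enumerate all application ranges, e.g. when checking
// or protecting the rest of the address space at startup (a sketch; the
// Process() callback is hypothetical):
//   uptr beg, end;
//   for (int i = 0; GetUserRegion(i, &beg, &end); i++)
//     Process(beg, end);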

ALWAYS_INLINE
uptr ShadowBeg(void) {
  return MappingArchImpl<MAPPING_SHADOW_BEG>();
}
ALWAYS_INLINE
uptr ShadowEnd(void) {
  return MappingArchImpl<MAPPING_SHADOW_END>();
}

ALWAYS_INLINE
uptr MetaShadowBeg(void) {
  return MappingArchImpl<MAPPING_META_SHADOW_BEG>();
}
ALWAYS_INLINE
uptr MetaShadowEnd(void) {
  return MappingArchImpl<MAPPING_META_SHADOW_END>();
}

ALWAYS_INLINE
uptr TraceMemBeg(void) {
  return MappingArchImpl<MAPPING_TRACE_BEG>();
}
ALWAYS_INLINE
uptr TraceMemEnd(void) {
  return MappingArchImpl<MAPPING_TRACE_END>();
}


template<typename Mapping>
bool IsAppMemImpl(uptr mem) {
#if !SANITIZER_GO
  return (mem >= Mapping::kHeapMemBeg && mem < Mapping::kHeapMemEnd) ||
# ifdef TSAN_MID_APP_RANGE
         (mem >= Mapping::kMidAppMemBeg && mem < Mapping::kMidAppMemEnd) ||
# endif
         (mem >= Mapping::kLoAppMemBeg && mem < Mapping::kLoAppMemEnd) ||
         (mem >= Mapping::kHiAppMemBeg && mem < Mapping::kHiAppMemEnd);
#else
  return mem >= Mapping::kAppMemBeg && mem < Mapping::kAppMemEnd;
#endif
}

ALWAYS_INLINE
bool IsAppMem(uptr mem) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return IsAppMemImpl<Mapping39>(mem);
    case 42: return IsAppMemImpl<Mapping42>(mem);
    case 48: return IsAppMemImpl<Mapping48>(mem);
  }
  DCHECK(0);
  return false;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return IsAppMemImpl<Mapping44>(mem);
#endif
    case 46: return IsAppMemImpl<Mapping46>(mem);
    case 47: return IsAppMemImpl<Mapping47>(mem);
  }
  DCHECK(0);
  return false;
#else
  return IsAppMemImpl<Mapping>(mem);
#endif
}


template<typename Mapping>
bool IsShadowMemImpl(uptr mem) {
  return mem >= Mapping::kShadowBeg && mem <= Mapping::kShadowEnd;
}

ALWAYS_INLINE
bool IsShadowMem(uptr mem) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return IsShadowMemImpl<Mapping39>(mem);
    case 42: return IsShadowMemImpl<Mapping42>(mem);
    case 48: return IsShadowMemImpl<Mapping48>(mem);
  }
  DCHECK(0);
  return false;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return IsShadowMemImpl<Mapping44>(mem);
#endif
    case 46: return IsShadowMemImpl<Mapping46>(mem);
    case 47: return IsShadowMemImpl<Mapping47>(mem);
  }
  DCHECK(0);
  return false;
#else
  return IsShadowMemImpl<Mapping>(mem);
#endif
}


template<typename Mapping>
bool IsMetaMemImpl(uptr mem) {
  return mem >= Mapping::kMetaShadowBeg && mem <= Mapping::kMetaShadowEnd;
}

ALWAYS_INLINE
bool IsMetaMem(uptr mem) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return IsMetaMemImpl<Mapping39>(mem);
    case 42: return IsMetaMemImpl<Mapping42>(mem);
    case 48: return IsMetaMemImpl<Mapping48>(mem);
  }
  DCHECK(0);
  return false;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return IsMetaMemImpl<Mapping44>(mem);
#endif
    case 46: return IsMetaMemImpl<Mapping46>(mem);
    case 47: return IsMetaMemImpl<Mapping47>(mem);
  }
  DCHECK(0);
  return false;
#else
  return IsMetaMemImpl<Mapping>(mem);
#endif
}


template<typename Mapping>
uptr MemToShadowImpl(uptr x) {
  DCHECK(IsAppMem(x));
#if !SANITIZER_GO
  return (((x) & ~(Mapping::kAppMemMsk | (kShadowCell - 1)))
      ^ Mapping::kAppMemXor) * kShadowCnt;
#else
# ifndef SANITIZER_WINDOWS
  return ((x & ~(kShadowCell - 1)) * kShadowCnt) | Mapping::kShadowBeg;
# else
  return ((x & ~(kShadowCell - 1)) * kShadowCnt) + Mapping::kShadowBeg;
# endif
#endif
}
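
// Informal note: every kShadowCell-byte app cell maps to kShadowCnt
// consecutive shadow values, so assuming the usual kShadowCell = 8,
// kShadowCnt = 4 and kShadowSize = 8 from tsan_defs.h, adjacent app cells
// land 32 bytes apart in shadow (away from mapping-range boundaries):
//   MemToShadow(p + kShadowCell) == MemToShadow(p) + kShadowCnt * kShadowSize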

ALWAYS_INLINE
uptr MemToShadow(uptr x) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return MemToShadowImpl<Mapping39>(x);
    case 42: return MemToShadowImpl<Mapping42>(x);
    case 48: return MemToShadowImpl<Mapping48>(x);
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return MemToShadowImpl<Mapping44>(x);
#endif
    case 46: return MemToShadowImpl<Mapping46>(x);
    case 47: return MemToShadowImpl<Mapping47>(x);
  }
  DCHECK(0);
  return 0;
#else
  return MemToShadowImpl<Mapping>(x);
#endif
}


template<typename Mapping>
u32 *MemToMetaImpl(uptr x) {
  DCHECK(IsAppMem(x));
#if !SANITIZER_GO
  return (u32*)(((((x) & ~(Mapping::kAppMemMsk | (kMetaShadowCell - 1)))) /
      kMetaShadowCell * kMetaShadowSize) | Mapping::kMetaShadowBeg);
#else
# ifndef SANITIZER_WINDOWS
  return (u32*)(((x & ~(kMetaShadowCell - 1)) / \
      kMetaShadowCell * kMetaShadowSize) | Mapping::kMetaShadowBeg);
# else
  return (u32*)(((x & ~(kMetaShadowCell - 1)) / \
      kMetaShadowCell * kMetaShadowSize) + Mapping::kMetaShadowBeg);
# endif
#endif
}
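
// Informal note: one u32 of meta shadow describes one kMetaShadowCell-byte
// app cell, so assuming kMetaShadowCell = 8 and kMetaShadowSize = 4 from
// tsan_defs.h, meta shadow is half the size of the app range it covers and
// consecutive app cells map to consecutive u32s:
//   u32 *m0 = MemToMeta(p);
//   u32 *m1 = MemToMeta(p + kMetaShadowCell);  // typically m0 + 1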

ALWAYS_INLINE
u32 *MemToMeta(uptr x) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return MemToMetaImpl<Mapping39>(x);
    case 42: return MemToMetaImpl<Mapping42>(x);
    case 48: return MemToMetaImpl<Mapping48>(x);
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return MemToMetaImpl<Mapping44>(x);
#endif
    case 46: return MemToMetaImpl<Mapping46>(x);
    case 47: return MemToMetaImpl<Mapping47>(x);
  }
  DCHECK(0);
  return 0;
#else
  return MemToMetaImpl<Mapping>(x);
#endif
}


template<typename Mapping>
uptr ShadowToMemImpl(uptr s) {
  DCHECK(IsShadowMem(s));
#if !SANITIZER_GO
  // The shadow mapping is non-linear and we've lost some bits, so we don't have
  // an easy way to restore the original app address. But the mapping is a
  // bijection, so we try to restore the address as belonging to the low/mid/
  // high range in turn and check that the shadow->app->shadow mapping gives
  // back the same address.
  uptr p = (s / kShadowCnt) ^ Mapping::kAppMemXor;
  if (p >= Mapping::kLoAppMemBeg && p < Mapping::kLoAppMemEnd &&
      MemToShadow(p) == s)
    return p;
# ifdef TSAN_MID_APP_RANGE
  p = ((s / kShadowCnt) ^ Mapping::kAppMemXor) +
      (Mapping::kMidAppMemBeg & Mapping::kAppMemMsk);
  if (p >= Mapping::kMidAppMemBeg && p < Mapping::kMidAppMemEnd &&
      MemToShadow(p) == s)
    return p;
# endif
  return ((s / kShadowCnt) ^ Mapping::kAppMemXor) | Mapping::kAppMemMsk;
#else  // #if !SANITIZER_GO
# ifndef SANITIZER_WINDOWS
  return (s & ~Mapping::kShadowBeg) / kShadowCnt;
# else
  return (s - Mapping::kShadowBeg) / kShadowCnt;
# endif // SANITIZER_WINDOWS
#endif
}

ALWAYS_INLINE
uptr ShadowToMem(uptr s) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return ShadowToMemImpl<Mapping39>(s);
    case 42: return ShadowToMemImpl<Mapping42>(s);
    case 48: return ShadowToMemImpl<Mapping48>(s);
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return ShadowToMemImpl<Mapping44>(s);
#endif
    case 46: return ShadowToMemImpl<Mapping46>(s);
    case 47: return ShadowToMemImpl<Mapping47>(s);
  }
  DCHECK(0);
  return 0;
#else
  return ShadowToMemImpl<Mapping>(s);
#endif
}
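
// MemToShadow() and ShadowToMem() are intended to be mutually inverse on
// application addresses, up to the kShadowCell granularity; informally, for
// an app address p the following should hold (a sketch, not code taken from
// the runtime):
//   DCHECK_EQ(ShadowToMem(MemToShadow(p)), p & ~(kShadowCell - 1));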



// The additional page is to catch shadow stack overflow as a page fault.
// Windows wants 64K alignment for mmaps.
const uptr kTotalTraceSize = (kTraceSize * sizeof(Event) + sizeof(Trace)
    + (64 << 10) + (64 << 10) - 1) & ~((64 << 10) - 1);

template<typename Mapping>
uptr GetThreadTraceImpl(int tid) {
  uptr p = Mapping::kTraceMemBeg + (uptr)tid * kTotalTraceSize;
  DCHECK_LT(p, Mapping::kTraceMemEnd);
  return p;
}
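
// Per-thread trace layout (as implied by the code above and below): thread
// tid owns the kTotalTraceSize-byte slot starting at
//   Mapping::kTraceMemBeg + tid * kTotalTraceSize
// and its Trace header lives kTraceSize * sizeof(Event) bytes into that
// slot (see GetThreadTraceHeader() below).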

ALWAYS_INLINE
uptr GetThreadTrace(int tid) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return GetThreadTraceImpl<Mapping39>(tid);
    case 42: return GetThreadTraceImpl<Mapping42>(tid);
    case 48: return GetThreadTraceImpl<Mapping48>(tid);
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return GetThreadTraceImpl<Mapping44>(tid);
#endif
    case 46: return GetThreadTraceImpl<Mapping46>(tid);
    case 47: return GetThreadTraceImpl<Mapping47>(tid);
  }
  DCHECK(0);
  return 0;
#else
  return GetThreadTraceImpl<Mapping>(tid);
#endif
}


template<typename Mapping>
uptr GetThreadTraceHeaderImpl(int tid) {
  uptr p = Mapping::kTraceMemBeg + (uptr)tid * kTotalTraceSize
      + kTraceSize * sizeof(Event);
  DCHECK_LT(p, Mapping::kTraceMemEnd);
  return p;
}

ALWAYS_INLINE
uptr GetThreadTraceHeader(int tid) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return GetThreadTraceHeaderImpl<Mapping39>(tid);
    case 42: return GetThreadTraceHeaderImpl<Mapping42>(tid);
    case 48: return GetThreadTraceHeaderImpl<Mapping48>(tid);
  }
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return GetThreadTraceHeaderImpl<Mapping44>(tid);
#endif
    case 46: return GetThreadTraceHeaderImpl<Mapping46>(tid);
    case 47: return GetThreadTraceHeaderImpl<Mapping47>(tid);
  }
  DCHECK(0);
  return 0;
#else
  return GetThreadTraceHeaderImpl<Mapping>(tid);
#endif
}

void InitializePlatform();
void InitializePlatformEarly();
void CheckAndProtect();
void InitializeShadowMemoryPlatform();
void FlushShadowMemory();
void WriteMemoryProfile(char *buf, uptr buf_size, uptr nthread, uptr nlive);
int ExtractResolvFDs(void *state, int *fds, int nfd);
int ExtractRecvmsgFDs(void *msg, int *fds, int nfd);
void ImitateTlsWrite(ThreadState *thr, uptr tls_addr, uptr tls_size);

int call_pthread_cancel_with_cleanup(int(*fn)(void *c, void *m,
    void *abstime), void *c, void *m, void *abstime,
    void(*cleanup)(void *arg), void *arg);

void DestroyThreadState();

}  // namespace __tsan

#endif  // TSAN_PLATFORM_H