Home | History | Annotate | Line # | Download | only in aarch64
      1   1.1  mrg /* Machine description for AArch64 architecture.
      2  1.10  mrg    Copyright (C) 2012-2022 Free Software Foundation, Inc.
      3   1.1  mrg    Contributed by ARM Ltd.
      4   1.1  mrg 
      5   1.1  mrg This file is part of GCC.
      6   1.1  mrg 
      7   1.1  mrg GCC is free software; you can redistribute it and/or modify it under
      8   1.1  mrg the terms of the GNU General Public License as published by the Free
      9   1.1  mrg Software Foundation; either version 3, or (at your option) any later
     10   1.1  mrg version.
     11   1.1  mrg 
     12   1.1  mrg GCC is distributed in the hope that it will be useful, but WITHOUT ANY
     13   1.1  mrg WARRANTY; without even the implied warranty of MERCHANTABILITY or
     14   1.1  mrg FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
     15   1.1  mrg for more details.
     16   1.1  mrg 
     17   1.1  mrg Under Section 7 of GPL version 3, you are granted additional
     18   1.1  mrg permissions described in the GCC Runtime Library Exception, version
     19   1.1  mrg 3.1, as published by the Free Software Foundation.
     20   1.1  mrg 
     21   1.1  mrg You should have received a copy of the GNU General Public License and
     22   1.1  mrg a copy of the GCC Runtime Library Exception along with this program;
     23   1.1  mrg see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
     24   1.1  mrg <http://www.gnu.org/licenses/>.  */
     25   1.1  mrg 
/* Bit positions of the IDC and DIC flags within CTR_EL0 (the Cache Type
   Register).  When IDC is set, data-cache clean to the Point of Unification
   is not required for instruction/data coherence; when DIC is set,
   instruction-cache invalidation to the Point of Unification is likewise
   not required (see the checks in __aarch64_sync_cache_range below).  */
#define CTR_IDC_SHIFT           28
#define CTR_DIC_SHIFT           29

     29   1.1  mrg void __aarch64_sync_cache_range (const void *, const void *);
     30   1.1  mrg 
     31   1.1  mrg void
     32   1.1  mrg __aarch64_sync_cache_range (const void *base, const void *end)
     33   1.1  mrg {
     34   1.1  mrg   unsigned icache_lsize;
     35   1.1  mrg   unsigned dcache_lsize;
     36   1.1  mrg   static unsigned int cache_info = 0;
     37   1.1  mrg   const char *address;
     38   1.1  mrg 
     39   1.1  mrg   if (! cache_info)
     40   1.1  mrg     /* CTR_EL0 [3:0] contains log2 of icache line size in words.
     41   1.1  mrg        CTR_EL0 [19:16] contains log2 of dcache line size in words.  */
     42   1.1  mrg     asm volatile ("mrs\t%0, ctr_el0":"=r" (cache_info));
     43   1.1  mrg 
     44   1.1  mrg   icache_lsize = 4 << (cache_info & 0xF);
     45   1.1  mrg   dcache_lsize = 4 << ((cache_info >> 16) & 0xF);
     46   1.1  mrg 
     47   1.9  mrg   /* If CTR_EL0.IDC is enabled, Data cache clean to the Point of Unification is
     48   1.9  mrg      not required for instruction to data coherence.  */
     49   1.9  mrg 
     50   1.9  mrg   if (((cache_info >> CTR_IDC_SHIFT) & 0x1) == 0x0) {
     51   1.9  mrg     /* Loop over the address range, clearing one cache line at once.
     52   1.9  mrg        Data cache must be flushed to unification first to make sure the
     53   1.9  mrg        instruction cache fetches the updated data.  'end' is exclusive,
     54   1.9  mrg        as per the GNU definition of __clear_cache.  */
     55   1.9  mrg 
     56   1.9  mrg     /* Make the start address of the loop cache aligned.  */
     57   1.9  mrg     address = (const char*) ((__UINTPTR_TYPE__) base
     58   1.9  mrg 			     & ~ (__UINTPTR_TYPE__) (dcache_lsize - 1));
     59   1.9  mrg 
     60   1.9  mrg     for (; address < (const char *) end; address += dcache_lsize)
     61   1.9  mrg       asm volatile ("dc\tcvau, %0"
     62   1.9  mrg 		    :
     63   1.9  mrg 		    : "r" (address)
     64   1.9  mrg 		    : "memory");
     65   1.9  mrg   }
     66   1.1  mrg 
     67   1.1  mrg   asm volatile ("dsb\tish" : : : "memory");
     68   1.1  mrg 
     69   1.9  mrg   /* If CTR_EL0.DIC is enabled, Instruction cache cleaning to the Point of
     70   1.9  mrg      Unification is not required for instruction to data coherence.  */
     71   1.9  mrg 
     72   1.9  mrg   if (((cache_info >> CTR_DIC_SHIFT) & 0x1) == 0x0) {
     73   1.9  mrg     /* Make the start address of the loop cache aligned.  */
     74   1.9  mrg     address = (const char*) ((__UINTPTR_TYPE__) base
     75   1.9  mrg 			     & ~ (__UINTPTR_TYPE__) (icache_lsize - 1));
     76   1.9  mrg 
     77   1.9  mrg     for (; address < (const char *) end; address += icache_lsize)
     78   1.9  mrg       asm volatile ("ic\tivau, %0"
     79   1.9  mrg 		    :
     80   1.9  mrg 		    : "r" (address)
     81   1.9  mrg 		    : "memory");
     82   1.9  mrg 
     83   1.9  mrg     asm volatile ("dsb\tish" : : : "memory");
     84   1.9  mrg   }
     85   1.1  mrg 
     86   1.9  mrg   asm volatile("isb" : : : "memory");
     87   1.1  mrg }
     88