      1 /* Copyright (C) 2021-2025 Free Software Foundation, Inc.
      2    Contributed by Oracle.
      3 
      4    This file is part of GNU Binutils.
      5 
      6    This program is free software; you can redistribute it and/or modify
      7    it under the terms of the GNU General Public License as published by
      8    the Free Software Foundation; either version 3, or (at your option)
      9    any later version.
     10 
     11    This program is distributed in the hope that it will be useful,
     12    but WITHOUT ANY WARRANTY; without even the implied warranty of
     13    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
     14    GNU General Public License for more details.
     15 
     16    You should have received a copy of the GNU General Public License
     17    along with this program; if not, write to the Free Software
     18    Foundation, 51 Franklin Street - Fifth Floor, Boston,
     19    MA 02110-1301, USA.  */
     20 
     21 #ifndef _LIBCOL_UTIL_H
     22 #define _LIBCOL_UTIL_H
     23 
     24 #include <stdarg.h>
     25 #include <pthread.h>
     26 #include <signal.h>
     27 
     28 // LIBCOLLECTOR NOT I18N
     29 #define NTXT(x) x
     30 #define STXT(x) x
     31 
     32 extern int __collector_tracelevel;
     33 
     34 /* Initialization function */
     35 extern	int  __collector_util_init();
     36 extern  void __collector_libkstat_funcs_init();
     37 extern  void __collector_libscf_funcs_init();
     38 
     39 /* -------  functions from libcol_util.c ----------------- */
     40 extern void * __collector_memcpy (void *s1, const void *s2, size_t n);
     41 extern int (*__collector_sscanfp)(const char *restrict s, const char *restrict fmt, ...)
     42 	__attribute__ ((format (scanf, 2, 3)));
     43 extern char * __collector_strcat (char *s1, const char *s2);
     44 extern char * __collector_strchr (const char *s1, int chr);
     45 extern size_t __collector_strlcpy (char *dst, const char *src, size_t dstsize);
     46 extern char* __collector_strrchr (const char *str, int chr);
     47 extern size_t __collector_strlen (const char *s);
     48 extern size_t __collector_strlcat (char *dst, const char *src, size_t dstsize);
     49 extern char* __collector_strchr (const char *str, int chr);
     50 extern int __collector_strcmp (const char *s1, const char *s2);
     51 extern int __collector_strncmp (const char *s1, const char *s2, size_t n);
     52 extern char * __collector_strstr (const char *s1, const char *s2);
     53 extern size_t __collector_strncpy (char *dst, const char *src, size_t dstsize);
     54 extern size_t __collector_strncat (char *dst, const char *src, size_t dstsize);
     55 extern void * __collector_malloc (size_t size);
     56 extern void * __collector_calloc (size_t nelem, size_t elsize);
     57 extern char * __collector_strdup (const char * str);
     58 extern int __collector_strStartWith (const char *s1, const char *s2);
     59 extern int __collector_xml_snprintf (char *s, size_t n, const char *format, ...) __attribute__ ((format (printf, 3, 4)));
     60 extern int __collector_xml_vsnprintf (char *s, size_t n, const char *format, va_list args);
     61 
/* -------  collector_thread ----------------- */
extern pid_t __collector_gettid ();
extern void __collector_ext_gettid_tsd_create_key ();
typedef pthread_t collector_thread_t;
/* Both the "lwp" and "thread" identities are the OS thread id returned by
   __collector_gettid(), widened via unsigned long into the handle type.  */
#define __collector_lwp_self() ((collector_thread_t) ((unsigned long) __collector_gettid()))
#define __collector_thr_self() ((collector_thread_t) ((unsigned long) __collector_gettid()))
     68 
     69 /* -------  collector_mutex ----------------- */
     70 /*
     71  * mutex_init is defined in libthread. If we don't want to interact
     72  * with libthread we should use memset to initialize mutexes
     73  */
     74 
/* A collector mutex is a bare int lock word; COLLECTOR_MUTEX_INITIALIZER (0)
   is the unlocked/initial state.  */
typedef volatile int collector_mutex_t;
#define  COLLECTOR_MUTEX_INITIALIZER 0
extern int __collector_mutex_lock (collector_mutex_t *mp);
extern int __collector_mutex_unlock (collector_mutex_t *mp);
extern int __collector_mutex_trylock (collector_mutex_t *mp);

/* Reinitialize *(xx) to the unlocked state by plain assignment, avoiding any
   call into libthread (see comment above).  */
#define __collector_mutex_init(xx) \
  do { collector_mutex_t tmp=COLLECTOR_MUTEX_INITIALIZER; *(xx)=tmp; } while(0)
     83 
     84 struct DT_lineno;
     85 
     86 typedef enum
     87 {
     88   DFUNC_API = 1, /* dynamic function declared with API */
     89   DFUNC_JAVA, /* dynamically compiled java method */
     90   DFUNC_KERNEL /* dynamic code mapped by the kernel (Linux) */
     91 } dfunc_mode_t;
     92 
     93 extern void __collector_int_func_load (dfunc_mode_t mode, char *name,
     94 				       char *sourcename, void *vaddr,
     95 				       int size, int lntsize,
     96 				       struct DT_lineno *lntable);
     97 extern void __collector_int_func_unload (dfunc_mode_t mode, void *vaddr);
     98 
     99 extern int __collector_sigaction (int sig, const struct sigaction *nact,
    100 				  struct sigaction *oact);
    101 extern void __collector_SIGDFL_handler (int sig);
    102 extern int __collector_ext_itimer_set (int period);
    103 
    104 #if ARCH(Intel)
    105 /* Atomic functions on x86/x64 */
    106 
/**
 * Atomically increment (by one) the 32-bit value stored at PTR.
 *
 * Fix: the target previously appeared only in the input operand list
 * (the output constraint was commented out), so the compiler was never
 * told that the asm writes memory and could cache *ptr in a register
 * or reorder accesses around it.  Use a read-write "+m" operand and a
 * "memory" clobber instead.
 */
static __attribute__ ((always_inline)) inline void
__collector_inc_32 (uint32_t *ptr)
{
  __asm__ __volatile__("lock; incl %0"
		       : "+m" (*ptr)    /* read-write memory operand */
		       : /* no inputs */
		       : "memory");
}
    118 
/**
 * Atomically decrement (by one) the 32-bit value stored at PTR.
 *
 * Fix: "lock decl" both reads and writes *ptr, so the operand must be a
 * read-write output ("+m") with a "memory" clobber, not an input-only
 * "m" operand as before (the output constraint was commented out).
 */
static __attribute__ ((always_inline)) inline void
__collector_dec_32 (volatile uint32_t *ptr)
{
  __asm__ __volatile__("lock; decl %0"
		       : "+m" (*ptr)    /* read-write memory operand */
		       : /* no inputs */
		       : "memory");
}
    130 
/**
 * Atomically subtract OFF from the 32-bit value stored at PTR and
 * return the new (post-subtraction) value of the target.
 *
 * Fixes: the old template negated the subtrahend inside the asm
 * (movl/negl) and carried an unused "r" (*ptr) input operand; the
 * negation is now done in C and "lock xadd" performs the exchange,
 * with proper read-write constraints and a "memory" clobber.
 */
static __attribute__ ((always_inline)) inline uint32_t
__collector_subget_32 (uint32_t *ptr, uint32_t off)
{
  uint32_t r = (uint32_t) (0 - off);  /* xadd adds, so add the negation */
  __asm__ __volatile__("lock; xaddl %0, %1"
		       : "+r" (r), "+m" (*ptr)
		       : /* no inputs */
		       : "memory");
  /* xadd left the previous value of *ptr in R; new value is old - off.  */
  return r - off;
}
    146 
/**
 * This function returns the current value of the stack pointer register.
 */
static __attribute__ ((always_inline)) inline void *
__collector_getsp ()
{
  void *r;
#if WSIZE(32) || defined(__ILP32__)
  /* 32-bit pointers (ia32 or the x32 ABI): stack pointer is %esp.  */
  __asm__ __volatile__("movl %%esp, %0"
#else
  /* 64-bit build: stack pointer is %rsp.  */
  __asm__ __volatile__("movq %%rsp, %0"
#endif
	  : "=r" (r)); // output
  return r;
}
    162 
/**
 * This function returns the current value of the frame pointer register.
 * NOTE(review): only meaningful when the caller is compiled with a frame
 * pointer (-fno-omit-frame-pointer) — confirm for new call sites.
 */
static __attribute__ ((always_inline)) inline void *
__collector_getfp ()
{
  void *r;
#if WSIZE(32) || defined(__ILP32__)
  /* 32-bit pointers (ia32 or the x32 ABI): frame pointer is %ebp.  */
  __asm__ __volatile__("movl %%ebp, %0"
#else
  /* 64-bit build: frame pointer is %rbp.  */
  __asm__ __volatile__("movq %%rbp, %0"
#endif
	  : "=r" (r)); // output
  return r;
}
    178 
/**
 * This function returns a program-counter value at the point where it
 * is inlined (always_inline guarantees no separate call frame).
 */
static __attribute__ ((always_inline)) inline void *
__collector_getpc ()
{
  void *r;
#if defined(__x86_64__)
  /* x86-64: %rip can be read directly via an RIP-relative lea.  */
  __asm__ __volatile__("lea (%%rip), %0" : "=r" (r));
#else
  /* ia32 has no direct way to read %eip: "call 1f" pushes the address
     of label 1 as the return address, which is then popped into R.  */
  __asm__ __volatile__("call  1f \n"
		       "1: popl  %0" : "=r" (r));
#endif
  return r;
}
    194 
/**
 * This function enables a compare and swap operation to occur atomically.
 * The 32-bit value stored in target is compared with "old". If these values
 * are equal, the value stored in target is replaced with "new". The old
 * 32-bit value stored in target is returned by the function whether or not
 * the replacement occurred.
 *
 * Fix: express the target as one read-write "+m" operand instead of the
 * paired "=m"/"m" operands (which the compiler may treat as distinct
 * locations), and add a "memory" clobber so the CAS orders surrounding
 * accesses.
 */
static __attribute__ ((always_inline)) inline uint32_t
__collector_cas_32 (volatile uint32_t *pdata, uint32_t old, uint32_t new)
{
  uint32_t r;
  /* cmpxchg compares %eax (old) with *pdata; on match stores new,
     and always leaves the previous value in %eax.  */
  __asm__ __volatile__("lock; cmpxchgl %2, %1"
		       : "=a" (r), "+m" (*pdata)
		       : "r" (new), "a" (old)
		       : "memory");
  return r;
}
/**
 * This function enables a compare and swap operation to occur atomically.
 * The 64-bit value stored in target is compared with "old". If these values
 * are equal, the value stored in target is replaced with "new". The old
 * 64-bit value stored in target is returned by the function whether or not
 * the replacement occurred.  *old and *new are read but never modified.
 */
static __attribute__ ((always_inline)) inline uint64_t
__collector_cas_64p (volatile uint64_t *mem, uint64_t *old, uint64_t * new)
{
  uint64_t r;
#if WSIZE(32)
  /* 32-bit build: no 64-bit register operand is available, so use
     cmpxchg8b, which takes the comparand in edx:eax and the replacement
     in ecx:ebx, operating on the memory addressed here via %esi.  */
  uint32_t old1 = (uint32_t) (*old & 0xFFFFFFFFL);         /* low word of *old */
  uint32_t old2 = (uint32_t) ((*old >> 32) & 0xFFFFFFFFL); /* high word of *old */
  uint32_t new1 = (uint32_t) (*new & 0xFFFFFFFFL);         /* low word of *new */
  uint32_t new2 = (uint32_t) ((*new >> 32) & 0xFFFFFFFFL); /* high word of *new */
  uint32_t res1 = 0;
  uint32_t res2 = 0;
  /* cmpxchg8b leaves the previous target value in edx:eax; the template
     stores those into res2/res1.  NOTE(review): operand %0 ("=m" (r)) is
     declared as an output but the template never writes it; R is
     assembled from res1/res2 below — confirm before simplifying.  */
  __asm__ __volatile__(
      "movl %3, %%esi; lock; cmpxchg8b (%%esi); movl %%edx, %2; movl %%eax, %1"
      : "=m" (r), "=m" (res1), "=m" (res2) /* output */
      : "m" (mem), "a" (old1), "d" (old2), "b" (new1), "c" (new2) /* input */
      : "memory", "cc", "esi" //, "edx", "ecx", "ebx", "eax" /* clobbered register */
		       );
  r = (((uint64_t) res2) << 32) | ((uint64_t) res1);
#else
  /* 64-bit build: plain cmpxchgq with the comparand in %rax; the
     previous value of *mem comes back in %rax.  */
  __asm__ __volatile__( "lock; cmpxchgq %2, %1"
		       : "=a" (r), "=m" (*mem) /* output */
		       : "r" (*new), "a" (*old), "m" (*mem) /* input */
		       : "%rcx", "rdx" /* clobbered register */
		       );
#endif
  return r;
}
/**
 * This function enables a compare and swap operation to occur atomically.
 * The 32-/64-bit value stored in target is compared with "cmp". If these values
 * are equal, the value stored in target is replaced with "new".
 * The old value stored in target is returned by the function whether or not
 * the replacement occurred.
 */
static __attribute__ ((always_inline)) inline void *
__collector_cas_ptr (void *mem, void *cmp, void *new)
{
  void *r;
#if WSIZE(32) || defined(__ILP32__)
  /* Pointers fit in 32 bits: delegate to the 32-bit CAS.  */
  r = (void *) __collector_cas_32 ((volatile uint32_t *)mem, (uint32_t) cmp, (uint32_t)new);
#else
  /* 64-bit pointers: cmpxchgq on (MEM) with the comparand in %rax.
     NOTE(review): MEM is routed through the "b" register constraint as
     both an input and an output; a "+m"/"memory" formulation would be
     more conventional — confirm before changing.  */
  __asm__ __volatile__("lock; cmpxchgq %2, (%1)"
		       : "=a" (r), "=b" (mem) /* output */
		       : "r" (new), "a" (cmp), "b" (mem) /* input */
		       );
#endif
  return r;
}
    266 
    267 #elif ARCH(Aarch64) || ARCH(RISCV)
/* Atomically add one to *PTR; returns the incremented (new) value.
   A full barrier (__ATOMIC_SEQ_CST) matches the __sync builtin semantics.  */
static __attribute__ ((always_inline)) inline uint32_t
__collector_inc_32 (volatile uint32_t *ptr)
{
  return __atomic_add_fetch (ptr, 1, __ATOMIC_SEQ_CST);
}
    273 
/* Atomically subtract one from *PTR; returns the decremented (new) value.
   A full barrier (__ATOMIC_SEQ_CST) matches the __sync builtin semantics.  */
static __attribute__ ((always_inline)) inline uint32_t
__collector_dec_32 (volatile uint32_t *ptr)
{
  return __atomic_sub_fetch (ptr, 1, __ATOMIC_SEQ_CST);
}
    279 
/* Atomically subtract OFF from *PTR; returns the new (post-subtraction)
   value.  Full-barrier semantics, same as the __sync builtin it mirrors.  */
static __attribute__ ((always_inline)) inline uint32_t
__collector_subget_32 (volatile uint32_t *ptr, uint32_t off)
{
  return __atomic_sub_fetch (ptr, off, __ATOMIC_SEQ_CST);
}
    285 
/* Atomic 32-bit compare-and-swap: if *PTR equals OLD it is replaced by
   NEW.  Returns the value *PTR held before the operation, whether or
   not the swap happened (the __sync_val_compare_and_swap contract).  */
static __attribute__ ((always_inline)) inline uint32_t
__collector_cas_32 (volatile uint32_t *ptr, uint32_t old, uint32_t new)
{
  uint32_t prior = old;
  /* On failure __atomic_compare_exchange_n stores the current value of
     *ptr into PRIOR; on success PRIOR keeps OLD — either way PRIOR is
     the pre-operation value.  */
  __atomic_compare_exchange_n (ptr, &prior, new, 0,
			       __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
  return prior;
}
    291 
/* Atomic 64-bit compare-and-swap through pointers: if *PTR equals *OLD
   it is replaced by *NEW.  Returns the pre-operation value of *PTR;
   *OLD and *NEW are only read.  */
static __attribute__ ((always_inline)) inline uint64_t
__collector_cas_64p (volatile uint64_t *ptr, uint64_t *old, uint64_t * new)
{
  uint64_t prior = *old;
  /* PRIOR is updated to the current value on failure, unchanged on
     success — in both cases it is the value *ptr held before the CAS.  */
  __atomic_compare_exchange_n (ptr, &prior, *new, 0,
			       __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
  return prior;
}
    297 
/* Atomic pointer-sized compare-and-swap: treats PTR as the address of a
   word; if that word equals OLD it becomes NEW.  Returns the word's
   pre-operation value as a pointer.  */
static __attribute__ ((always_inline)) inline void *
__collector_cas_ptr (void *ptr, void *old, void *new)
{
  unsigned long prior = (unsigned long) old;
  /* PRIOR ends up holding the word's value before the CAS, whether or
     not the exchange succeeded.  */
  __atomic_compare_exchange_n ((unsigned long *) ptr, &prior,
			       (unsigned long) new, 0,
			       __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
  return (void *) prior;
}
    303 
    304 #else
    305 extern void __collector_flushw (); /* defined for SPARC only */
    306 extern void* __collector_getpc ();
    307 extern void* __collector_getsp ();
    308 extern void* __collector_getfp ();
    309 extern void __collector_inc_32 (volatile uint32_t *);
    310 extern void __collector_dec_32 (volatile uint32_t *);
    311 extern void* __collector_cas_ptr (volatile void *, void *, void *);
    312 extern uint32_t __collector_cas_32 (volatile uint32_t *, uint32_t, uint32_t);
    313 extern uint32_t __collector_subget_32 (volatile uint32_t *, uint32_t);
    314 extern uint64_t __collector_cas_64p (volatile uint64_t *, uint64_t *, uint64_t *);
    315 #endif /* ARCH() */
    316 #endif /* _LIBCOL_UTIL_H */
    317