From 40842ae998ba587a0514327e805fc7936db76b3b Mon Sep 17 00:00:00 2001
From: Guillaume Quintard
Date: Wed, 30 Mar 2022 19:45:37 -0700
Subject: [PATCH] align vmb.h with the varnish version

---
 configure.ac |  2 ++
 src/vmb.h    | 52 ++++++++++++++++------------------------------------
 2 files changed, 18 insertions(+), 36 deletions(-)

diff --git a/configure.ac b/configure.ac
index a78fdbf..9b54203 100644
--- a/configure.ac
+++ b/configure.ac
@@ -13,6 +13,8 @@ AM_PROG_AR
 LT_PREREQ([2.2.6])
 LT_INIT([dlopen disable-static])
 
+AC_CHECK_HEADERS([stdatomic.h])
+
 AC_ARG_WITH([rst2man],
 	AS_HELP_STRING(
 		[--with-rst2man=PATH],
diff --git a/src/vmb.h b/src/vmb.h
index 9c148fb..2e3f525 100644
--- a/src/vmb.h
+++ b/src/vmb.h
@@ -4,6 +4,8 @@
  *
  * Author: Poul-Henning Kamp
  *
+ * SPDX-License-Identifier: BSD-2-Clause
+ *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
  * are met:
@@ -27,60 +29,38 @@
  *
  * Memory barriers
  *
- * XXX: It is utterly braindamaged, that no standard facility for this
- * XXX: is available. The "just use pthreads locking" excuse does not
- * XXX: make sense, and does not apply to two unthreaded programs sharing
- * XXX: a memory segment.
  */
 
 #ifndef VMB_H_INCLUDED
 #define VMB_H_INCLUDED
 
-#if defined(__FreeBSD__)
-#include <sys/param.h>
-#endif
-
-#if defined(__FreeBSD__) && __FreeBSD_version >= 800058
+#if defined(HAVE_STDATOMIC_H) && !defined(__FLEXELINT__)
 
-#include <sys/types.h>
-#include <machine/atomic.h>
-#define VMB() mb()
-#define VWMB() wmb()
-#define VRMB() rmb()
+# include <stdatomic.h>
+# define VWMB() atomic_thread_fence(memory_order_release)
+# define VRMB() atomic_thread_fence(memory_order_acquire)
 
 #elif defined(__amd64__) && defined(__GNUC__)
 
-#define VMB() __asm __volatile("mfence;" : : : "memory")
-#define VWMB() __asm __volatile("sfence;" : : : "memory")
-#define VRMB() __asm __volatile("lfence;" : : : "memory")
-
-#elif defined(__arm__)
-
-#define VMB()
-#define VWMB()
-#define VRMB()
+# define VWMB() __asm __volatile("sfence;" : : : "memory")
+# define VRMB() __asm __volatile("lfence;" : : : "memory")
 
 #elif defined(__i386__) && defined(__GNUC__)
 
-#define VMB() __asm __volatile("lock; addl $0,(%%esp)" : : : "memory")
-#define VWMB() __asm __volatile("lock; addl $0,(%%esp)" : : : "memory")
-#define VRMB() __asm __volatile("lock; addl $0,(%%esp)" : : : "memory")
+# define VWMB() __asm __volatile("lock; addl $0,(%%esp)" : : : "memory")
+# define VRMB() __asm __volatile("lock; addl $0,(%%esp)" : : : "memory")
 
 #elif defined(__sparc64__) && defined(__GNUC__)
 
-#define VMB() __asm__ __volatile__ ("membar #MemIssue": : :"memory")
-#define VWMB() VMB()
-#define VRMB() VMB()
+# define VWMB() __asm__ __volatile__ ("membar #MemIssue": : :"memory")
+# define VRMB() __asm__ __volatile__ ("membar #MemIssue": : :"memory")
 
 #else
 
-#define VMB_NEEDS_PTHREAD_WORKAROUND_THIS_IS_BAD_FOR_PERFORMANCE 1
-
-void vmb_pthread(void);
-
-#define VMB() vmb_pthread()
-#define VWMB() vmb_pthread()
-#define VRMB() vmb_pthread()
+# define VMB_NEEDS_PTHREAD_WORKAROUND_THIS_IS_BAD_FOR_PERFORMANCE 1
+ void vmb_pthread(void);
+# define VWMB() vmb_pthread()
+# define VRMB() vmb_pthread()
 
 #endif
 
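Reviewer note, not part of the patch: with the stdatomic branch selected, VWMB() is a release fence and VRMB() an acquire fence, and the pattern the surviving pair supports is publish/consume over shared memory. The writer issues VWMB() between filling a record and setting its "ready" flag; the reader issues VRMB() between seeing the flag and reading the record. A minimal sketch, assuming a config.h generated by this configure.ac (struct and function names below are made up for illustration):

    #include "config.h"  /* AC_CHECK_HEADERS([stdatomic.h]) defines HAVE_STDATOMIC_H here */
    #include "vmb.h"

    struct rec {
            int             payload;
            volatile int    ready;  /* publication flag, hypothetical layout */
    };

    /* writer side: fill the record, then publish it */
    static void
    publish(struct rec *r, int v)
    {
            r->payload = v;
            VWMB();         /* payload store ordered before the flag store */
            r->ready = 1;
    }

    /* reader side: observe the flag, then consume the record */
    static int
    consume(const struct rec *r)
    {
            if (!r->ready)
                    return (-1);
            VRMB();         /* flag load ordered before the payload load */
            return (r->payload);
    }

Translation units must include config.h (or otherwise define HAVE_STDATOMIC_H) before vmb.h; without it they silently fall back to the per-architecture branches, and on anything not listed there to the vmb_pthread() workaround.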