vm: switch atomic::add to atomic::fetch_add

Win32 only provides fetch-and-add, not add-and-fetch, and we don't really care. Rename the functions so the order of operations is clear.
db4
Joe Groff 2011-11-07 17:58:53 -08:00
parent 0856bb5711
commit 212d012cb1
5 changed files with 38 additions and 38 deletions

View File

@@ -17,25 +17,25 @@ namespace factor {
(LONG)new_val) == (LONG)old_val;
}
__forceinline static cell add(volatile cell *ptr, cell val)
__forceinline static cell fetch_add(volatile cell *ptr, cell val)
{
return (cell)InterlockedAdd(
return (cell)InterlockedExchangeAdd(
reinterpret_cast<volatile LONG *>(ptr), (LONG)val);
}
__forceinline static fixnum add(volatile fixnum *ptr, fixnum val)
__forceinline static fixnum fetch_add(volatile fixnum *ptr, fixnum val)
{
return (fixnum)InterlockedAdd(
return (fixnum)InterlockedExchangeAdd(
reinterpret_cast<volatile LONG *>(ptr), (LONG)val);
}
__forceinline static cell subtract(volatile cell *ptr, cell val)
__forceinline static cell fetch_subtract(volatile cell *ptr, cell val)
{
return (cell)InterlockedAdd(
return (cell)InterlockedExchangeAdd(
reinterpret_cast<volatile LONG *>(ptr), -(LONG)val);
}
__forceinline static fixnum subtract(volatile fixnum *ptr, fixnum val)
__forceinline static fixnum fetch_subtract(volatile fixnum *ptr, fixnum val)
{
return (fixnum)InterlockedAdd(
return (fixnum)InterlockedExchangeAdd(
reinterpret_cast<volatile LONG *>(ptr), -(LONG)val);
}

View File

@@ -17,25 +17,25 @@ namespace factor {
(LONG64)new_val) == (LONG64)old_val;
}
__forceinline static cell add(volatile cell *ptr, cell val)
__forceinline static cell fetch_add(volatile cell *ptr, cell val)
{
return (cell)InterlockedAdd64(
return (cell)InterlockedExchangeAdd64(
reinterpret_cast<volatile LONG64 *>(ptr), (LONG64)val);
}
__forceinline static fixnum add(volatile fixnum *ptr, fixnum val)
__forceinline static fixnum fetch_add(volatile fixnum *ptr, fixnum val)
{
return (fixnum)InterlockedAdd64(
return (fixnum)InterlockedExchangeAdd64(
reinterpret_cast<volatile LONG64 *>(ptr), (LONG64)val);
}
__forceinline static cell subtract(volatile cell *ptr, cell val)
__forceinline static cell fetch_subtract(volatile cell *ptr, cell val)
{
return (cell)InterlockedAdd64(
return (cell)InterlockedExchangeAdd64(
reinterpret_cast<volatile LONG64 *>(ptr), -(LONG64)val);
}
__forceinline static fixnum subtract(volatile fixnum *ptr, fixnum val)
__forceinline static fixnum fetch_subtract(volatile fixnum *ptr, fixnum val)
{
return (fixnum)InterlockedAdd64(
return (fixnum)InterlockedExchangeAdd64(
reinterpret_cast<volatile LONG64 *>(ptr), -(LONG64)val);
}

View File

@@ -13,25 +13,25 @@ namespace factor {
}
__attribute__((always_inline))
inline static cell add(volatile cell *ptr, cell val)
inline static cell fetch_add(volatile cell *ptr, cell val)
{
return __sync_add_and_fetch(ptr, val);
return __sync_fetch_and_add(ptr, val);
}
__attribute__((always_inline))
inline static fixnum add(volatile fixnum *ptr, fixnum val)
inline static fixnum fetch_add(volatile fixnum *ptr, fixnum val)
{
return __sync_add_and_fetch(ptr, val);
return __sync_fetch_and_add(ptr, val);
}
__attribute__((always_inline))
inline static cell subtract(volatile cell *ptr, cell val)
inline static cell fetch_subtract(volatile cell *ptr, cell val)
{
return __sync_sub_and_fetch(ptr, val);
return __sync_fetch_and_sub(ptr, val);
}
__attribute__((always_inline))
inline static fixnum subtract(volatile fixnum *ptr, fixnum val)
inline static fixnum fetch_subtract(volatile fixnum *ptr, fixnum val)
{
return __sync_sub_and_fetch(ptr, val);
return __sync_fetch_and_sub(ptr, val);
}
__attribute__((always_inline))

View File

@@ -22,14 +22,14 @@ jit::jit(code_block_type type_, cell owner_, factor_vm *vm)
offset(0),
parent(vm)
{
fixnum count = atomic::add(&parent->current_jit_count, 1);
assert(count >= 1);
fixnum old_count = atomic::fetch_add(&parent->current_jit_count, 1);
assert(old_count >= 0);
}
jit::~jit()
{
fixnum count = atomic::subtract(&parent->current_jit_count, 1);
assert(count >= 0);
fixnum old_count = atomic::fetch_subtract(&parent->current_jit_count, 1);
assert(old_count >= 1);
}
void jit::emit_relocation(cell relocation_template_)

View File

@@ -12,11 +12,11 @@ profiling_sample_count profiling_sample_count::record_counts() volatile
jit_sample_count,
foreign_sample_count,
foreign_thread_sample_count);
atomic::subtract(&sample_count, returned.sample_count);
atomic::subtract(&gc_sample_count, returned.gc_sample_count);
atomic::subtract(&jit_sample_count, returned.jit_sample_count);
atomic::subtract(&foreign_sample_count, returned.foreign_sample_count);
atomic::subtract(&foreign_thread_sample_count, returned.foreign_thread_sample_count);
atomic::fetch_subtract(&sample_count, returned.sample_count);
atomic::fetch_subtract(&gc_sample_count, returned.gc_sample_count);
atomic::fetch_subtract(&jit_sample_count, returned.jit_sample_count);
atomic::fetch_subtract(&foreign_sample_count, returned.foreign_sample_count);
atomic::fetch_subtract(&foreign_thread_sample_count, returned.foreign_thread_sample_count);
return returned;
}
@@ -156,16 +156,16 @@ void factor_vm::enqueue_safepoint_sample(cell samples, cell pc, bool foreign_thr
{
if (atomic::load(&sampling_profiler_p))
{
atomic::add(&safepoint_sample_counts.sample_count, samples);
atomic::fetch_add(&safepoint_sample_counts.sample_count, samples);
if (foreign_thread_p)
atomic::add(&safepoint_sample_counts.foreign_thread_sample_count, samples);
atomic::fetch_add(&safepoint_sample_counts.foreign_thread_sample_count, samples);
else {
if (atomic::load(&current_gc_p))
atomic::add(&safepoint_sample_counts.gc_sample_count, samples);
atomic::fetch_add(&safepoint_sample_counts.gc_sample_count, samples);
if (atomic::load(&current_jit_count) > 0)
atomic::add(&safepoint_sample_counts.jit_sample_count, samples);
atomic::fetch_add(&safepoint_sample_counts.jit_sample_count, samples);
if (!code->seg->in_segment_p(pc))
atomic::add(&safepoint_sample_counts.foreign_sample_count, samples);
atomic::fetch_add(&safepoint_sample_counts.foreign_sample_count, samples);
}
code->guard_safepoint();
}