sync: move sync.atomic2 to sync.stdatomic, cleanup

pull/12987/head
Delyan Angelov 2021-12-28 10:12:40 +02:00
parent c1711b8f05
commit 730b2a9263
7 changed files with 162 additions and 188 deletions

View File

@ -1,88 +0,0 @@
module atomic2
/*
Implements the atomic operations. For now TCC does not support
the atomic versions on nix, so it uses locks to simulate the same behavior.
On Windows tcc can simulate with other atomic operations.
The @VEXEROOT/thirdparty/stdatomic folder contains compatibility header files
for stdatomic that support nix, windows and C++.
This implementation should be regarded as alpha stage and should be
tested further.
*/
#flag windows -I @VEXEROOT/thirdparty/stdatomic/win
#flag linux -I @VEXEROOT/thirdparty/stdatomic/nix
#flag darwin -I @VEXEROOT/thirdparty/stdatomic/nix
#flag freebsd -I @VEXEROOT/thirdparty/stdatomic/nix
#flag solaris -I @VEXEROOT/thirdparty/stdatomic/nix
$if linux {
$if tinyc {
$if amd64 {
// most Linux distributions have /usr/lib/libatomic.so, but Ubuntu uses gcc version specific dir
#flag -L/usr/lib/gcc/x86_64-linux-gnu/6
#flag -L/usr/lib/gcc/x86_64-linux-gnu/7
#flag -L/usr/lib/gcc/x86_64-linux-gnu/8
#flag -L/usr/lib/gcc/x86_64-linux-gnu/9
#flag -L/usr/lib/gcc/x86_64-linux-gnu/10
#flag -L/usr/lib/gcc/x86_64-linux-gnu/11
#flag -L/usr/lib/gcc/x86_64-linux-gnu/12
} $else $if arm64 {
#flag -L/usr/lib/gcc/aarch64-linux-gnu/6
#flag -L/usr/lib/gcc/aarch64-linux-gnu/7
#flag -L/usr/lib/gcc/aarch64-linux-gnu/8
#flag -L/usr/lib/gcc/aarch64-linux-gnu/9
#flag -L/usr/lib/gcc/aarch64-linux-gnu/10
#flag -L/usr/lib/gcc/aarch64-linux-gnu/11
#flag -L/usr/lib/gcc/aarch64-linux-gnu/12
}
#flag -latomic
}
}
#include <atomic.h>
// add_u64 adds provided delta as an atomic operation
pub fn add_u64(ptr &u64, delta int) bool {
res := C.atomic_fetch_add_u64(voidptr(ptr), delta)
return res == 0
}
// sub_u64 subtracts provided delta as an atomic operation
pub fn sub_u64(ptr &u64, delta int) bool {
res := C.atomic_fetch_sub_u64(voidptr(ptr), delta)
return res == 0
}
// add_i64 adds provided delta as an atomic operation
pub fn add_i64(ptr &i64, delta int) bool {
res := C.atomic_fetch_add_u64(voidptr(ptr), delta)
return res == 0
}
// sub_i64 subtracts the provided delta as an atomic operation
pub fn sub_i64(ptr &i64, delta int) bool {
res := C.atomic_fetch_sub_u64(voidptr(ptr), delta)
return res == 0
}
// atomic store/load operations have to be used when there might be another concurrent access
// atomically set a value
pub fn store_u64(ptr &u64, val u64) {
C.atomic_store_u64(voidptr(ptr), val)
}
// atomically get a value
pub fn load_u64(ptr &u64) u64 {
return C.atomic_load_u64(voidptr(ptr))
}
// atomically set a value
pub fn store_i64(ptr &i64, val i64) {
C.atomic_store_u64(voidptr(ptr), val)
}
// atomically get a value
pub fn load_i64(ptr &i64) i64 {
return i64(C.atomic_load_u64(voidptr(ptr)))
}

View File

@ -2,70 +2,9 @@ module sync
import time
import rand
import sync.stdatomic
$if windows {
#flag -I @VEXEROOT/thirdparty/stdatomic/win
} $else {
#flag -I @VEXEROOT/thirdparty/stdatomic/nix
}
$if linux {
$if tinyc {
$if amd64 {
// most Linux distributions have /usr/lib/libatomic.so, but Ubuntu uses gcc version specific dir
#flag -L/usr/lib/gcc/x86_64-linux-gnu/6
#flag -L/usr/lib/gcc/x86_64-linux-gnu/7
#flag -L/usr/lib/gcc/x86_64-linux-gnu/8
#flag -L/usr/lib/gcc/x86_64-linux-gnu/9
#flag -L/usr/lib/gcc/x86_64-linux-gnu/10
#flag -L/usr/lib/gcc/x86_64-linux-gnu/11
#flag -L/usr/lib/gcc/x86_64-linux-gnu/12
} $else $if arm64 {
#flag -L/usr/lib/gcc/aarch64-linux-gnu/6
#flag -L/usr/lib/gcc/aarch64-linux-gnu/7
#flag -L/usr/lib/gcc/aarch64-linux-gnu/8
#flag -L/usr/lib/gcc/aarch64-linux-gnu/9
#flag -L/usr/lib/gcc/aarch64-linux-gnu/10
#flag -L/usr/lib/gcc/aarch64-linux-gnu/11
#flag -L/usr/lib/gcc/aarch64-linux-gnu/12
}
#flag -latomic
}
}
#include <atomic.h>
// The following functions are actually generic in C
fn C.atomic_load_ptr(voidptr) voidptr
fn C.atomic_store_ptr(voidptr, voidptr)
fn C.atomic_compare_exchange_weak_ptr(voidptr, voidptr, voidptr) bool
fn C.atomic_compare_exchange_strong_ptr(voidptr, voidptr, voidptr) bool
fn C.atomic_exchange_ptr(voidptr, voidptr) voidptr
fn C.atomic_fetch_add_ptr(voidptr, voidptr) voidptr
fn C.atomic_fetch_sub_ptr(voidptr, voidptr) voidptr
fn C.atomic_load_u16(voidptr) u16
fn C.atomic_store_u16(voidptr, u16)
fn C.atomic_compare_exchange_weak_u16(voidptr, voidptr, u16) bool
fn C.atomic_compare_exchange_strong_u16(voidptr, voidptr, u16) bool
fn C.atomic_exchange_u16(voidptr, u16) u16
fn C.atomic_fetch_add_u16(voidptr, u16) u16
fn C.atomic_fetch_sub_u16(voidptr, u16) u16
fn C.atomic_load_u32(voidptr) u32
fn C.atomic_store_u32(voidptr, u32)
fn C.atomic_compare_exchange_weak_u32(voidptr, voidptr, u32) bool
fn C.atomic_compare_exchange_strong_u32(voidptr, voidptr, u32) bool
fn C.atomic_exchange_u32(voidptr, u32) u32
fn C.atomic_fetch_add_u32(voidptr, u32) u32
fn C.atomic_fetch_sub_u32(voidptr, u32) u32
fn C.atomic_load_u64(voidptr) u64
fn C.atomic_store_u64(voidptr, u64)
fn C.atomic_compare_exchange_weak_u64(voidptr, voidptr, u64) bool
fn C.atomic_compare_exchange_strong_u64(voidptr, voidptr, u64) bool
fn C.atomic_exchange_u64(voidptr, u64) u64
fn C.atomic_fetch_add_u64(voidptr, u64) u64
fn C.atomic_fetch_sub_u64(voidptr, u64) u64
const aops_used = stdatomic.used
const (
// how often to try to get data without blocking before waiting for the semaphore

View File

@ -1,6 +1,6 @@
module sync
import sync.atomic2
import sync.stdatomic
pub struct ManyTimes {
mut:
@ -21,7 +21,7 @@ pub fn new_many_times(times u64) &ManyTimes {
// do execute the function only setting times.
pub fn (mut m ManyTimes) do(f fn ()) {
if atomic2.load_u64(&m.count) < m.times {
if stdatomic.load_u64(&m.count) < m.times {
m.do_slow(f)
}
}
@ -29,7 +29,7 @@ pub fn (mut m ManyTimes) do(f fn ()) {
fn (mut m ManyTimes) do_slow(f fn ()) {
m.m.@lock()
if m.count < m.times {
atomic2.store_u64(&m.count, m.count + 1)
stdatomic.store_u64(&m.count, m.count + 1)
f()
}
m.m.unlock()
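For reference, a minimal usage sketch of the ManyTimes API touched here (not part of this diff), assuming only the `new_many_times` constructor and `do` method shown above:
import sync

fn main() {
	mut m := sync.new_many_times(3)
	for _ in 0 .. 10 {
		m.do(fn () {
			println('ran')
		})
	}
	// `ran` is printed at most 3 times; the fast path is guarded by stdatomic.load_u64
}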

View File

@ -1,6 +1,6 @@
module sync
import sync.atomic2
import sync.stdatomic
pub struct Once {
mut:
@ -18,7 +18,7 @@ pub fn new_once() &Once {
// do execute the function only once.
pub fn (mut o Once) do(f fn ()) {
if atomic2.load_u64(&o.count) < 1 {
if stdatomic.load_u64(&o.count) < 1 {
o.do_slow(f)
}
}
@ -26,7 +26,7 @@ pub fn (mut o Once) do(f fn ()) {
fn (mut o Once) do_slow(f fn ()) {
o.m.@lock()
if o.count < 1 {
atomic2.store_u64(&o.count, 1)
stdatomic.store_u64(&o.count, 1)
f()
}
o.m.unlock()
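Similarly, a minimal usage sketch for Once (not part of this diff), using only the `new_once` constructor and `do` method shown above:
import sync

fn main() {
	mut once := sync.new_once()
	for _ in 0 .. 5 {
		once.do(fn () {
			println('initialized')
		})
	}
	// `initialized` is printed exactly once, even when `do` is called repeatedly
}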

View File

@ -0,0 +1,71 @@
module stdatomic
// The @VEXEROOT/thirdparty/stdatomic folder contains compatibility headers
// for stdatomic, that support nix, windows and C++.
$if windows {
#flag -I @VEXEROOT/thirdparty/stdatomic/win
} $else {
#flag -I @VEXEROOT/thirdparty/stdatomic/nix
}
$if linux {
$if tinyc {
$if amd64 {
// most Linux distributions have /usr/lib/libatomic.so,
// but Ubuntu uses gcc version specific dir
#flag -L/usr/lib/gcc/x86_64-linux-gnu/6
#flag -L/usr/lib/gcc/x86_64-linux-gnu/7
#flag -L/usr/lib/gcc/x86_64-linux-gnu/8
#flag -L/usr/lib/gcc/x86_64-linux-gnu/9
#flag -L/usr/lib/gcc/x86_64-linux-gnu/10
#flag -L/usr/lib/gcc/x86_64-linux-gnu/11
#flag -L/usr/lib/gcc/x86_64-linux-gnu/12
} $else $if arm64 {
#flag -L/usr/lib/gcc/aarch64-linux-gnu/6
#flag -L/usr/lib/gcc/aarch64-linux-gnu/7
#flag -L/usr/lib/gcc/aarch64-linux-gnu/8
#flag -L/usr/lib/gcc/aarch64-linux-gnu/9
#flag -L/usr/lib/gcc/aarch64-linux-gnu/10
#flag -L/usr/lib/gcc/aarch64-linux-gnu/11
#flag -L/usr/lib/gcc/aarch64-linux-gnu/12
}
#flag -latomic
}
}
#include <atomic.h>
// The following functions are actually generic in C
fn C.atomic_load_ptr(voidptr) voidptr
fn C.atomic_store_ptr(voidptr, voidptr)
fn C.atomic_compare_exchange_weak_ptr(voidptr, voidptr, voidptr) bool
fn C.atomic_compare_exchange_strong_ptr(voidptr, voidptr, voidptr) bool
fn C.atomic_exchange_ptr(voidptr, voidptr) voidptr
fn C.atomic_fetch_add_ptr(voidptr, voidptr) voidptr
fn C.atomic_fetch_sub_ptr(voidptr, voidptr) voidptr
fn C.atomic_load_u16(voidptr) u16
fn C.atomic_store_u16(voidptr, u16)
fn C.atomic_compare_exchange_weak_u16(voidptr, voidptr, u16) bool
fn C.atomic_compare_exchange_strong_u16(voidptr, voidptr, u16) bool
fn C.atomic_exchange_u16(voidptr, u16) u16
fn C.atomic_fetch_add_u16(voidptr, u16) u16
fn C.atomic_fetch_sub_u16(voidptr, u16) u16
fn C.atomic_load_u32(voidptr) u32
fn C.atomic_store_u32(voidptr, u32)
fn C.atomic_compare_exchange_weak_u32(voidptr, voidptr, u32) bool
fn C.atomic_compare_exchange_strong_u32(voidptr, voidptr, u32) bool
fn C.atomic_exchange_u32(voidptr, u32) u32
fn C.atomic_fetch_add_u32(voidptr, u32) u32
fn C.atomic_fetch_sub_u32(voidptr, u32) u32
fn C.atomic_load_u64(voidptr) u64
fn C.atomic_store_u64(voidptr, u64)
fn C.atomic_compare_exchange_weak_u64(voidptr, voidptr, u64) bool
fn C.atomic_compare_exchange_strong_u64(voidptr, voidptr, u64) bool
fn C.atomic_exchange_u64(voidptr, u64) u64
fn C.atomic_fetch_add_u64(voidptr, u64) u64
fn C.atomic_fetch_sub_u64(voidptr, u64) u64
pub const used = 1
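The compare-and-swap bindings declared above are not wrapped by any V function in this commit; a hedged sketch of such a wrapper (hypothetical name `compare_and_swap_u64`, assuming the thirdparty headers follow C11 compare_exchange semantics) could look like:
// hypothetical helper, not part of this commit
fn compare_and_swap_u64(ptr &u64, expected u64, desired u64) bool {
	mut e := expected
	// stores `desired` into *ptr and returns true only if *ptr still held `expected`
	return C.atomic_compare_exchange_strong_u64(voidptr(ptr), voidptr(&e), desired)
}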

View File

@ -0,0 +1,54 @@
module stdatomic
// Implements the atomic operations. For now TCC does not support the atomic
// versions on nix, so it uses locks to simulate the same behavior.
//
// On Windows tcc can simulate with other atomic operations.
//
// NB: this implementation should be regarded as alpha stage and be tested
// much more.
// add_u64 adds the provided delta as an atomic operation
pub fn add_u64(ptr &u64, delta int) bool {
res := C.atomic_fetch_add_u64(voidptr(ptr), delta)
return res == 0
}
// sub_u64 subtracts the provided delta as an atomic operation
pub fn sub_u64(ptr &u64, delta int) bool {
res := C.atomic_fetch_sub_u64(voidptr(ptr), delta)
return res == 0
}
// add_i64 adds the provided delta as an atomic operation
pub fn add_i64(ptr &i64, delta int) bool {
res := C.atomic_fetch_add_u64(voidptr(ptr), delta)
return res == 0
}
// sub_i64 subtracts the provided delta as an atomic operation
pub fn sub_i64(ptr &i64, delta int) bool {
res := C.atomic_fetch_sub_u64(voidptr(ptr), delta)
return res == 0
}
// atomic store/load operations have to be used when there might be another concurrent access
// atomically set a value
pub fn store_u64(ptr &u64, val u64) {
C.atomic_store_u64(voidptr(ptr), val)
}
// atomically get a value
pub fn load_u64(ptr &u64) u64 {
return C.atomic_load_u64(voidptr(ptr))
}
// atomically set a value
pub fn store_i64(ptr &i64, val i64) {
C.atomic_store_u64(voidptr(ptr), val)
}
// atomically get a value
pub fn load_i64(ptr &i64) i64 {
return i64(C.atomic_load_u64(voidptr(ptr)))
}
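A minimal usage sketch of the relocated module (the same pattern the test file below exercises):
import sync.stdatomic

fn main() {
	mut c := u64(0)
	stdatomic.add_u64(&c, 5)
	stdatomic.sub_u64(&c, 2)
	println(stdatomic.load_u64(&c)) // 3
}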

View File

@ -1,9 +1,7 @@
import sync.atomic2
import sync
import sync.stdatomic
const (
iterations_per_cycle = 100_000
)
const iterations_per_cycle = 100_000
struct Counter {
mut:
@ -38,60 +36,60 @@ fn test_count_10_times_1_cycle_should_not_be_10_cycles_without_sync() {
eprintln('without synchronization the counter is: ${counter.counter:10} , expectedly != ${desired_iterations:10}')
}
fn test_count_plus_one_u64() {
fn test_atomic_count_plus_one_u64() {
mut c := u64(0)
atomic2.add_u64(&c, 1)
assert atomic2.load_u64(&c) == 1
stdatomic.add_u64(&c, 1)
assert stdatomic.load_u64(&c) == 1
}
fn test_count_plus_one_i64() {
fn test_atomic_count_plus_one_i64() {
mut c := i64(0)
atomic2.add_i64(&c, 1)
assert atomic2.load_i64(&c) == 1
stdatomic.add_i64(&c, 1)
assert stdatomic.load_i64(&c) == 1
}
fn test_count_plus_greater_than_one_u64() {
fn test_atomic_count_plus_greater_than_one_u64() {
mut c := u64(0)
atomic2.add_u64(&c, 10)
assert atomic2.load_u64(&c) == 10
stdatomic.add_u64(&c, 10)
assert stdatomic.load_u64(&c) == 10
}
fn test_count_plus_greater_than_one_i64() {
fn test_atomic_count_plus_greater_than_one_i64() {
mut c := i64(0)
atomic2.add_i64(&c, 10)
assert atomic2.load_i64(&c) == 10
stdatomic.add_i64(&c, 10)
assert stdatomic.load_i64(&c) == 10
}
fn test_count_minus_one_u64() {
fn test_atomic_count_minus_one_u64() {
mut c := u64(1)
atomic2.sub_u64(&c, 1)
assert atomic2.load_u64(&c) == 0
stdatomic.sub_u64(&c, 1)
assert stdatomic.load_u64(&c) == 0
}
fn test_count_minus_one_i64() {
fn test_atomic_count_minus_one_i64() {
mut c := i64(0)
atomic2.sub_i64(&c, 1)
assert atomic2.load_i64(&c) == -1
stdatomic.sub_i64(&c, 1)
assert stdatomic.load_i64(&c) == -1
}
fn test_count_minus_greater_than_one_u64() {
fn test_atomic_count_minus_greater_than_one_u64() {
mut c := u64(0)
atomic2.store_u64(&c, 10)
atomic2.sub_u64(&c, 10)
assert atomic2.load_u64(&c) == 0
stdatomic.store_u64(&c, 10)
stdatomic.sub_u64(&c, 10)
assert stdatomic.load_u64(&c) == 0
}
fn test_count_minus_greater_than_one_i64() {
fn test_atomic_count_minus_greater_than_one_i64() {
mut c := i64(0)
atomic2.store_i64(&c, 10)
atomic2.sub_i64(&c, 20)
assert atomic2.load_i64(&c) == -10
stdatomic.store_i64(&c, 10)
stdatomic.sub_i64(&c, 20)
assert stdatomic.load_i64(&c) == -10
}
// count_one_cycle counts the common counter iterations_per_cycle times in a thread-safe way
fn count_one_cycle(mut counter Counter, mut group sync.WaitGroup) {
for i := 0; i < iterations_per_cycle; i++ {
atomic2.add_u64(&counter.counter, 1)
stdatomic.add_u64(&counter.counter, 1)
}
group.done()
}