forked from mirror/ants
Improve the internal spin-lock
This commit is contained in:
parent
a2ad870d2d
commit
a71395c7c8
|
@ -1,4 +1,4 @@
|
||||||
// Copyright 2019 Andy Pan. All rights reserved.
|
// Copyright 2019 Andy Pan & Dietoad. All rights reserved.
|
||||||
// Use of this source code is governed by an MIT-style
|
// Use of this source code is governed by an MIT-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
@ -13,9 +13,13 @@ import (
|
||||||
// spinLock is a CAS-based spin lock; the zero value is unlocked.
type spinLock uint32

// maxBackoff caps the exponential backoff so a long contention episode
// never produces an unbounded burst of Gosched calls (and the left shift
// can never overflow).
const maxBackoff = 16

// Lock acquires the lock, spinning until the flag flips from 0 to 1.
// Each failed CAS round yields the processor `backoff` times, and
// backoff doubles per round (exponential backoff, capped at maxBackoff)
// so contending goroutines progressively back off instead of hammering
// the same cache line.
func (sl *spinLock) Lock() {
	backoff := 1
	for !atomic.CompareAndSwapUint32((*uint32)(sl), 0, 1) {
		// Leverage the exponential backoff algorithm, see
		// https://en.wikipedia.org/wiki/Exponential_backoff.
		for i := 0; i < backoff; i++ {
			runtime.Gosched()
		}
		if backoff < maxBackoff {
			backoff <<= 1
		}
	}
}
|
||||||
|
|
||||||
func (sl *spinLock) Unlock() {
|
func (sl *spinLock) Unlock() {
|
||||||
|
|
|
@ -0,0 +1,78 @@
|
||||||
|
// Copyright 2021 Andy Pan & Dietoad. All rights reserved.
|
||||||
|
// Use of this source code is governed by an MIT-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package internal
|
||||||
|
|
||||||
|
import (
|
||||||
|
"runtime"
|
||||||
|
"sync"
|
||||||
|
"sync/atomic"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
// originSpinLock is the pre-backoff spin lock used as the benchmark
// baseline: a bare CAS loop that yields the processor once per failed
// attempt. The zero value is unlocked.
type originSpinLock uint32

// Lock spins until the flag flips from 0 to 1, yielding between attempts.
func (sl *originSpinLock) Lock() {
	for {
		if atomic.CompareAndSwapUint32((*uint32)(sl), 0, 1) {
			return
		}
		runtime.Gosched()
	}
}

// Unlock releases the lock by resetting the flag to 0.
func (sl *originSpinLock) Unlock() {
	atomic.StoreUint32((*uint32)(sl), 0)
}

// GetOriginSpinLock hands back the baseline spin lock as a sync.Locker.
func GetOriginSpinLock() sync.Locker {
	return new(originSpinLock)
}
|
||||||
|
|
||||||
|
// backOffSpinLock is the candidate spin lock for the benchmark: a CAS
// loop with bounded exponential backoff between attempts. The zero
// value is unlocked.
type backOffSpinLock uint32

// Lock spins until it wins the CAS, yielding the processor `wait` times
// after every failed round. wait doubles per round but is capped so the
// yield burst stays bounded and the shift can never overflow; the
// uncapped `wait <<= 1` in the original eventually shifts to zero,
// degrading the loop into a hot CAS spin.
func (sl *backOffSpinLock) Lock() {
	const maxWait = 16 // cap for the exponential backoff
	wait := 1
	for !atomic.CompareAndSwapUint32((*uint32)(sl), 0, 1) {
		for i := 0; i < wait; i++ {
			runtime.Gosched()
		}
		if wait < maxWait {
			wait <<= 1
		}
	}
}

// Unlock releases the lock by resetting the flag to 0.
func (sl *backOffSpinLock) Unlock() {
	atomic.StoreUint32((*uint32)(sl), 0)
}

// GetBackOffSpinLock returns the backoff spin lock as a sync.Locker.
func GetBackOffSpinLock() sync.Locker {
	return new(backOffSpinLock)
}
|
||||||
|
|
||||||
|
// BenchmarkMutex measures sync.Mutex as the reference point for the
// spin-lock comparison: parallel goroutines each doing an empty
// lock/unlock cycle.
func BenchmarkMutex(b *testing.B) {
	var m sync.Mutex
	b.RunParallel(func(pb *testing.PB) {
		for pb.Next() {
			m.Lock()
			m.Unlock()
		}
	})
}
|
||||||
|
|
||||||
|
func BenchmarkSpinLock(b *testing.B) {
|
||||||
|
spin := GetOriginSpinLock()
|
||||||
|
b.RunParallel(func(pb *testing.PB) {
|
||||||
|
for pb.Next() {
|
||||||
|
spin.Lock()
|
||||||
|
spin.Unlock()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func BenchmarkBackOffSpinLock(b *testing.B) {
|
||||||
|
spin := GetBackOffSpinLock()
|
||||||
|
b.RunParallel(func(pb *testing.PB) {
|
||||||
|
for pb.Next() {
|
||||||
|
spin.Lock()
|
||||||
|
spin.Unlock()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
Loading…
Reference in New Issue