author | gVisor bot <gvisor-bot@google.com> | 2019-06-02 06:44:55 +0000
---|---|---
committer | gVisor bot <gvisor-bot@google.com> | 2019-06-02 06:44:55 +0000
commit | ceb0d792f328d1fc0692197d8856a43c3936a571 (patch) |
tree | 83155f302eff44a78bcc30a3a08f4efe59a79379 /pkg/refs |
parent | deb7ecf1e46862d54f4b102f2d163cfbcfc37f3b (diff) |
parent | 216da0b733dbed9aad9b2ab92ac75bcb906fd7ee (diff) |
Merge 216da0b7 (automated)
Diffstat (limited to 'pkg/refs')
-rw-r--r-- | pkg/refs/refcounter.go | 303
-rw-r--r-- | pkg/refs/refcounter_state.go | 35
-rwxr-xr-x | pkg/refs/refs_state_autogen.go | 77
-rwxr-xr-x | pkg/refs/weak_ref_list.go | 173
4 files changed, 588 insertions, 0 deletions
diff --git a/pkg/refs/refcounter.go b/pkg/refs/refcounter.go
new file mode 100644
index 000000000..20f515391
--- /dev/null
+++ b/pkg/refs/refcounter.go
@@ -0,0 +1,303 @@
+// Copyright 2018 The gVisor Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package refs defines an interface for reference counted objects. It
+// also provides a drop-in implementation called AtomicRefCount.
+package refs
+
+import (
+	"reflect"
+	"sync"
+	"sync/atomic"
+)
+
+// RefCounter is the interface to be implemented by objects that are reference
+// counted.
+type RefCounter interface {
+	// IncRef increments the reference counter on the object.
+	IncRef()
+
+	// DecRef decrements the reference counter on the object.
+	//
+	// Note that AtomicRefCounter.DecRef() does not support destructors.
+	// If a type has a destructor, it must implement its own DecRef()
+	// method and call AtomicRefCounter.DecRefWithDestructor(destructor).
+	DecRef()
+
+	// TryIncRef attempts to increase the reference counter on the object,
+	// but may fail if all references have already been dropped. This
+	// should be used only in special circumstances, such as WeakRefs.
+	TryIncRef() bool
+
+	// addWeakRef adds the given weak reference. Note that you should have a
+	// reference to the object when calling this method.
+	addWeakRef(*WeakRef)
+
+	// dropWeakRef drops the given weak reference. Note that you should have
+	// a reference to the object when calling this method.
+	dropWeakRef(*WeakRef)
+}
+
+// A WeakRefUser is notified when the last non-weak reference is dropped.
+type WeakRefUser interface {
+	// WeakRefGone is called when the last non-weak reference is dropped.
+	WeakRefGone()
+}
+
+// WeakRef is a weak reference.
+//
+// +stateify savable
+type WeakRef struct {
+	weakRefEntry `state:"nosave"`
+
+	// obj is an atomic value that points to the refCounter.
+	obj atomic.Value `state:".(savedReference)"`
+
+	// user is notified when the weak ref is zapped by the object getting
+	// destroyed.
+	user WeakRefUser
+}
+
+// weakRefPool is a pool of weak references to avoid allocations on the hot path.
+var weakRefPool = sync.Pool{
+	New: func() interface{} {
+		return &WeakRef{}
+	},
+}
+
+// NewWeakRef acquires a weak reference for the given object.
+//
+// An optional user will be notified when the last non-weak reference is
+// dropped.
+//
+// Note that you must hold a reference to the object prior to getting a weak
+// reference. (But you may drop the non-weak reference after that.)
+func NewWeakRef(rc RefCounter, u WeakRefUser) *WeakRef {
+	w := weakRefPool.Get().(*WeakRef)
+	w.init(rc, u)
+	return w
+}
+
+// get attempts to get a normal reference to the underlying object, and returns
+// the object. If this weak reference has already been zapped (the object has
+// been destroyed) then false is returned. If the object still exists, then
+// true is returned.
+func (w *WeakRef) get() (RefCounter, bool) {
+	rc := w.obj.Load().(RefCounter)
+	if v := reflect.ValueOf(rc); v == reflect.Zero(v.Type()) {
+		// This pointer has already been zapped by zap() below. We do
+		// this to ensure that the GC can collect the underlying
+		// RefCounter objects and they don't hog resources.
+		return nil, false
+	}
+	if !rc.TryIncRef() {
+		return nil, true
+	}
+	return rc, true
+}
+
+// Get attempts to get a normal reference to the underlying object, and returns
+// the object. If this fails (the object no longer exists), then nil will be
+// returned instead.
+func (w *WeakRef) Get() RefCounter {
+	rc, _ := w.get()
+	return rc
+}
+
+// Drop drops this weak reference. You should always call drop when you are
+// finished with the weak reference. You may not use this object after calling
+// drop.
+func (w *WeakRef) Drop() {
+	rc, ok := w.get()
+	if !ok {
+		// We've been zapped already. When the refcounter has called
+		// zap, we're guaranteed it's not holding references.
+		weakRefPool.Put(w)
+		return
+	}
+	if rc == nil {
+		// The object is in the process of being destroyed. We can't
+		// remove this from the object's list, nor can we return this
+		// object to the pool. It'll just be garbage collected. This is
+		// a rare edge case, so it's not a big deal.
+		return
+	}
+
+	// At this point, we have a reference on the object. So destruction
+	// of the object (and zapping this weak reference) can't race here.
+	rc.dropWeakRef(w)
+
+	// And now we aren't on the object's list of weak references. So it won't
+	// zap us if this causes the reference count to drop to zero.
+	rc.DecRef()
+
+	// Return to the pool.
+	weakRefPool.Put(w)
+}
+
+// init initializes this weak reference.
+func (w *WeakRef) init(rc RefCounter, u WeakRefUser) {
+	// Reset the contents of the weak reference.
+	// This is important because we are resetting the atomic value type.
+	// Otherwise, we could panic here if obj is different than what it was
+	// the last time this was used.
+	*w = WeakRef{}
+	w.user = u
+	w.obj.Store(rc)
+
+	// In the load path, we may already have a nil value. So we need to
+	// check whether or not that is the case before calling addWeakRef.
+	if v := reflect.ValueOf(rc); v != reflect.Zero(v.Type()) {
+		rc.addWeakRef(w)
+	}
+}
+
+// zap zaps this weak reference.
+func (w *WeakRef) zap() {
+	// We need to be careful about types here.
+	// So reflect is involved. But it's not that bad.
+	rc := w.obj.Load()
+	typ := reflect.TypeOf(rc)
+	w.obj.Store(reflect.Zero(typ).Interface())
+}
+
+// AtomicRefCount keeps a reference count using atomic operations and calls the
+// destructor when the count reaches zero.
+//
+// N.B. To allow the zero-object to be initialized, the count is offset by
+// 1, that is, when refCount is n, there are really n+1 references.
+//
+// +stateify savable
+type AtomicRefCount struct {
+	// refCount is composed of two fields:
+	//
+	//	[32-bit speculative references]:[32-bit real references]
+	//
+	// Speculative references are used for TryIncRef, to avoid a
+	// CompareAndSwap loop. See IncRef, DecRef and TryIncRef for details of
+	// how these fields are used.
+	refCount int64
+
+	// mu protects the list below.
+	mu sync.Mutex `state:"nosave"`
+
+	// weakRefs is our collection of weak references.
+	weakRefs weakRefList `state:"nosave"`
+}
+
+// ReadRefs returns the current number of references. The returned count is
+// inherently racy and is unsafe to use without external synchronization.
+func (r *AtomicRefCount) ReadRefs() int64 {
+	// Account for the internal -1 offset on refcounts.
+	return atomic.LoadInt64(&r.refCount) + 1
+}
+
+// IncRef increments this object's reference count. While the count is kept
+// greater than zero, the destructor doesn't get called.
+//
+// The sanity check here is limited to real references, since if they have
+// dropped beneath zero then the object should have been destroyed.
+func (r *AtomicRefCount) IncRef() {
+	if v := atomic.AddInt64(&r.refCount, 1); v <= 0 {
+		panic("Incrementing non-positive ref count")
+	}
+}
+
+// TryIncRef attempts to increment the reference count, *unless the count has
+// already reached zero*. If false is returned, then the object has already
+// been destroyed, and the weak reference is no longer valid. If true is
+// returned then a valid reference is now held on the object.
+//
+// To do this safely without a loop, a speculative reference is first acquired
+// on the object. This allows multiple concurrent TryIncRef calls to
+// distinguish other TryIncRef calls from genuine references held.
+func (r *AtomicRefCount) TryIncRef() bool {
+	const speculativeRef = 1 << 32
+	v := atomic.AddInt64(&r.refCount, speculativeRef)
+	if int32(v) < 0 {
+		// This object has already been freed.
+		atomic.AddInt64(&r.refCount, -speculativeRef)
+		return false
+	}
+
+	// Turn into a real reference.
+	atomic.AddInt64(&r.refCount, -speculativeRef+1)
+	return true
+}
+
+// addWeakRef adds the given weak reference.
+func (r *AtomicRefCount) addWeakRef(w *WeakRef) {
+	r.mu.Lock()
+	r.weakRefs.PushBack(w)
+	r.mu.Unlock()
+}
+
+// dropWeakRef drops the given weak reference.
+func (r *AtomicRefCount) dropWeakRef(w *WeakRef) {
+	r.mu.Lock()
+	r.weakRefs.Remove(w)
+	r.mu.Unlock()
+}
+
+// DecRefWithDestructor decrements the object's reference count. If the
+// resulting count is negative and the destructor is not nil, then the
+// destructor will be called.
+//
+// Note that speculative references are counted here. Since they were added
+// prior to real references reaching zero, they will successfully convert to
+// real references. In other words, we see speculative references only in the
+// following case:
+//
+//	A: TryIncRef [speculative increase => sees non-negative references]
+//	B: DecRef [real decrease]
+//	A: TryIncRef [transform speculative to real]
+//
+func (r *AtomicRefCount) DecRefWithDestructor(destroy func()) {
+	switch v := atomic.AddInt64(&r.refCount, -1); {
+	case v < -1:
+		panic("Decrementing non-positive ref count")
+
+	case v == -1:
+		// Zap weak references. Note that at this point, all weak
+		// references are already invalid. That is, TryIncRef() will
+		// return false due to the reference count check.
+		r.mu.Lock()
+		for !r.weakRefs.Empty() {
+			w := r.weakRefs.Front()
+			// Capture the callback because w cannot be touched
+			// after it's zapped -- the owner is free to reuse it
+			// after that.
+			user := w.user
+			r.weakRefs.Remove(w)
+			w.zap()
+
+			if user != nil {
+				r.mu.Unlock()
+				user.WeakRefGone()
+				r.mu.Lock()
+			}
+		}
+		r.mu.Unlock()
+
+		// Call the destructor.
+		if destroy != nil {
+			destroy()
+		}
+	}
+}
+
+// DecRef decrements this object's reference count.
+func (r *AtomicRefCount) DecRef() {
+	r.DecRefWithDestructor(nil)
+}
diff --git a/pkg/refs/refcounter_state.go b/pkg/refs/refcounter_state.go
new file mode 100644
index 000000000..7c99fd2b5
--- /dev/null
+++ b/pkg/refs/refcounter_state.go
@@ -0,0 +1,35 @@
+// Copyright 2018 The gVisor Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package refs
+
+// +stateify savable
+type savedReference struct {
+	obj interface{}
+}
+
+func (w *WeakRef) saveObj() savedReference {
+	// We load the object directly, because it is typed. This will be
+	// serialized and loaded as a typed value.
+	return savedReference{w.obj.Load()}
+}
+
+func (w *WeakRef) loadObj(v savedReference) {
+	// See note above. This will be serialized and loaded typed. So we're okay
+	// as long as refs aren't changing during save and load (which they should
+	// not be).
+	//
+	// w.user is loaded before loadObj is called.
+	w.init(v.obj.(RefCounter), w.user)
+}
diff --git a/pkg/refs/refs_state_autogen.go b/pkg/refs/refs_state_autogen.go
new file mode 100755
index 000000000..cc788a4fd
--- /dev/null
+++ b/pkg/refs/refs_state_autogen.go
@@ -0,0 +1,77 @@
+// automatically generated by stateify.
+
+package refs
+
+import (
+	"gvisor.googlesource.com/gvisor/pkg/state"
+)
+
+func (x *WeakRef) beforeSave() {}
+func (x *WeakRef) save(m state.Map) {
+	x.beforeSave()
+	var obj savedReference = x.saveObj()
+	m.SaveValue("obj", obj)
+	m.Save("user", &x.user)
+}
+
+func (x *WeakRef) afterLoad() {}
+func (x *WeakRef) load(m state.Map) {
+	m.Load("user", &x.user)
+	m.LoadValue("obj", new(savedReference), func(y interface{}) { x.loadObj(y.(savedReference)) })
+}
+
+func (x *AtomicRefCount) beforeSave() {}
+func (x *AtomicRefCount) save(m state.Map) {
+	x.beforeSave()
+	m.Save("refCount", &x.refCount)
+}
+
+func (x *AtomicRefCount) afterLoad() {}
+func (x *AtomicRefCount) load(m state.Map) {
+	m.Load("refCount", &x.refCount)
+}
+
+func (x *savedReference) beforeSave() {}
+func (x *savedReference) save(m state.Map) {
+	x.beforeSave()
+	m.Save("obj", &x.obj)
+}
+
+func (x *savedReference) afterLoad() {}
+func (x *savedReference) load(m state.Map) {
+	m.Load("obj", &x.obj)
+}
+
+func (x *weakRefList) beforeSave() {}
+func (x *weakRefList) save(m state.Map) {
+	x.beforeSave()
+	m.Save("head", &x.head)
+	m.Save("tail", &x.tail)
+}
+
+func (x *weakRefList) afterLoad() {}
+func (x *weakRefList) load(m state.Map) {
+	m.Load("head", &x.head)
+	m.Load("tail", &x.tail)
+}
+
+func (x *weakRefEntry) beforeSave() {}
+func (x *weakRefEntry) save(m state.Map) {
+	x.beforeSave()
+	m.Save("next", &x.next)
+	m.Save("prev", &x.prev)
+}
+
+func (x *weakRefEntry) afterLoad() {}
+func (x *weakRefEntry) load(m state.Map) {
+	m.Load("next", &x.next)
+	m.Load("prev", &x.prev)
+}
+
+func init() {
+	state.Register("refs.WeakRef", (*WeakRef)(nil), state.Fns{Save: (*WeakRef).save, Load: (*WeakRef).load})
+	state.Register("refs.AtomicRefCount", (*AtomicRefCount)(nil), state.Fns{Save: (*AtomicRefCount).save, Load: (*AtomicRefCount).load})
+	state.Register("refs.savedReference", (*savedReference)(nil), state.Fns{Save: (*savedReference).save, Load: (*savedReference).load})
+	state.Register("refs.weakRefList", (*weakRefList)(nil), state.Fns{Save: (*weakRefList).save, Load: (*weakRefList).load})
+	state.Register("refs.weakRefEntry", (*weakRefEntry)(nil), state.Fns{Save: (*weakRefEntry).save, Load: (*weakRefEntry).load})
+}
diff --git a/pkg/refs/weak_ref_list.go b/pkg/refs/weak_ref_list.go
new file mode 100755
index 000000000..df8e98bf5
--- /dev/null
+++ b/pkg/refs/weak_ref_list.go
@@ -0,0 +1,173 @@
+package refs
+
+// ElementMapper provides an identity mapping by default.
+//
+// This can be replaced to provide a struct that maps elements to linker
+// objects, if they are not the same. An ElementMapper is not typically
+// required if: Linker is left as is, Element is left as is, or Linker and
+// Element are the same type.
+type weakRefElementMapper struct{}
+
+// linkerFor maps an Element to a Linker.
+//
+// This default implementation should be inlined.
+//
+//go:nosplit
+func (weakRefElementMapper) linkerFor(elem *WeakRef) *WeakRef { return elem }
+
+// List is an intrusive list. Entries can be added to or removed from the list
+// in O(1) time and with no additional memory allocations.
+//
+// The zero value for List is an empty list ready to use.
+//
+// To iterate over a list (where l is a List):
+//	for e := l.Front(); e != nil; e = e.Next() {
+//		// do something with e.
+//	}
+//
+// +stateify savable
+type weakRefList struct {
+	head *WeakRef
+	tail *WeakRef
+}
+
+// Reset resets list l to the empty state.
+func (l *weakRefList) Reset() {
+	l.head = nil
+	l.tail = nil
+}
+
+// Empty returns true iff the list is empty.
+func (l *weakRefList) Empty() bool {
+	return l.head == nil
+}
+
+// Front returns the first element of list l or nil.
+func (l *weakRefList) Front() *WeakRef {
+	return l.head
+}
+
+// Back returns the last element of list l or nil.
+func (l *weakRefList) Back() *WeakRef {
+	return l.tail
+}
+
+// PushFront inserts the element e at the front of list l.
+func (l *weakRefList) PushFront(e *WeakRef) {
+	weakRefElementMapper{}.linkerFor(e).SetNext(l.head)
+	weakRefElementMapper{}.linkerFor(e).SetPrev(nil)
+
+	if l.head != nil {
+		weakRefElementMapper{}.linkerFor(l.head).SetPrev(e)
+	} else {
+		l.tail = e
+	}
+
+	l.head = e
+}
+
+// PushBack inserts the element e at the back of list l.
+func (l *weakRefList) PushBack(e *WeakRef) {
+	weakRefElementMapper{}.linkerFor(e).SetNext(nil)
+	weakRefElementMapper{}.linkerFor(e).SetPrev(l.tail)
+
+	if l.tail != nil {
+		weakRefElementMapper{}.linkerFor(l.tail).SetNext(e)
+	} else {
+		l.head = e
+	}
+
+	l.tail = e
+}
+
+// PushBackList inserts list m at the end of list l, emptying m.
+func (l *weakRefList) PushBackList(m *weakRefList) {
+	if l.head == nil {
+		l.head = m.head
+		l.tail = m.tail
+	} else if m.head != nil {
+		weakRefElementMapper{}.linkerFor(l.tail).SetNext(m.head)
+		weakRefElementMapper{}.linkerFor(m.head).SetPrev(l.tail)
+
+		l.tail = m.tail
+	}
+
+	m.head = nil
+	m.tail = nil
+}
+
+// InsertAfter inserts e after b.
+func (l *weakRefList) InsertAfter(b, e *WeakRef) {
+	a := weakRefElementMapper{}.linkerFor(b).Next()
+	weakRefElementMapper{}.linkerFor(e).SetNext(a)
+	weakRefElementMapper{}.linkerFor(e).SetPrev(b)
+	weakRefElementMapper{}.linkerFor(b).SetNext(e)
+
+	if a != nil {
+		weakRefElementMapper{}.linkerFor(a).SetPrev(e)
+	} else {
+		l.tail = e
+	}
+}
+
+// InsertBefore inserts e before a.
+func (l *weakRefList) InsertBefore(a, e *WeakRef) {
+	b := weakRefElementMapper{}.linkerFor(a).Prev()
+	weakRefElementMapper{}.linkerFor(e).SetNext(a)
+	weakRefElementMapper{}.linkerFor(e).SetPrev(b)
+	weakRefElementMapper{}.linkerFor(a).SetPrev(e)
+
+	if b != nil {
+		weakRefElementMapper{}.linkerFor(b).SetNext(e)
+	} else {
+		l.head = e
+	}
+}
+
+// Remove removes e from l.
+func (l *weakRefList) Remove(e *WeakRef) {
+	prev := weakRefElementMapper{}.linkerFor(e).Prev()
+	next := weakRefElementMapper{}.linkerFor(e).Next()
+
+	if prev != nil {
+		weakRefElementMapper{}.linkerFor(prev).SetNext(next)
+	} else {
+		l.head = next
+	}
+
+	if next != nil {
+		weakRefElementMapper{}.linkerFor(next).SetPrev(prev)
+	} else {
+		l.tail = prev
+	}
+}
+
+// Entry is a default implementation of Linker. Users can add anonymous fields
+// of this type to their structs to make them automatically implement the
+// methods needed by List.
+//
+// +stateify savable
+type weakRefEntry struct {
+	next *WeakRef
+	prev *WeakRef
+}
+
+// Next returns the entry that follows e in the list.
+func (e *weakRefEntry) Next() *WeakRef {
+	return e.next
+}
+
+// Prev returns the entry that precedes e in the list.
+func (e *weakRefEntry) Prev() *WeakRef {
+	return e.prev
+}
+
+// SetNext assigns 'entry' as the entry that follows e in the list.
+func (e *weakRefEntry) SetNext(elem *WeakRef) {
+	e.next = elem
+}
+
+// SetPrev assigns 'entry' as the entry that precedes e in the list.
+func (e *weakRefEntry) SetPrev(elem *WeakRef) {
+	e.prev = elem
+}
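For context on how the new package is meant to be consumed, here is a minimal, hypothetical usage sketch based on the comments in refcounter.go above. It is not part of this commit: the `buffer` and `observer` types and the `main` function are invented for illustration, and the import path is simply the one used by the generated refs_state_autogen.go in this change.

```go
// Hypothetical example (not part of this commit): a reference-counted object
// that embeds AtomicRefCount and routes its destructor through DecRef, plus a
// weak-reference user that observes the final release.
package main

import (
	"fmt"

	"gvisor.googlesource.com/gvisor/pkg/refs"
)

// buffer is a toy reference-counted object. Embedding AtomicRefCount provides
// IncRef/TryIncRef and the weak-reference hooks needed to satisfy
// refs.RefCounter.
type buffer struct {
	refs.AtomicRefCount
	data []byte
}

// DecRef overrides the embedded DecRef so a destructor runs when the last
// real reference is dropped, as the RefCounter interface comment requires for
// types with destructors.
func (b *buffer) DecRef() {
	b.DecRefWithDestructor(func() {
		fmt.Println("buffer destroyed")
		b.data = nil
	})
}

// observer is notified when the last non-weak reference goes away.
type observer struct{}

func (observer) WeakRefGone() { fmt.Println("last strong reference dropped") }

func main() {
	// The zero AtomicRefCount already represents one reference (the count is
	// offset by 1), so a freshly constructed buffer is "held" by its creator.
	b := &buffer{data: make([]byte, 16)}

	// A weak reference may only be taken while a strong reference is held.
	w := refs.NewWeakRef(b, observer{})

	// Get returns nil once the object has been destroyed; otherwise it takes
	// a new strong reference that must be released with DecRef.
	if rc := w.Get(); rc != nil {
		rc.DecRef()
	}

	// Dropping the last strong reference zaps weak references, notifies the
	// WeakRefUser, and then runs the destructor.
	b.DecRef()

	// Always drop a weak reference when finished with it.
	w.Drop()
}
```

The TryIncRef/Drop pairing above follows the contract spelled out in the code comments: a weak reference is created only while a real reference is held, and Drop must always be called when the weak reference is no longer needed. The upper 32 "speculative" bits of refCount are what let TryIncRef detect an already-destroyed object without a compare-and-swap loop.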