244 lines
6.5 KiB
C++
244 lines
6.5 KiB
C++
/*
	ZReferenceCounter.hpp

	Author: James Russell <jcrussell@762studios.com>

	Created: 3/22/2012

	Purpose:

	Defines a structure that is usually allocated on the heap and will maintain a strong
	reference count, a weak reference count, and an integer flag used to signal state. This
	reference counter is designed to be thread safe by using atomic operations.

	License:

	TODO

*/
|
|
|
|
#pragma once
|
|
|
|
#ifndef _ZREFERENCECOUNTER_HPP
|
|
#define _ZREFERENCECOUNTER_HPP
|
|
|
|
#include <SST/SST_Atomic.h>
|
|
|
|
#include <ZUtil/ZUtilBuild.hpp>
|
|
#include <ZUtil/ZAssert.hpp>
|
|
#include <ZUtil/ZAlloc.hpp>
|
|
|
|
//The leading (negative) bit is used to indicate deallocation on the State int
#define ZREFCOUNTER_DEALLOC_BIT (0x80000000)

/*
	ZReferenceCounter::CombinedCount is a 32-bit integer value containing
	2x16-bit integer values -- the strong and weak reference counts. They
	are stored as 0xWWWWSSSS, i.e. upper 16 bits are the weak reference
	count, and the lower 16 bits are the strong reference count.

	Do realize that this limits us to 65535 strong and 65535 weak references
	without error conditions. We find this to be an acceptable limit.
*/

//These define how much to shift by to affect the strong/weak counters
#define ZREFCOUNTER_STRONG_SHIFT (0)
#define ZREFCOUNTER_WEAK_SHIFT (16)

//These masks are what we will use to mask the upper half (weak) and lower half (strong) parts
//of the integer reference count.
//The 0xFFFFu literal is unsigned: shifting an unsigned value into bit 31 is well-defined,
//whereas (0xFFFF << 16) on a signed int overflows into the sign bit (UB before C++20).
#define ZREFCOUNTER_STRONG_MASK (0xFFFFu << ZREFCOUNTER_STRONG_SHIFT)
#define ZREFCOUNTER_WEAK_MASK (0xFFFFu << ZREFCOUNTER_WEAK_SHIFT)

//These values are used to add a single weak reference or a single strong reference to the
//reference count
#define ZREFCOUNTER_STRONG_REF (1 << ZREFCOUNTER_STRONG_SHIFT)
#define ZREFCOUNTER_WEAK_REF (1 << ZREFCOUNTER_WEAK_SHIFT)

//These macros extract the strong/weak reference counts respectively from the combined counts
#define ZREFCOUNTER_EXTRACT_STRONG_REF(x) (((x) >> ZREFCOUNTER_STRONG_SHIFT) & 0xFFFFu)
#define ZREFCOUNTER_EXTRACT_WEAK_REF(x) (((x) >> ZREFCOUNTER_WEAK_SHIFT) & 0xFFFFu)
|
|
|
|
/*
|
|
Reference counting struct. Keeps a reference count to an
|
|
object (both strong and weak), and a state signaling integer.
|
|
*/
|
|
struct ZReferenceCounter
|
|
{
|
|
/*
|
|
Integer we use for keeping track of strong and weak references within the
|
|
space of a single native integer. The upper half is used to determine
|
|
weak count. The lower half is used to determine strong count. This allows
|
|
us to use atomic operations on both the strong reference count and weak
|
|
reference count on architectures that do not have a DoubleCAS operation.
|
|
|
|
Must be unsigned so >> doesn't do arthimetic shift.
|
|
*/
|
|
volatile uint32_t CombinedCount;
|
|
|
|
/*
|
|
Flag used for signaling object usage state
|
|
|
|
> 0 - in use (number of users)
|
|
0 - not in use
|
|
< 0 - ready for deallocation (discounting deallocation bit gives number of users)
|
|
*/
|
|
volatile int State;
|
|
|
|
/*
|
|
Default constructor.
|
|
*/
|
|
ZReferenceCounter()
|
|
: CombinedCount(0), State(0) { }
|
|
|
|
/*
|
|
Destructor.
|
|
*/
|
|
~ZReferenceCounter()
|
|
{ }
|
|
|
|
/*
|
|
Using atomic operations, will increment the strong reference count.
|
|
*/
|
|
inline void GainStrongRef()
|
|
{
|
|
SST_Atomic_Add((volatile int*)&CombinedCount, ZREFCOUNTER_STRONG_REF);
|
|
}
|
|
|
|
/*
|
|
Using atomic operations, will increment the weak reference count.
|
|
*/
|
|
inline void GainWeakRef()
|
|
{
|
|
SST_Atomic_Add((volatile int*)&CombinedCount, ZREFCOUNTER_WEAK_REF);
|
|
}
|
|
|
|
/*
|
|
Gets the current strong reference count.
|
|
*/
|
|
inline uint32_t GetStrongRefCount()
|
|
{
|
|
return ZREFCOUNTER_EXTRACT_STRONG_REF(CombinedCount);
|
|
}
|
|
|
|
/*
|
|
Gets the current weak reference count.
|
|
*/
|
|
inline uint32_t GetWeakRefCount()
|
|
{
|
|
return ZREFCOUNTER_EXTRACT_WEAK_REF(CombinedCount);
|
|
}
|
|
|
|
/*
|
|
Using atomic operations, will decrement the strong reference count and
|
|
return the number of remaining references, both strong and weak, as part
|
|
of the same value.
|
|
|
|
To get the strong reference count, use ZREFCOUNTER_EXTRACT_STRONG_REF() on the return value.
|
|
|
|
To get the weak reference count, use ZREFCOUNTER_EXTRACT_WEAK_REF() on the return value.
|
|
*/
|
|
inline uint32_t LoseStrongRef()
|
|
{
|
|
return SST_Atomic_AddReturn((volatile int*)&CombinedCount, -ZREFCOUNTER_STRONG_REF);
|
|
}
|
|
|
|
/*
|
|
Using atomic operations, will decrement the weak reference count and
|
|
return the number of remaining references, both strong and weak, as part
|
|
of the same value.
|
|
|
|
To get the strong reference count, use ZREFCOUNTER_EXTRACT_STRONG_REF() on the return value.
|
|
|
|
To get the weak reference count, use ZREFCOUNTER_EXTRACT_WEAK_REF() on the return value.
|
|
*/
|
|
inline uint32_t LoseWeakRef()
|
|
{
|
|
return SST_Atomic_AddReturn((volatile int*)&CombinedCount, -ZREFCOUNTER_WEAK_REF);
|
|
}
|
|
|
|
/*
|
|
Using atomic operations, will set the state flag as 'deallocated' as soon as possible,
|
|
which will ensure anyone using 'SignalInUse' will get a 'false' return value.
|
|
*/
|
|
inline void SignalDeallocateObject()
|
|
{
|
|
//Check deallocated
|
|
if (State & ZREFCOUNTER_DEALLOC_BIT)
|
|
return;
|
|
|
|
//Set high bit
|
|
SST_Atomic_Or(&State, ZREFCOUNTER_DEALLOC_BIT);
|
|
|
|
//Wait logic
|
|
while ((State & ~ZREFCOUNTER_DEALLOC_BIT) != 0)
|
|
continue;
|
|
|
|
//State is guaranteed here to be == ZREFCOUNTER_DEALLOC_BIT
|
|
}
|
|
|
|
/*
|
|
Using atomic operations, will set the state flag as 'in use'. If unable to set
|
|
state flag as 'in use' because the object is deallocated, will return false.
|
|
|
|
This is important to weak reference holders to signal to strong reference holders that
|
|
the object should not be deallocated until the object is no longer in use.
|
|
*/
|
|
inline bool SignalInUse()
|
|
{
|
|
//Check deallocated
|
|
if (State & ZREFCOUNTER_DEALLOC_BIT)
|
|
return false;
|
|
|
|
int val = SST_Atomic_IncReturn(&State);
|
|
|
|
if (val & ZREFCOUNTER_DEALLOC_BIT)
|
|
{
|
|
SST_Atomic_Dec(&State); //Atomic decrement to ensure we don't starve the waiting deallocation thread
|
|
return false;
|
|
}
|
|
else
|
|
return true;
|
|
}
|
|
|
|
/*
|
|
Using atomic operations, will set the state flag as 'no longer in use'.
|
|
*/
|
|
inline void SignalUnused()
|
|
{
|
|
SST_Atomic_Dec(&State);
|
|
}
|
|
};
|
|
|
|
/*
|
|
Allocator class for ZReferenceCounter. Responsible for heap allocation and deletion of
|
|
ZReferenceCounter instances.
|
|
*/
|
|
class ZReferenceCounterAllocator
|
|
{
|
|
public:
|
|
/*
|
|
public ZSmartPointerAllocator::AllocateCounter
|
|
|
|
Allocation method that allocates a reference counter for use by the smart pointer.
|
|
|
|
@return (ZSmartPointerRefCounter*)
|
|
*/
|
|
ZReferenceCounter* AllocateCounter()
|
|
{
|
|
return znew ZReferenceCounter();
|
|
}
|
|
|
|
/*
|
|
public ZSmartPointerAllocator::DeallocateCounter
|
|
|
|
Deallocates a reference counter that was provided by AllocateCounter.
|
|
|
|
@param _counter - the counter to deallocate
|
|
*/
|
|
void DeallocateCounter(ZReferenceCounter* _counter)
|
|
{
|
|
zdelete _counter;
|
|
}
|
|
};
|
|
|
|
#endif
|