Browse Source

debug sparkstack

master
gjj 10 months ago
parent
commit
6c06abe48e
  1. 163
      algoim/sparkstack.hpp

163
algoim/sparkstack.hpp

@@ -10,99 +10,94 @@
namespace algoim namespace algoim
{ {
template<typename T, int N> template <typename T, int N>
class xarray; class xarray;
template<typename T> template <typename T>
class SparkStack class SparkStack
{ {
static constexpr size_t capacity = 1u << 23; static constexpr size_t capacity = 1u << 23;
static constexpr int capacity_line = __LINE__ - 1; static constexpr int capacity_line = __LINE__ - 1;
template<typename ...R>
static size_t alloc(T** ptr, size_t len, R... rest)
{
if (pos() + len > capacity)
{
std::cerr << "SparkStack<T = " << typeid(T).name() << ">: capacity=" << capacity << " and pos=" << pos() << " insufficient for request len=" << len << '\n';
std::cerr << " consider increasing const 'capacity', defined on line " << capacity_line << " in file " << __FILE__ << '\n';
throw std::bad_alloc();
}
*ptr = base() + pos();
pos() += len;
if constexpr (sizeof...(rest) == 0)
return len;
else
return len + alloc(rest...);
}
static T* base() template <typename... R>
{ static size_t alloc(T** ptr, size_t len, R... rest)
static thread_local std::vector<T> buff(capacity); {
return buff.data(); if (pos() + len > capacity) {
std::cerr << "SparkStack<T = " << typeid(T).name() << ">: capacity=" << capacity << " and pos=" << pos()
<< " insufficient for request len=" << len << '\n';
std::cerr << " consider increasing const 'capacity', defined on line " << capacity_line << " in file "
<< __FILE__ << '\n';
throw std::bad_alloc();
} }
*ptr = base() + pos();
pos() += len;
if constexpr (sizeof...(rest) == 0)
return len;
else
return len + alloc(rest...);
}
static T* base()
{
static thread_local std::vector<T> buff(capacity);
return buff.data();
}
static ptrdiff_t& pos() static ptrdiff_t& pos()
{ {
static thread_local ptrdiff_t pos_ = 0; static thread_local ptrdiff_t pos_ = 0;
return pos_; return pos_;
}; };
size_t len_;
SparkStack(const SparkStack&) = delete;
SparkStack(SparkStack&&) = delete;
SparkStack& operator=(const SparkStack&) = delete;
SparkStack& operator=(SparkStack&&) = delete;
public:
// With parameters x0, n0, x1, n1, x2, n2, ..., allocate n0 elements and assign to x0, etc.
template<typename ...R>
explicit SparkStack(T** ptr, size_t len, R&&... rest)
{
len_ = alloc(ptr, len, rest...);
}
// With parameters value, x0, n0, x1, n1, x2, n2, ..., allocate n0 elements and assign to x0, ..., size_t len_;
// and assign the given value to all n0*n1*n2*... values allocated
template<typename ...R>
explicit SparkStack(T value, T** ptr, size_t len, R&&... rest)
{
T* start = base() + pos();
len_ = alloc(ptr, len, rest...);
for (int i = 0; i < len_; ++i)
*(start + i) = value;
}
// For each i, allocate ext(i) elements and assign to ptr(i) SparkStack(const SparkStack&) = delete;
template<int N> SparkStack(SparkStack&&) = delete;
explicit SparkStack(uvector<T*,N>& ptr, const uvector<int,N>& ext) SparkStack& operator=(const SparkStack&) = delete;
{ SparkStack& operator=(SparkStack&&) = delete;
len_ = 0;
for (int i = 0; i < N; ++i)
len_ += alloc(&ptr(i), ext(i));
}
// Allocate enough elements for one or more xarray's having pre-set extent public:
template<int ...N> // With parameters x0, n0, x1, n1, x2, n2, ..., allocate n0 elements and assign to x0, etc.
explicit SparkStack(xarray<T,N>&... a) template <typename... R>
{ explicit SparkStack(T** ptr, size_t len, R&&... rest)
len_ = (alloc(&a.data_, a.size()) + ...); {
} len_ = alloc(ptr, len, rest...);
}
// Release memory when the SparkStack object goes out of scope // With parameters value, x0, n0, x1, n1, x2, n2, ..., allocate n0 elements and assign to x0, ...,
~SparkStack() // and assign the given value to all n0*n1*n2*... values allocated
{ template <typename... R>
pos() -= len_; explicit SparkStack(T value, T** ptr, size_t len, R&&... rest)
} {
}; T* start = base() + pos();
len_ = alloc(ptr, len, rest...);
for (int i = 0; i < len_; ++i) *(start + i) = value;
}
// For each i, allocate ext(i) elements and assign to ptr(i)
template <int N>
explicit SparkStack(uvector<T*, N>& ptr, const uvector<int, N>& ext)
{
len_ = 0;
for (int i = 0; i < N; ++i) len_ += alloc(&ptr(i), ext(i));
}
#define algoim_CONCAT2(x, y) x ## y // Allocate enough elements for one or more xarray's having pre-set extent
#define algoim_CONCAT(x, y) algoim_CONCAT2(x, y) template <int... N>
#define algoim_spark_alloc(T, ...) SparkStack<T> algoim_CONCAT(spark_alloc_var_, __LINE__)(__VA_ARGS__) explicit SparkStack(xarray<T, N>&... a)
#define algoim_spark_alloc_def(T, val, ...) SparkStack<T> algoim_CONCAT(spark_alloc_var_, __LINE__)(val, __VA_ARGS__) {
#define algoim_spark_alloc_vec(T, ptr, ext) SparkStack<T> algoim_CONCAT(spark_alloc_var_, __LINE__)(ptr, ext) len_ = (alloc(&a.data_, a.size()) + ...);
}
// Release memory when the SparkStack object goes out of scope
~SparkStack() { pos() -= len_; }
};
#define algoim_CONCAT2(x, y) x##y
#define algoim_CONCAT(x, y) algoim_CONCAT2(x, y)
#define algoim_spark_alloc(T, ...) SparkStack<T> algoim_CONCAT(spark_alloc_var_, __LINE__)(__VA_ARGS__)
#define algoim_spark_alloc_def(T, val, ...) SparkStack<T> algoim_CONCAT(spark_alloc_var_, __LINE__)(val, __VA_ARGS__)
#define algoim_spark_alloc_vec(T, ptr, ext) SparkStack<T> algoim_CONCAT(spark_alloc_var_, __LINE__)(ptr, ext)
} // namespace algoim } // namespace algoim

Loading…
Cancel
Save