Commit e4869845 authored by Giuseppe Ottaviano's avatar Giuseppe Ottaviano Committed by Facebook GitHub Bot

Outline some of Core.h guts

Summary: `Future.h` is widely included; `KeepAliveOrDeferred` and `DeferredExecutor` are not templates, we can move all the definitions to a cpp file.

Reviewed By: yfeldblum, luciang

Differential Revision: D22371899

fbshipit-source-id: 23a77b2e0560d82f6c3006597eef85d11527cb13
parent 64058698
...@@ -20,6 +20,229 @@ namespace folly { ...@@ -20,6 +20,229 @@ namespace folly {
namespace futures {
namespace detail {
// Deleter used by DeferredWrapper: dropping the wrapper releases one
// keep-alive reference on the DeferredExecutor (which may delete it).
void UniqueDeleter::operator()(DeferredExecutor* ptr) {
  if (ptr == nullptr) {
    return;
  }
  ptr->release();
}
// Default state: an empty first alternative of the variant storage.
KeepAliveOrDeferred::KeepAliveOrDeferred() = default;

// Wraps an executor keep-alive. In debug builds, verify the variant did
// not end up in deferred mode.
KeepAliveOrDeferred::KeepAliveOrDeferred(Executor::KeepAlive<> ka)
    : storage_{std::move(ka)} {
  DCHECK(!isDeferred());
}

// Wraps an owned DeferredExecutor reference.
KeepAliveOrDeferred::KeepAliveOrDeferred(DeferredWrapper deferred)
    : storage_{std::move(deferred)} {}

KeepAliveOrDeferred::KeepAliveOrDeferred(KeepAliveOrDeferred&& other) noexcept
    : storage_{std::move(other.storage_)} {}

KeepAliveOrDeferred::~KeepAliveOrDeferred() = default;

// Move-assignment delegates to the variant; the moved-from object is left
// holding its moved-from alternative.
KeepAliveOrDeferred& KeepAliveOrDeferred::operator=(
    KeepAliveOrDeferred&& other) {
  storage_ = std::move(other.storage_);
  return *this;
}
// Returns the held DeferredExecutor (non-owning), or nullptr when a
// keep-alive is stored instead.
DeferredExecutor* KeepAliveOrDeferred::getDeferredExecutor() const {
  return isDeferred() ? asDeferred().get() : nullptr;
}
// Returns the keep-alive's executor (non-owning), or nullptr when a
// deferred executor is stored instead.
Executor* KeepAliveOrDeferred::getKeepAliveExecutor() const {
  return isDeferred() ? nullptr : asKeepAlive().get();
}
// Moves the keep-alive out of this object; yields an empty KeepAlive when
// in deferred mode.
Executor::KeepAlive<> KeepAliveOrDeferred::stealKeepAlive() && {
  if (isKeepAlive()) {
    return std::move(asKeepAlive());
  }
  return Executor::KeepAlive<>{};
}
// Moves the deferred wrapper out of this object; yields an empty pointer
// when in keep-alive mode.
std::unique_ptr<DeferredExecutor, UniqueDeleter>
KeepAliveOrDeferred::stealDeferred() && {
  if (isDeferred()) {
    return std::move(asDeferred());
  }
  return {};
}
// The pointer form of boost::get probes the active variant alternative
// without throwing.
bool KeepAliveOrDeferred::isDeferred() const {
  const auto* deferred = boost::get<DeferredWrapper>(&storage_);
  return deferred != nullptr;
}
// Keep-alive mode is the complement of deferred mode: the variant holds
// exactly one of the two alternatives.
bool KeepAliveOrDeferred::isKeepAlive() const {
  return !isDeferred();
}
// Produces an independent copy: a keep-alive is copied directly, while a
// deferred executor gets an extra reference via DeferredExecutor::copy().
KeepAliveOrDeferred KeepAliveOrDeferred::copy() const {
  if (!isDeferred()) {
    return KeepAliveOrDeferred{asKeepAlive()};
  }
  auto* def = getDeferredExecutor();
  if (def == nullptr) {
    // Deferred alternative holding a null wrapper copies to empty.
    return KeepAliveOrDeferred{};
  }
  return KeepAliveOrDeferred{def->copy()};
}
// True when either a deferred executor or a non-null keep-alive is held.
/* explicit */ KeepAliveOrDeferred::operator bool() const {
  if (getDeferredExecutor() != nullptr) {
    return true;
  }
  return getKeepAliveExecutor() != nullptr;
}
// Unchecked accessors: the reference form of boost::get throws
// boost::bad_get when the requested alternative is not the active one, so
// callers must check isDeferred()/isKeepAlive() first.
Executor::KeepAlive<>& KeepAliveOrDeferred::asKeepAlive() {
  return boost::get<Executor::KeepAlive<>>(storage_);
}

const Executor::KeepAlive<>& KeepAliveOrDeferred::asKeepAlive() const {
  return boost::get<Executor::KeepAlive<>>(storage_);
}

DeferredWrapper& KeepAliveOrDeferred::asDeferred() {
  return boost::get<DeferredWrapper>(storage_);
}

const DeferredWrapper& KeepAliveOrDeferred::asDeferred() const {
  return boost::get<DeferredWrapper>(storage_);
}
// Dispatches `func` according to this DeferredExecutor's state:
//  * DETACHED: the consumer is gone; drop the function.
//  * HAS_EXECUTOR: run it now (inline when completing on that executor,
//    otherwise via executor_.add()).
//  * EMPTY: stash the function until setExecutor() is called, using a CAS
//    to resolve races with concurrent setExecutor()/detach().
void DeferredExecutor::addFrom(
    Executor::KeepAlive<>&& completingKA,
    Executor::KeepAlive<>::KeepAliveFunc func) {
  auto state = state_.load(std::memory_order_acquire);
  if (state == State::DETACHED) {
    return;
  }

  // If we are completing on the current executor, call inline, otherwise
  // add
  auto addWithInline =
      [&](Executor::KeepAlive<>::KeepAliveFunc&& addFunc) mutable {
        if (completingKA.get() == executor_.get()) {
          addFunc(std::move(completingKA));
        } else {
          executor_.copy().add(std::move(addFunc));
        }
      };

  if (state == State::HAS_EXECUTOR) {
    addWithInline(std::move(func));
    return;
  }

  DCHECK(state == State::EMPTY);
  // Publish the function first, then attempt EMPTY -> HAS_FUNCTION; the
  // release ordering makes func_ visible to whoever observes HAS_FUNCTION.
  func_ = std::move(func);
  if (folly::atomic_compare_exchange_strong_explicit(
          &state_,
          &state,
          State::HAS_FUNCTION,
          std::memory_order_release,
          std::memory_order_acquire)) {
    return;
  }

  // CAS failed: a concurrent detach() or setExecutor() won the race and
  // `state` now holds the value it installed.
  DCHECK(state == State::DETACHED || state == State::HAS_EXECUTOR);
  if (state == State::DETACHED) {
    // Nobody will run the stashed function; destroy it.
    std::exchange(func_, nullptr);
    return;
  }
  addWithInline(std::exchange(func_, nullptr));
}
// Returns the bound executor. Precondition: setExecutor() has already
// stored a non-null executor (checked in debug builds only).
Executor* DeferredExecutor::getExecutor() const {
  assert(executor_.get());
  return executor_.get();
}
// Binds the executor deferred work will run on. First propagates the
// executor to any nested deferred executors (consuming the list), then
// transitions EMPTY -> HAS_EXECUTOR via CAS; if a function was already
// stashed (HAS_FUNCTION), schedules it on the new executor now.
void DeferredExecutor::setExecutor(folly::Executor::KeepAlive<> executor) {
  if (nestedExecutors_) {
    auto nestedExecutors = std::exchange(nestedExecutors_, nullptr);
    for (auto& nestedExecutor : *nestedExecutors) {
      assert(nestedExecutor.get());
      nestedExecutor.get()->setExecutor(executor.copy());
    }
  }
  // Store the executor before publishing HAS_EXECUTOR (release CAS below).
  executor_ = std::move(executor);
  auto state = state_.load(std::memory_order_acquire);
  if (state == State::EMPTY &&
      folly::atomic_compare_exchange_strong_explicit(
          &state_,
          &state,
          State::HAS_EXECUTOR,
          std::memory_order_release,
          std::memory_order_acquire)) {
    return;
  }

  // A function was stashed (or raced in): take it and run it.
  DCHECK(state == State::HAS_FUNCTION);
  state_.store(State::HAS_EXECUTOR, std::memory_order_release);
  executor_.copy().add(std::exchange(func_, nullptr));
}
// Records nested deferred executors so that a later setExecutor() or
// detach() on this object is propagated to them. May be called at most
// once (debug-checked).
void DeferredExecutor::setNestedExecutors(
    std::vector<DeferredWrapper> executors) {
  DCHECK(!nestedExecutors_);
  nestedExecutors_ =
      std::make_unique<std::vector<DeferredWrapper>>(std::move(executors));
}
// Marks this executor as abandoned: detaches all nested executors, then
// transitions EMPTY -> DETACHED via CAS. If a function was already
// stashed (HAS_FUNCTION), it is destroyed without running.
void DeferredExecutor::detach() {
  if (nestedExecutors_) {
    auto nestedExecutors = std::exchange(nestedExecutors_, nullptr);
    for (auto& nestedExecutor : *nestedExecutors) {
      assert(nestedExecutor.get());
      nestedExecutor.get()->detach();
    }
  }
  auto state = state_.load(std::memory_order_acquire);
  if (state == State::EMPTY &&
      folly::atomic_compare_exchange_strong_explicit(
          &state_,
          &state,
          State::DETACHED,
          std::memory_order_release,
          std::memory_order_acquire)) {
    return;
  }

  // A function was stashed; drop it — a detached executor never runs work.
  DCHECK(state == State::HAS_FUNCTION);
  state_.store(State::DETACHED, std::memory_order_release);
  std::exchange(func_, nullptr);
}
// Creates another owning wrapper for this executor by taking one extra
// keep-alive reference; the returned DeferredWrapper owns that reference.
DeferredWrapper DeferredExecutor::copy() {
  acquire();
  return DeferredWrapper(this);
}
// Factory: the returned wrapper owns the initial keep-alive reference
// (keepAliveCount_ starts at 1), so no acquire() is needed here.
/* static */ DeferredWrapper DeferredExecutor::create() {
  return DeferredWrapper(new DeferredExecutor{});
}
DeferredExecutor::DeferredExecutor() {}
// Takes an additional keep-alive reference. Relaxed ordering suffices:
// the caller already holds a reference (debug-checked via the previous
// count), so the count cannot concurrently reach zero.
bool DeferredExecutor::acquire() {
  auto keepAliveCount = keepAliveCount_.fetch_add(1, std::memory_order_relaxed);
  DCHECK(keepAliveCount > 0);
  return true;
}
// Drops one keep-alive reference and deletes this object when the last
// one goes away. acq_rel ordering makes all prior writes visible to the
// thread that performs the delete.
void DeferredExecutor::release() {
  auto keepAliveCount = keepAliveCount_.fetch_sub(1, std::memory_order_acq_rel);
  DCHECK(keepAliveCount > 0);
  if (keepAliveCount == 1) {
    delete this;
  }
}
#if FOLLY_USE_EXTERN_FUTURE_UNIT
template class Core<folly::Unit>;
#endif
......
...@@ -91,87 +91,41 @@ using DeferredWrapper = std::unique_ptr<DeferredExecutor, UniqueDeleter>; ...@@ -91,87 +91,41 @@ using DeferredWrapper = std::unique_ptr<DeferredExecutor, UniqueDeleter>;
*/ */
class KeepAliveOrDeferred { class KeepAliveOrDeferred {
public: public:
KeepAliveOrDeferred(Executor::KeepAlive<> ka) : storage_{std::move(ka)} { KeepAliveOrDeferred();
DCHECK(!isDeferred()); /* implicit */ KeepAliveOrDeferred(Executor::KeepAlive<> ka);
} /* implicit */ KeepAliveOrDeferred(DeferredWrapper deferred);
KeepAliveOrDeferred(KeepAliveOrDeferred&& other) noexcept;
KeepAliveOrDeferred(DeferredWrapper deferred)
: storage_{std::move(deferred)} {}
KeepAliveOrDeferred() {}
~KeepAliveOrDeferred() {} ~KeepAliveOrDeferred();
KeepAliveOrDeferred(KeepAliveOrDeferred&& other) KeepAliveOrDeferred& operator=(KeepAliveOrDeferred&& other);
: storage_{std::move(other.storage_)} {}
KeepAliveOrDeferred& operator=(KeepAliveOrDeferred&& other) {
storage_ = std::move(other.storage_);
return *this;
}
DeferredExecutor* getDeferredExecutor() const { DeferredExecutor* getDeferredExecutor() const;
if (!isDeferred()) {
return nullptr;
}
return asDeferred().get();
}
Executor* getKeepAliveExecutor() const { Executor* getKeepAliveExecutor() const;
if (isDeferred()) {
return nullptr;
}
return asKeepAlive().get();
}
Executor::KeepAlive<> stealKeepAlive() && { Executor::KeepAlive<> stealKeepAlive() &&;
if (isDeferred()) {
return Executor::KeepAlive<>{};
}
return std::move(asKeepAlive());
}
std::unique_ptr<DeferredExecutor, UniqueDeleter> stealDeferred() && { std::unique_ptr<DeferredExecutor, UniqueDeleter> stealDeferred() &&;
if (!isDeferred()) {
return std::unique_ptr<DeferredExecutor, UniqueDeleter>{};
}
return std::move(asDeferred());
}
bool isDeferred() const { bool isDeferred() const;
return boost::get<DeferredWrapper>(&storage_) != nullptr;
}
bool isKeepAlive() const { bool isKeepAlive() const;
return !isDeferred();
}
KeepAliveOrDeferred copy() const; KeepAliveOrDeferred copy() const;
explicit operator bool() const { explicit operator bool() const;
return getDeferredExecutor() || getKeepAliveExecutor();
}
private: private:
boost::variant<DeferredWrapper, Executor::KeepAlive<>> storage_;
friend class DeferredExecutor; friend class DeferredExecutor;
Executor::KeepAlive<>& asKeepAlive() { Executor::KeepAlive<>& asKeepAlive();
return boost::get<Executor::KeepAlive<>>(storage_); const Executor::KeepAlive<>& asKeepAlive() const;
}
const Executor::KeepAlive<>& asKeepAlive() const { DeferredWrapper& asDeferred();
return boost::get<Executor::KeepAlive<>>(storage_); const DeferredWrapper& asDeferred() const;
}
DeferredWrapper& asDeferred() { boost::variant<DeferredWrapper, Executor::KeepAlive<>> storage_;
return boost::get<DeferredWrapper>(storage_);
}
const DeferredWrapper& asDeferred() const {
return boost::get<DeferredWrapper>(storage_);
}
}; };
/** /**
...@@ -186,135 +140,31 @@ class DeferredExecutor final { ...@@ -186,135 +140,31 @@ class DeferredExecutor final {
// * store func until an executor is set otherwise // * store func until an executor is set otherwise
void addFrom( void addFrom(
Executor::KeepAlive<>&& completingKA, Executor::KeepAlive<>&& completingKA,
Executor::KeepAlive<>::KeepAliveFunc func) { Executor::KeepAlive<>::KeepAliveFunc func);
auto state = state_.load(std::memory_order_acquire);
if (state == State::DETACHED) {
return;
}
// If we are completing on the current executor, call inline, otherwise Executor* getExecutor() const;
// add
auto addWithInline =
[&](Executor::KeepAlive<>::KeepAliveFunc&& addFunc) mutable {
if (completingKA.get() == executor_.get()) {
addFunc(std::move(completingKA));
} else {
executor_.copy().add(std::move(addFunc));
}
};
if (state == State::HAS_EXECUTOR) {
addWithInline(std::move(func));
return;
}
DCHECK(state == State::EMPTY);
func_ = std::move(func);
if (folly::atomic_compare_exchange_strong_explicit(
&state_,
&state,
State::HAS_FUNCTION,
std::memory_order_release,
std::memory_order_acquire)) {
return;
}
DCHECK(state == State::DETACHED || state == State::HAS_EXECUTOR);
if (state == State::DETACHED) {
std::exchange(func_, nullptr);
return;
}
addWithInline(std::exchange(func_, nullptr));
}
Executor* getExecutor() const { void setExecutor(folly::Executor::KeepAlive<> executor);
assert(executor_.get());
return executor_.get();
}
void setExecutor(folly::Executor::KeepAlive<> executor) {
if (nestedExecutors_) {
auto nestedExecutors = std::exchange(nestedExecutors_, nullptr);
for (auto& nestedExecutor : *nestedExecutors) {
assert(nestedExecutor.get());
nestedExecutor.get()->setExecutor(executor.copy());
}
}
executor_ = std::move(executor);
auto state = state_.load(std::memory_order_acquire);
if (state == State::EMPTY &&
folly::atomic_compare_exchange_strong_explicit(
&state_,
&state,
State::HAS_EXECUTOR,
std::memory_order_release,
std::memory_order_acquire)) {
return;
}
DCHECK(state == State::HAS_FUNCTION); void setNestedExecutors(std::vector<DeferredWrapper> executors);
state_.store(State::HAS_EXECUTOR, std::memory_order_release);
executor_.copy().add(std::exchange(func_, nullptr));
}
void setNestedExecutors(std::vector<DeferredWrapper> executors) { void detach();
DCHECK(!nestedExecutors_);
nestedExecutors_ =
std::make_unique<std::vector<DeferredWrapper>>(std::move(executors));
}
void detach() { DeferredWrapper copy();
if (nestedExecutors_) {
auto nestedExecutors = std::exchange(nestedExecutors_, nullptr);
for (auto& nestedExecutor : *nestedExecutors) {
assert(nestedExecutor.get());
nestedExecutor.get()->detach();
}
}
auto state = state_.load(std::memory_order_acquire);
if (state == State::EMPTY &&
folly::atomic_compare_exchange_strong_explicit(
&state_,
&state,
State::DETACHED,
std::memory_order_release,
std::memory_order_acquire)) {
return;
}
DCHECK(state == State::HAS_FUNCTION); static DeferredWrapper create();
state_.store(State::DETACHED, std::memory_order_release);
std::exchange(func_, nullptr);
}
DeferredWrapper copy() {
acquire();
return DeferredWrapper(this);
}
static DeferredWrapper create() {
return DeferredWrapper(new DeferredExecutor{});
}
private: private:
DeferredExecutor() {}
friend class UniqueDeleter; friend class UniqueDeleter;
bool acquire() { DeferredExecutor();
auto keepAliveCount =
keepAliveCount_.fetch_add(1, std::memory_order_relaxed);
DCHECK(keepAliveCount > 0);
return true;
}
void release() { bool acquire();
auto keepAliveCount =
keepAliveCount_.fetch_sub(1, std::memory_order_acq_rel); void release();
DCHECK(keepAliveCount > 0);
if (keepAliveCount == 1) {
delete this;
}
}
enum class State { EMPTY, HAS_FUNCTION, HAS_EXECUTOR, DETACHED }; enum class State { EMPTY, HAS_FUNCTION, HAS_EXECUTOR, DETACHED };
std::atomic<State> state_{State::EMPTY}; std::atomic<State> state_{State::EMPTY};
Executor::KeepAlive<>::KeepAliveFunc func_; Executor::KeepAlive<>::KeepAliveFunc func_;
folly::Executor::KeepAlive<> executor_; folly::Executor::KeepAlive<> executor_;
...@@ -322,24 +172,6 @@ class DeferredExecutor final { ...@@ -322,24 +172,6 @@ class DeferredExecutor final {
std::atomic<ssize_t> keepAliveCount_{1}; std::atomic<ssize_t> keepAliveCount_{1};
}; };
inline void UniqueDeleter::operator()(DeferredExecutor* ptr) {
if (ptr) {
ptr->release();
}
}
inline KeepAliveOrDeferred KeepAliveOrDeferred::copy() const {
if (isDeferred()) {
if (auto def = getDeferredExecutor()) {
return KeepAliveOrDeferred{def->copy()};
} else {
return KeepAliveOrDeferred{};
}
} else {
return KeepAliveOrDeferred{asKeepAlive()};
}
}
/// The shared state object for Future and Promise. /// The shared state object for Future and Promise.
/// ///
/// Nomenclature: /// Nomenclature:
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment