2 * Copyright 2017 Facebook, Inc.
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
8 * http://www.apache.org/licenses/LICENSE-2.0
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
25 #include <folly/Executor.h>
26 #include <folly/Function.h>
27 #include <folly/MicroSpinLock.h>
28 #include <folly/Optional.h>
29 #include <folly/ScopeGuard.h>
30 #include <folly/Try.h>
31 #include <folly/Utility.h>
32 #include <folly/futures/FutureException.h>
33 #include <folly/futures/detail/FSM.h>
34 #include <folly/portability/BitsFunctexcept.h>
36 #include <folly/io/async/Request.h>
49 This state machine is fairly self-explanatory. The most important bit is
50 that the callback is only executed on the transition from Armed to Done,
51 and that transition can happen immediately after transitioning from Only*
52 to Armed, if it is active (the usual case).
// State machine for the Core: Start -> OnlyResult/OnlyCallback -> Armed -> Done
// (per the comment above). NOTE(review): the enumerator list is missing from
// this view — only the opening line survives.
54 enum class State : uint8_t {
62 /// The shared state object for Future and Promise.
63 /// Some methods must only be called by either the Future thread or the
64 /// Promise thread. The Future thread is the thread that currently "owns" the
65 /// Future and its callback-related operations, and the Promise thread is
66 /// likewise the thread that currently "owns" the Promise and its
67 /// result-related operations. Also, Futures own interruption, Promises own
68 /// interrupt handlers. Unfortunately, there are things that users can do to
69 /// break this, and we can't detect that. However if they follow move
70 /// semantics religiously wrt threading, they should be ok.
72 /// It's worth pointing out that Futures and/or Promises can and usually will
73 /// migrate between threads, though this usually happens within the API code.
74 /// For example, an async operation will probably make a Promise, grab its
75 /// Future, then move the Promise into another thread that will eventually
76 /// fulfill it. With executors and via, this gets slightly more complicated at
77 /// first blush, but it's the same principle. In general, as long as the user
78 /// doesn't access a Future or Promise object from more than one thread at a
79 /// time there won't be any problems.
// Compile-time guard: Core<T> cannot be instantiated with T = void; callers
// are directed to use folly::Unit as the "no value" payload instead.
82 static_assert(!std::is_void<T>::value,
83 "void futures are not supported. Use Unit instead.");
85 /// This must be heap-constructed. There's probably a way to enforce that in
86 /// code but since this is just internal detail code and I don't know how
87 /// off-hand, I'm punting.
// Default ctor: no result yet (empty Optional), FSM starts in Start.
// attached_(2): presumably one reference for the Future side and one for the
// Promise side of this shared state — consistent with the class doc above,
// but confirm against the full source.
88 Core() : result_(), fsm_(State::Start), attached_(2) {}
// Ctor for an already-completed core: the result is moved in immediately, so
// the FSM starts directly in OnlyResult (a later setCallback arms it).
// NOTE(review): the remaining initializers (attached_ etc.) and the ctor body
// are missing from this view.
90 explicit Core(Try<T>&& t)
91 : result_(std::move(t)),
92 fsm_(State::OnlyResult),
// Ctor that constructs the value T in place: the double `in_place` builds the
// Optional<Try<T>> (first in_place) holding a Try<T> (second in_place) whose
// T is constructed from args. noexcept mirrors T's constructor. Starts in
// OnlyResult, as with the Try<T>&& ctor.
// NOTE(review): trailing initializers and the ctor body are missing here.
95 template <typename... Args>
96 explicit Core(in_place_t, Args&&... args) noexcept(
97 std::is_nothrow_constructible<T, Args&&...>::value)
98 : result_(in_place, in_place, std::forward<Args>(args)...),
99 fsm_(State::OnlyResult),
// NOTE(review): this line appears to belong to the (not visible) destructor —
// it asserts that both sides have detached (attached_ decremented to 0, see
// detachOne below) before the Core is destroyed. Confirm against full source.
103 DCHECK(attached_ == 0);
// Core is pinned in memory: neither copyable nor movable, since Future and
// Promise both hold raw pointers to it.
107 Core(Core const&) = delete;
108 Core& operator=(Core const&) = delete;
110 // not movable (see comment in the implementation of Future::then)
111 Core(Core&&) noexcept = delete;
112 Core& operator=(Core&&) = delete;
114 /// May call from any thread
// Returns whether result_ has been set, judged purely from the FSM state
// (OnlyResult is one such state; the remaining cases of the switch are
// missing from this view).
115 bool hasResult() const noexcept {
116 switch (fsm_.getState()) {
117 case State::OnlyResult:
128 /// May call from any thread
// NOTE(review): ready()'s body is missing from this view.
129 bool ready() const noexcept {
133 /// May call from any thread
// NOTE(review): presumably part of a result accessor (getTry()/value-style)
// that throws FutureNotReady when no result exists yet — confirm against the
// full source; only this throw line survives here.
138 throwFutureNotReady();
142 /// Call only from Future thread.
// Stores the continuation plus the current RequestContext (restored around
// the callback invocation later, see doCallback). The FSM transition depends
// on the current state: no result yet -> OnlyCallback; result already present
// (OnlyResult) -> Armed, after which maybeCallback can fire. Calling this a
// second time is a programming error (throws std::logic_error).
// NOTE(review): several lines (switch header, break/default lines, closing
// braces, and the maybeCallback() call guarded by transitionToArmed) are
// missing from this view.
143 template <typename F>
144 void setCallback(F&& func) {
145 bool transitionToArmed = false;
146 auto setCallback_ = [&]{
147 context_ = RequestContext::saveContext();
148 callback_ = std::forward<F>(func);
153 FSM_UPDATE(fsm_, State::OnlyCallback, setCallback_);
156 case State::OnlyResult:
157 FSM_UPDATE(fsm_, State::Armed, setCallback_);
158 transitionToArmed = true;
161 case State::OnlyCallback:
164 std::__throw_logic_error("setCallback called twice");
167 // we could always call this, it is an optimization to only call it when
168 // it might be needed.
169 if (transitionToArmed) {
174 /// Call only from Promise thread
// Mirror image of setCallback: stores the result, transitioning Start ->
// OnlyResult, or OnlyCallback -> Armed (callback already waiting, so arm and
// then fire). Setting a result twice is a logic_error.
// NOTE(review): the switch header, break lines, closing braces, and the
// armed-path call after `if (transitionToArmed)` are missing from this view.
175 void setResult(Try<T>&& t) {
176 bool transitionToArmed = false;
177 auto setResult_ = [&]{ result_ = std::move(t); };
180 FSM_UPDATE(fsm_, State::OnlyResult, setResult_);
183 case State::OnlyCallback:
184 FSM_UPDATE(fsm_, State::Armed, setResult_);
185 transitionToArmed = true;
188 case State::OnlyResult:
191 std::__throw_logic_error("setResult called twice");
194 if (transitionToArmed) {
199 /// Called by a destructing Future (in the Future thread, by definition)
// NOTE(review): the body is entirely missing from this view — presumably it
// releases the Future's reference (cf. attached_ / detachOne); confirm
// against the full source.
200 void detachFuture() {
205 /// Called by a destructing Promise (in the Promise thread, by definition)
// If the Promise dies without ever fulfilling, the waiting Future is
// completed exceptionally with BrokenPromise (tagged with T's type name) so
// it never hangs. NOTE(review): the closing lines of this function are
// missing from this view.
206 void detachPromise() {
207 // detachPromise() and setResult() should never be called in parallel
208 // so we don't need to protect this.
209 if (UNLIKELY(!result_)) {
210 setResult(Try<T>(exception_wrapper(BrokenPromise(typeid(T).name()))));
215 /// May call from any thread
// NOTE(review): the signature lines of the two functions below (presumably
// deactivate()/activate()) are missing from this view; only their one-line
// bodies survive. active_ gates whether maybeCallback is allowed to run the
// callback (see the acquire-load in maybeCallback below); release stores here
// pair with that acquire.
217 active_.store(false, std::memory_order_release);
220 /// May call from any thread
222 active_.store(true, std::memory_order_release);
226 /// May call from any thread
227 bool isActive() { return active_.load(std::memory_order_acquire); }
229 /// Call only from Future thread
// Publishes the executor (and priority) under executorLock_; the try_lock
// followed by lock() is just a fast-path/slow-path acquisition of the
// spinlock. NOTE(review): the `executor_ = x;` assignment lines and the
// bodies' closing braces — and getExecutor()'s body — are missing from this
// view; confirm against the full source.
230 void setExecutor(Executor* x, int8_t priority = Executor::MID_PRI) {
231 if (!executorLock_.try_lock()) {
232 executorLock_.lock();
235 priority_ = priority;
236 executorLock_.unlock();
// Unsynchronized variant for callers that already hold (or don't need) the
// lock.
239 void setExecutorNoLock(Executor* x, int8_t priority = Executor::MID_PRI) {
241 priority_ = priority;
244 Executor* getExecutor() {
248 /// Call only from Future thread
// Delivers an interrupt to the Promise side: records the exception once
// (first raise wins — `!interrupt_` guard) and only while no result exists
// yet; if a handler has been registered, it is invoked immediately with the
// interrupt, all under interruptLock_.
249 void raise(exception_wrapper e) {
250 if (!interruptLock_.try_lock()) {
251 interruptLock_.lock();
253 if (!interrupt_ && !hasResult()) {
254 interrupt_ = std::make_unique<exception_wrapper>(std::move(e));
255 if (interruptHandler_) {
256 interruptHandler_(*interrupt_);
259 interruptLock_.unlock();
// Returns a copy of the interrupt handler. Fast path: the atomic
// interruptHandlerSet_ flag (acquire) avoids taking the spinlock when no
// handler was ever registered — the early-return line for that case is
// missing from this view, as is the final `return handler;`.
262 std::function<void(exception_wrapper const&)> getInterruptHandler() {
263 if (!interruptHandlerSet_.load(std::memory_order_acquire)) {
266 if (!interruptLock_.try_lock()) {
267 interruptLock_.lock();
// Copy under the lock so the caller can invoke it without holding the lock.
269 auto handler = interruptHandler_;
270 interruptLock_.unlock();
274 /// Call only from Promise thread
// Registers the handler under interruptLock_ and (in the NoLock helper)
// publishes interruptHandlerSet_ so getInterruptHandler's fast path sees it.
// NOTE(review): lines between the lock acquisition and the
// setInterruptHandlerNoLock call are missing from this view — presumably an
// immediate invocation when an interrupt was already raised; confirm against
// the full source.
275 void setInterruptHandler(std::function<void(exception_wrapper const&)> fn) {
276 if (!interruptLock_.try_lock()) {
277 interruptLock_.lock();
283 setInterruptHandlerNoLock(std::move(fn));
286 interruptLock_.unlock();
// Lock-free variant; relaxed store is sufficient here since the flag only
// gates an optimization (the acquire-side reader still takes the lock before
// reading interruptHandler_).
289 void setInterruptHandlerNoLock(
290 std::function<void(exception_wrapper const&)> fn) {
291 interruptHandlerSet_.store(true, std::memory_order_relaxed);
292 interruptHandler_ = std::move(fn);
296 // Helper class that stores a pointer to the `Core` object and calls
297 // `derefCallback` and `detachOne` in the destructor.
// Move-only RAII guard used by doCallback: keeps the Core (and its callback_)
// alive while a lambda holding it is queued on an executor. The move ctor
// swaps with the nullptr-initialized member, leaving the source empty.
// NOTE(review): the destructor's detachOne() call, access specifiers, and
// several closing braces are missing from this view.
298 class CoreAndCallbackReference {
300 explicit CoreAndCallbackReference(Core* core) noexcept : core_(core) {}
302 ~CoreAndCallbackReference() {
304 core_->derefCallback();
309 CoreAndCallbackReference(CoreAndCallbackReference const& o) = delete;
310 CoreAndCallbackReference& operator=(CoreAndCallbackReference const& o) =
313 CoreAndCallbackReference(CoreAndCallbackReference&& o) noexcept {
314 std::swap(core_, o.core_);
317 Core* getCore() const noexcept {
322 Core* core_{nullptr};
// Fires the callback if allowed: only when active_ is set (acquire pairs with
// the release stores in activate/deactivate), and only via the one legal FSM
// transition Armed -> Done, whose action runs doCallback(). NOTE(review):
// the FSM retry loop / state check surrounding this is missing from this
// view.
325 void maybeCallback() {
328 if (active_.load(std::memory_order_acquire)) {
329 FSM_UPDATE2(fsm_, State::Done, []{}, [this]{ this->doCallback(); });
// NOTE(review): this appears to be the body of doCallback() — the signature
// line and several structural lines (the `if (x)` branch, try block opener,
// the addWithPriority call line, else branches, closing braces) are missing
// from this view; the annotations below are inferred from what survives.
// Snapshot the executor and priority under executorLock_.
339 Executor* x = executor_;
340 // initialize, solely to appease clang's -Wconditional-uninitialized
343 if (!executorLock_.try_lock()) {
344 executorLock_.lock();
347 priority = priority_;
348 executorLock_.unlock();
// Executor path: hand the callback to the executor.
352 exception_wrapper ew;
353 // We need to reset `callback_` after it was executed (which can happen
354 // through the executor or, if `Executor::add` throws, below). The
355 // executor might discard the function without executing it (now or
356 // later), in which case `callback_` also needs to be reset.
357 // The `Core` has to be kept alive throughout that time, too. Hence we
358 // increment `attached_` and `callbackReferences_` by two, and construct
359 // exactly two `CoreAndCallbackReference` objects, which call
360 // `derefCallback` and `detachOne` in their destructor. One will guard
361 // this scope, the other one will guard the lambda passed to the executor.
363 callbackReferences_ += 2;
364 CoreAndCallbackReference guard_local_scope(this);
365 CoreAndCallbackReference guard_lambda(this);
// Single-priority executors take the cheap add(); otherwise (fragment at 376
// below) the same lambda goes through a priority-aware enqueue. Either way
// the lambda restores the saved RequestContext and invokes the callback with
// the (already set) result.
367 if (LIKELY(x->getNumPriorities() == 1)) {
368 x->add([core_ref = std::move(guard_lambda)]() mutable {
369 auto cr = std::move(core_ref);
370 Core* const core = cr.getCore();
371 RequestContextScopeGuard rctx(core->context_);
372 core->callback_(std::move(*core->result_));
376 [core_ref = std::move(guard_lambda)]() mutable {
377 auto cr = std::move(core_ref);
378 Core* const core = cr.getCore();
379 RequestContextScopeGuard rctx(core->context_);
380 core->callback_(std::move(*core->result_));
// If enqueueing itself threw, capture the exception...
384 } catch (const std::exception& e) {
385 ew = exception_wrapper(std::current_exception(), e);
387 ew = exception_wrapper(std::current_exception());
// ...overwrite the result with it, and run the callback inline instead.
390 RequestContextScopeGuard rctx(context_);
391 result_ = Try<T>(std::move(ew));
392 callback_(std::move(*result_));
// No-executor path: run the callback inline on the current thread, under the
// saved RequestContext.
400 RequestContextScopeGuard rctx(context_);
401 callback_(std::move(*result_));
// NOTE(review): the decrement below appears to belong to detachOne() (its
// signature and the `delete this` when the count hits zero are missing from
// this view) — each of the Future and Promise sides drops one reference.
406 auto a = attached_--;
// Drops one callback reference (see the `callbackReferences_ += 2` in
// doCallback); when the last holder is gone, the (missing) body presumably
// destroys callback_ — confirm against the full source.
413 void derefCallback() {
414 if (--callbackReferences_ == 0) {
// Continuation set by the Future side via setCallback.
419 folly::Function<void(Try<T>&&)> callback_;
420 // place result_ next to increase the likelihood that the value will be
421 // contained entirely in one cache line
// Empty until setResult / the completed-core constructors fill it.
422 folly::Optional<Try<T>> result_;
// Reference count of attached Future/Promise sides (decremented in the
// detachOne fragment above; DCHECKed to be 0 at destruction).
424 std::atomic<unsigned char> attached_;
// Extra refcount keeping callback_/Core alive while queued on an executor
// (managed by CoreAndCallbackReference / derefCallback).
425 std::atomic<unsigned char> callbackReferences_{0};
// Gate for maybeCallback; toggled with release stores, read with acquire.
426 std::atomic<bool> active_ {true};
// Fast-path flag so getInterruptHandler can skip the lock when no handler
// was ever registered.
427 std::atomic<bool> interruptHandlerSet_ {false};
// Spinlocks protecting the interrupt state and the executor/priority pair.
428 folly::MicroSpinLock interruptLock_ {0};
429 folly::MicroSpinLock executorLock_ {0};
430 int8_t priority_ {-1};
431 Executor* executor_ {nullptr};
// RequestContext captured in setCallback and restored around the callback.
432 std::shared_ptr<RequestContext> context_ {nullptr};
// First interrupt raised by the Future side (at most one; see raise()).
433 std::unique_ptr<exception_wrapper> interrupt_ {};
434 std::function<void(exception_wrapper const&)> interruptHandler_ {nullptr};
// Base case of the variadic-collect recursion: no futures left, nothing to do
// (the empty body's closing brace is missing from this view).
437 template <template <typename...> class T, typename... Ts>
438 void collectVariadicHelper(const std::shared_ptr<T<Ts...>>& /* ctx */) {
// Recursive case: attach a callback to the head future that records its Try
// in the shared context at head's compile-time position in the pack —
// sizeof...(Ts) - sizeof...(TTail) - 1 — then recurse over the tail.
// NOTE(review): the template parameter list is partially missing (THead/TTail
// declarations) as are some closing lines.
443 template <typename...> class T,
447 void collectVariadicHelper(const std::shared_ptr<T<Ts...>>& ctx,
448 THead&& head, TTail&&... tail) {
449 using ValueType = typename std::decay<THead>::type::value_type;
450 std::forward<THead>(head).setCallback_([ctx](Try<ValueType>&& t) {
451 ctx->template setPartialResult<
453 sizeof...(Ts) - sizeof...(TTail)-1>(t);
455 // template tail-recursion
456 collectVariadicHelper(ctx, std::forward<TTail>(tail)...);
459 } // namespace detail
460 } // namespace futures