redux-scraper/sorbet/rbi/gems/concurrent-ruby-edge@0.7.1.rbi

# typed: true
# DO NOT EDIT MANUALLY
# This is an autogenerated file for types exported from the `concurrent-ruby-edge` gem.
# Please instead update this file by running `bin/tapioca gem concurrent-ruby-edge`.
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/version.rb#1
module Concurrent
extend ::Logger::Severity
private
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/tvar.rb#139
def abort_transaction; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/tvar.rb#82
def atomically; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/dataflow.rb#56
def call_dataflow(method, executor, *inputs, &block); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/dataflow.rb#34
def dataflow(*inputs, &block); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/dataflow.rb#44
def dataflow!(*inputs, &block); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/dataflow.rb#39
def dataflow_with(executor, *inputs, &block); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/dataflow.rb#49
def dataflow_with!(executor, *inputs, &block); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/tvar.rb#144
def leave_transaction; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/utility/monotonic_time.rb#15
def monotonic_time(unit = T.unsafe(nil)); end
class << self
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/tvar.rb#139
def abort_transaction; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/tvar.rb#82
def atomically; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/utility/processor_counter.rb#194
def available_processor_count; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/dataflow.rb#56
def call_dataflow(method, executor, *inputs, &block); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/utility/processor_counter.rb#209
def cpu_quota; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/utility/processor_counter.rb#217
def cpu_shares; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/concern/logging.rb#37
def create_simple_logger(level = T.unsafe(nil), output = T.unsafe(nil)); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/concern/logging.rb#69
def create_stdlib_logger(level = T.unsafe(nil), output = T.unsafe(nil)); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/dataflow.rb#34
def dataflow(*inputs, &block); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/dataflow.rb#44
def dataflow!(*inputs, &block); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/dataflow.rb#39
def dataflow_with(executor, *inputs, &block); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/dataflow.rb#49
def dataflow_with!(executor, *inputs, &block); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/configuration.rb#48
def disable_at_exit_handlers!; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/configuration.rb#83
def executor(executor_identifier); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/configuration.rb#55
def global_fast_executor; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/configuration.rb#66
def global_immediate_executor; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/configuration.rb#62
def global_io_executor; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/concern/logging.rb#109
def global_logger; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/concern/logging.rb#113
def global_logger=(value); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/configuration.rb#73
def global_timer_set; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/tvar.rb#144
def leave_transaction; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/utility/monotonic_time.rb#15
def monotonic_time(unit = T.unsafe(nil)); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/atomic/lock_local_var.rb#7
def mutex_owned_per_thread?; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/configuration.rb#87
def new_fast_executor(opts = T.unsafe(nil)); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/configuration.rb#98
def new_io_executor(opts = T.unsafe(nil)); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/utility/processor_counter.rb#181
def physical_processor_count; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/utility/processor_counter.rb#160
def processor_count; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/utility/processor_counter.rb#142
def processor_counter; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/concern/logging.rb#63
def use_simple_logger(level = T.unsafe(nil), output = T.unsafe(nil)); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/concern/logging.rb#96
def use_stdlib_logger(level = T.unsafe(nil), output = T.unsafe(nil)); end
end
end
# {include:file:docs-source/actor/main.md}
#
# @api Actor
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor.rb#17
module Concurrent::Actor
class << self
# @api Actor
# @return [Reference, nil] current executing actor if any
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor.rb#34
def current; end
# A root actor, the default parent of all actors spawned outside an actor
#
# @api Actor
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor.rb#49
def root; end
# Spawns a new actor. {Concurrent::Actor::AbstractContext.spawn} allows the class parameter to be omitted.
# For the list of available options see {Core#initialize}.
#
# @api Actor
# @example by class and name
# Actor.spawn(AdHoc, :ping1) { -> message { message } }
# @example by option hash
# inc2 = Actor.spawn(class: AdHoc,
# name: 'increment by 2',
# args: [2],
# executor: Concurrent.global_io_executor) do |increment_by|
# lambda { |number| number + increment_by }
# end
# inc2.ask!(2) # => 4
# @param block for context_class instantiation
# @param args see {.to_spawn_options}
# @return [Reference] never the actual actor
# @see Concurrent::Actor::AbstractContext.spawn
# @see Core#initialize
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor.rb#72
def spawn(*args, &block); end
# Behaves as {.spawn} but blocks until the actor is initialized, and raises an exception on error
#
# @api Actor
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor.rb#85
def spawn!(*args, &block); end
# @api Actor
# @overload to_spawn_options
# @overload to_spawn_options
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor.rb#96
def to_spawn_options(*args); end
end
end
# A new actor is defined by subclassing {RestartingContext} or {Context} and defining its abstract methods.
# {AbstractContext} can be subclassed directly to implement more specific behaviour, see the {Root} implementation.
#
# - {Context}
#
# > {include:Actor::Context}
#
# - {RestartingContext}.
#
# > {include:Actor::RestartingContext}
#
# Example of an actor definition:
#
# {include:file:docs-source/actor/define.out.rb}
#
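# A minimal sketch of such a definition (the `Counter` class name and message protocol are illustrative):
#
#     class Counter < Concurrent::Actor::RestartingContext
#       def initialize
#         @count = 0
#       end
#
#       def on_message(message)
#         case message
#         when :increment then @count += 1
#         when :count     then @count
#         else pass # hand unknown messages to the next behaviour
#         end
#       end
#     end
#
#     counter = Counter.spawn(:counter)
#     counter << :increment << :increment
#     counter.ask!(:count) # => 2
#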
# See the methods of {AbstractContext} for what else can be tweaked, e.g. {AbstractContext#default_reference_class}
#
# @abstract implement {AbstractContext#on_message} and {AbstractContext#behaviour_definition}
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/context.rb#26
class Concurrent::Actor::AbstractContext
include ::Concurrent::Actor::TypeCheck
include ::Concurrent::Actor::PublicDelegations
include ::Logger::Severity
include ::Concurrent::Actor::InternalDelegations
# tell self a message
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/context.rb#94
def <<(message); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/context.rb#98
def ask(message); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/context.rb#98
def ask!(message); end
# @raise [NotImplementedError]
# @return [Array<Array(Behavior::Abstract, Array<Object>)>]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/context.rb#72
def behaviour_definition; end
# Returns the value of attribute core.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/context.rb#30
def core; end
# Defines an actor responsible for dead letters. Any rejected message sent
# with {Reference#tell} is sent there; a message with a future is considered
# already monitored for failures. The default behaviour is to use the
# {AbstractContext#dead_letter_routing} of the parent, so if no
# {AbstractContext#dead_letter_routing} method is overridden in the
# parent chain, the message ends up in the `Actor.root.dead_letter_routing`
# agent, which logs a warning.
#
# @return [Reference]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/context.rb#67
def dead_letter_routing; end
# Override to set a different default executor, e.g. to change it to global_operation_pool
#
# @return [Executor]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/context.rb#89
def default_executor; end
# Override if a different class for the reference is needed.
#
# @return [Class] descendant of {Reference}
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/context.rb#83
def default_reference_class; end
# @return [Envelope] current envelope, accessible inside #on_message processing
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/context.rb#77
def envelope; end
# @api private
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/context.rb#46
def on_envelope(envelope); end
# Override to add custom code invoked on internal events like `:terminated`, `:resumed`, or `anError`.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/context.rb#42
def on_event(event); end
# @abstract override to define Actor's behaviour
# @note self should not be returned (or sent to other actors), {#reference} should be used
# instead
# @param message [Object]
# @raise [NotImplementedError]
# @return [Object] a result which will be used to set the Future supplied to Reference#ask
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/context.rb#37
def on_message(message); end
# Use if you want to pass the message to the next behaviour, usually
# {Behaviour::ErrorsOnUnknownMessage}
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/context.rb#55
def pass; end
# tell self a message
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/context.rb#94
def tell(message); end
private
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/context.rb#128
def initialize_core(core); end
class << self
# Behaves as {Concurrent::Actor.spawn} but :class is auto-inserted based on the receiver, so it can be omitted.
#
# @example by class and name
# AdHoc.spawn(:ping1) { -> message { message } }
# @example by option hash
# inc2 = AdHoc.spawn(name: 'increment by 2',
# args: [2],
# executor: Concurrent.configuration.global_task_pool) do |increment_by|
# lambda { |number| number + increment_by }
# end
# inc2.ask!(2) # => 4
# @see Concurrent::Actor.spawn
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/context.rb#117
def spawn(name_or_opts, *args, &block); end
# Behaves as {Concurrent::Actor.spawn!} but :class is auto-inserted based on the receiver, so it can be omitted.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/context.rb#122
def spawn!(name_or_opts, *args, &block); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/context.rb#132
def to_spawn_options(name_or_opts, *args); end
end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/errors.rb#7
class Concurrent::Actor::ActorTerminated < ::Concurrent::Actor::Error
include ::Concurrent::Actor::TypeCheck
# @return [ActorTerminated] a new instance of ActorTerminated
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/errors.rb#12
def initialize(reference); end
# Returns the value of attribute reference.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/errors.rb#10
def reference; end
end
# Actors have a modular architecture, achieved by combining a light core with a chain of
# behaviours. Each message or internal event propagates through the chain, allowing the
# behaviours to react based on their responsibility.
#
# - {Behaviour::Linking}:
#
# > {include:Actor::Behaviour::Linking}
#
# - {Behaviour::Awaits}:
#
# > {include:Actor::Behaviour::Awaits}
#
# - {Behaviour::Pausing}:
#
# > {include:Actor::Behaviour::Pausing}
#
# - {Behaviour::Supervising}:
#
# > {include:Actor::Behaviour::Supervising}
#
# - {Behaviour::ExecutesContext}:
#
# > {include:Actor::Behaviour::ExecutesContext}
#
# - {Behaviour::ErrorsOnUnknownMessage}:
#
# > {include:Actor::Behaviour::ErrorsOnUnknownMessage}
#
# - {Behaviour::Termination}:
#
# > {include:Actor::Behaviour::Termination}
#
# - {Behaviour::RemovesChild}:
#
# > {include:Actor::Behaviour::RemovesChild}
#
# If needed, new behaviours can be added or old ones removed to get the required behaviour.
#
# - {Context} uses
# {include:Actor::Behaviour.basic_behaviour_definition}
#
# - {RestartingContext} uses
# {include:Actor::Behaviour.restarting_behaviour_definition}
#
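# For example, a behaviour definition can be supplied when spawning an actor (as in the
# {Behaviour::Linking} example below; the actor name here is illustrative):
#
#     actor = Concurrent::Actor::Utils::AdHoc.spawn(
#         name:                 :robust,
#         behaviour_definition: Concurrent::Actor::Behaviour.restarting_behaviour_definition) do
#       -> message { message }
#     end
#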
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour.rb#51
module Concurrent::Actor::Behaviour
class << self
# @see '' its source code
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour.rb#105
def base(on_error); end
# Array of behaviours and their construction parameters.
#
# [[Behaviour::SetResults, :terminate!],
# [Behaviour::RemovesChild],
# [Behaviour::Termination],
# [Behaviour::Linking],
# [Behaviour::Awaits],
# [Behaviour::ExecutesContext],
# [Behaviour::ErrorsOnUnknownMessage]]
#
# @see '' its source code
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour.rb#77
def basic_behaviour_definition; end
# @see '' its source code
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour.rb#113
def linking; end
# Array of behaviours and their construction parameters.
#
# [[Behaviour::SetResults, :pause!],
# [Behaviour::RemovesChild],
# [Behaviour::Termination],
# [Behaviour::Linking],
# [Behaviour::Pausing],
# [Behaviour::Supervising, :reset!, :one_for_one],
# [Behaviour::Awaits],
# [Behaviour::ExecutesContext],
# [Behaviour::ErrorsOnUnknownMessage]]
#
# @see '' its source code
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour.rb#96
def restarting_behaviour_definition(handle = T.unsafe(nil), strategy = T.unsafe(nil)); end
# @see '' its source code
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour.rb#118
def supervised; end
# @see '' its source code
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour.rb#123
def supervising(handle = T.unsafe(nil), strategy = T.unsafe(nil)); end
# @see '' its source code
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour.rb#128
def user_messages; end
end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/abstract.rb#8
class Concurrent::Actor::Behaviour::Abstract
include ::Concurrent::Actor::TypeCheck
include ::Concurrent::Actor::PublicDelegations
include ::Logger::Severity
include ::Concurrent::Actor::InternalDelegations
# @return [Abstract] a new instance of Abstract
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/abstract.rb#14
def initialize(core, subsequent, core_options); end
# Broadcasts an event to all behaviours and the context
#
# @see #on_event
# @see AbstractContext#on_event
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/abstract.rb#39
def broadcast(public, event); end
# Returns the value of attribute core.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/abstract.rb#12
def core; end
# override to add extra behaviour
#
# @note super needs to be called not to break the chain
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/abstract.rb#21
def on_envelope(envelope); end
# override to add extra behaviour
#
# @note super needs to be called not to break the chain
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/abstract.rb#32
def on_event(public, event); end
# @param envelope [Envelope] to pass to {#subsequent} behaviour
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/abstract.rb#26
def pass(envelope); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/abstract.rb#43
def reject_envelope(envelope); end
# Returns the value of attribute subsequent.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/abstract.rb#12
def subsequent; end
end
# Accepts `:await` messages, which allows waiting for the Actor to process all previously sent
# messages.
#
# actor << :a << :b
# actor.ask(:await).wait # blocks until :a and :b are processed
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/awaits.rb#12
class Concurrent::Actor::Behaviour::Awaits < ::Concurrent::Actor::Behaviour::Abstract
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/awaits.rb#13
def on_envelope(envelope); end
end
# Any message reaching this behaviour is buffered. Only one message is
# scheduled at any given time. Others are kept in the buffer until another one
# can be scheduled. This effectively means that messages handled by
# behaviours before the buffer have higher priority and can be processed
# before messages arriving in the buffer. This allows internal actor
# messages (like `:link`, `:supervise`) to be processed first.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/buffer.rb#13
class Concurrent::Actor::Behaviour::Buffer < ::Concurrent::Actor::Behaviour::Abstract
# @return [Buffer] a new instance of Buffer
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/buffer.rb#14
def initialize(core, subsequent, core_options); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/buffer.rb#20
def on_envelope(envelope); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/buffer.rb#46
def on_event(public, event); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/buffer.rb#37
def process_envelope; end
# Ensures that only one envelope processing is scheduled with #schedule_execution;
# this allows other scheduled blocks to be executed before the next envelope is processed.
# Simply put, this ensures that the Core stays responsive to internal calls (like add_child)
# even when the Actor is flooded with messages.
#
# @return [Boolean]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/buffer.rb#30
def process_envelopes?; end
end
# Simply fails when a message arrives here. It's usually the last behaviour.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/errors_on_unknown_message.rb#7
class Concurrent::Actor::Behaviour::ErrorsOnUnknownMessage < ::Concurrent::Actor::Behaviour::Abstract
# @raise [UnknownMessage]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/errors_on_unknown_message.rb#8
def on_envelope(envelope); end
end
# Delegates messages and events to {AbstractContext} instance.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/executes_context.rb#7
class Concurrent::Actor::Behaviour::ExecutesContext < ::Concurrent::Actor::Behaviour::Abstract
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/executes_context.rb#8
def on_envelope(envelope); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/executes_context.rb#12
def on_event(public, event); end
end
# Links the actor to other actors and sends the actor's events to them,
# e.g. `:terminated`, `:paused`, `:resumed`, errors, etc.
# A linked actor needs to handle those messages.
#
# listener = AdHoc.spawn name: :listener do
# lambda do |message|
# case message
# when Reference
# if message.ask!(:linked?)
# message << :unlink
# else
# message << :link
# end
# else
# puts "got event #{message.inspect} from #{envelope.sender}"
# end
# end
# end
#
# an_actor = AdHoc.spawn name: :an_actor, supervise: true, behaviour_definition: Behaviour.restarting_behaviour_definition do
# lambda { |message| raise 'failed'}
# end
#
# # link the actor
# listener.ask(an_actor).wait
# an_actor.ask(:fail).wait
# # unlink the actor
# listener.ask(an_actor).wait
# an_actor.ask(:fail).wait
# an_actor << :terminate!
#
# produces only two events; the other events happened after unlinking
#
# got event #<RuntimeError: failed> from #<Concurrent::Actor::Reference /an_actor (Concurrent::Actor::Utils::AdHoc)>
# got event :reset from #<Concurrent::Actor::Reference /an_actor (Concurrent::Actor::Utils::AdHoc)>
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/linking.rb#44
class Concurrent::Actor::Behaviour::Linking < ::Concurrent::Actor::Behaviour::Abstract
# @return [Linking] a new instance of Linking
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/linking.rb#45
def initialize(core, subsequent, core_options); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/linking.rb#66
def link(ref); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/linking.rb#51
def on_envelope(envelope); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/linking.rb#76
def on_event(public, event); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/linking.rb#71
def unlink(ref); end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour.rb#52
Concurrent::Actor::Behaviour::MESSAGE_PROCESSED = T.let(T.unsafe(nil), Object)
# Allows actors to be paused on errors.
# When paused, all arriving messages are collected and processed after the actor
# is resumed or reset. Resume simply continues with the next message.
# Reset also reinitializes the context.
#
# @note TODO missing example
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/pausing.rb#12
class Concurrent::Actor::Behaviour::Pausing < ::Concurrent::Actor::Behaviour::Abstract
# @return [Pausing] a new instance of Pausing
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/pausing.rb#13
def initialize(core, subsequent, core_options); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/pausing.rb#23
def on_envelope(envelope); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/pausing.rb#74
def on_event(public, event); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/pausing.rb#45
def pause!(error = T.unsafe(nil)); end
# @return [Boolean]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/pausing.rb#19
def paused?; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/pausing.rb#58
def reset!; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/pausing.rb#66
def restart!; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/pausing.rb#51
def resume!; end
private
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/pausing.rb#82
def do_pause; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/pausing.rb#93
def do_reset; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/pausing.rb#100
def do_restart; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/pausing.rb#87
def do_resume; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/pausing.rb#107
def rebuild_context; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/pausing.rb#118
def reject_deferred; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/pausing.rb#113
def reschedule_deferred; end
end
# Removes terminated children.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/removes_child.rb#7
class Concurrent::Actor::Behaviour::RemovesChild < ::Concurrent::Actor::Behaviour::Abstract
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/removes_child.rb#8
def on_envelope(envelope); end
end
# Collects the returned value and sets it on the ResolvableFuture in the {Envelope}, or sets the error on failure.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/sets_results.rb#7
class Concurrent::Actor::Behaviour::SetResults < ::Concurrent::Actor::Behaviour::Abstract
# @return [SetResults] a new instance of SetResults
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/sets_results.rb#10
def initialize(core, subsequent, core_options, error_strategy); end
# Returns the value of attribute error_strategy.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/sets_results.rb#8
def error_strategy; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/sets_results.rb#15
def on_envelope(envelope); end
end
# Handles supervised actors. The handle configures what to do with a failed child: :terminate!, :resume!, :reset!,
# or :restart!. The strategy is either :one_for_one (restarts just the failed actor) or :one_for_all (restarts all child actors).
#
# @note TODO missing example
# @note this will change in the next version to support supervision trees better
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/supervising.rb#11
class Concurrent::Actor::Behaviour::Supervising < ::Concurrent::Actor::Behaviour::Abstract
# @return [Supervising] a new instance of Supervising
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/supervising.rb#12
def initialize(core, subsequent, core_options, handle, strategy); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/supervising.rb#25
def on_envelope(envelope); end
end
# Handles actor termination. Waits until all its children are terminated;
# this can be configured on behaviour initialization.
#
# @note Actor rejects envelopes when terminated.
# @note TODO missing example
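# @example A minimal sketch of terminating an actor (the actor name is illustrative)
#   actor = Concurrent::Actor::Utils::AdHoc.spawn(:short_lived) { -> message { message } }
#   actor.ask!(:terminated?) # => false
#   actor << :terminate!     # tell the actor to terminate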
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/termination.rb#11
class Concurrent::Actor::Behaviour::Termination < ::Concurrent::Actor::Behaviour::Abstract
# @return [Termination] a new instance of Termination
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/termination.rb#17
def initialize(core, subsequent, core_options, trapping = T.unsafe(nil), terminate_children = T.unsafe(nil)); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/termination.rb#39
def on_envelope(envelope); end
# Terminates the actor. Any Envelope received after termination is rejected.
# Terminates all its children but does not wait until they are terminated.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/termination.rb#64
def terminate!(reason = T.unsafe(nil), envelope = T.unsafe(nil)); end
# Returns the value of attribute terminated.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/termination.rb#15
def terminated; end
# @note Actor rejects envelopes when terminated.
# @return [true, false] if actor is terminated
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/termination.rb#27
def terminated?; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/termination.rb#35
def trapping=(val); end
# @return [Boolean]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/behaviour/termination.rb#31
def trapping?; end
end
# Basic Context of an Actor. It supports only linking and it simply terminates on error.
# Uses {Behaviour.basic_behaviour_definition}:
#
# @abstract implement {AbstractContext#on_message}
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/context.rb#152
class Concurrent::Actor::Context < ::Concurrent::Actor::AbstractContext
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/context.rb#153
def behaviour_definition; end
end
# Core of the actor.
#
# @note The whole class should be considered private. A user should use {Context}s and {Reference}s only.
# @note devel: core should not block on anything, e.g. it cannot wait on children to terminate;
# that would eat up all threads in the task pool and deadlock
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#13
class Concurrent::Actor::Core < ::Concurrent::Synchronization::LockableObject
include ::Concurrent::Actor::TypeCheck
include ::Logger::Severity
include ::Concurrent::Concern::Logging
# @option opts
# @option opts
# @option opts
# @option opts
# @option opts
# @option opts
# @option opts
# @option opts
# @option opts
# @option opts
# @param block [Proc] for class instantiation
# @param opts [Hash] a customizable set of options
# @return [Core] a new instance of Core
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#50
def initialize(opts = T.unsafe(nil), &block); end
# @api private
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#74
def add_child(child); end
# @api private
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#150
def allocate_context; end
# @param behaviour_class [Class]
# @return [Behaviour::Abstract, nil] based on behaviour_class
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#138
def behaviour(behaviour_class); end
# @param behaviour_class [Class]
# @raise [KeyError] when no behaviour
# @return [Behaviour::Abstract] based on behaviour_class
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#145
def behaviour!(behaviour_class); end
# Returns the value of attribute behaviour_definition.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#35
def behaviour_definition; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#131
def broadcast(public, event); end
# @api private
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#155
def build_context; end
# @return [Array<Reference>] of children actors
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#68
def children; end
# Returns the value of attribute context.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#35
def context; end
# Returns the value of attribute context_class.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#35
def context_class; end
# @see AbstractContext#dead_letter_routing
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#63
def dead_letter_routing; end
# Executor which is used to process messages.
#
# @return [Executor]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#35
def executor; end
# Ensures that we are inside of the executor
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#102
def guard!; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#108
def log(level, message = T.unsafe(nil), &block); end
# The name of the actor instance; it should be unique (not enforced). Allows easier orientation
# between actor instances.
#
# @return [String]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#35
def name; end
# Is executed by a Reference scheduling the processing of new messages;
# can be called from other alternative Reference implementations.
#
# @param envelope [Envelope]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#92
def on_envelope(envelope); end
# The parent Actor. When an actor is spawned, {Actor.current} becomes its parent.
# When an actor is spawned from a thread outside of an actor ({Actor.current} is nil), {Actor.root} is assigned.
#
# @return [Reference, nil]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#58
def parent; end
# Path of this actor. It is used for easier orientation and logging.
# Path is constructed recursively with `parent.path + self.name` up to the {Actor.root},
# e.g. `/an_actor/its_child`.
#
# @return [String]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#35
def path; end
# @api private
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#161
def process_envelope(envelope); end
# Reference to this actor which can be safely passed around.
#
# @return [Reference]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#35
def reference; end
# @api private
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#82
def remove_child(child); end
# Schedules blocks to be executed on the executor sequentially;
# sets Actress.current
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#114
def schedule_execution; end
private
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#210
def initialize_behaviours(opts); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/core.rb#167
def ns_initialize(opts, &block); end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/default_dead_letter_handler.rb#5
class Concurrent::Actor::DefaultDeadLetterHandler < ::Concurrent::Actor::RestartingContext
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/default_dead_letter_handler.rb#6
def on_message(dead_letter); end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/envelope.rb#5
class Concurrent::Actor::Envelope
include ::Concurrent::Actor::TypeCheck
# @return [Envelope] a new instance of Envelope
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/envelope.rb#19
def initialize(message, future, sender, address); end
# Returns the value of attribute address.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/envelope.rb#17
def address; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/envelope.rb#34
def address_path; end
# @return [Edge::Future] a future which becomes resolved after message is processed
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/envelope.rb#17
def future; end
# @return [Object] a message
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/envelope.rb#17
def message; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/envelope.rb#38
def reject!(error); end
# @return [Reference, Thread] an actor or thread sending the message
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/envelope.rb#17
def sender; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/envelope.rb#26
def sender_path; end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/errors.rb#5
class Concurrent::Actor::Error < ::StandardError; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/internal_delegations.rb#6
module Concurrent::Actor::InternalDelegations
include ::Concurrent::Actor::PublicDelegations
include ::Logger::Severity
# see Core#behaviour
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/internal_delegations.rb#52
def behaviour(behaviour_class); end
# see Core#behaviour!
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/internal_delegations.rb#57
def behaviour!(behaviour_class); end
# @see Core#children
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/internal_delegations.rb#11
def children; end
# @return [AbstractContext]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/internal_delegations.rb#47
def context; end
# @see AbstractContext#dead_letter_routing
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/internal_delegations.rb#37
def dead_letter_routing; end
# delegates to core.log
#
# @see Logging#log
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/internal_delegations.rb#32
def log(level, message = T.unsafe(nil), &block); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/internal_delegations.rb#41
def redirect(reference, envelope = T.unsafe(nil)); end
# @see Termination#terminate!
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/internal_delegations.rb#16
def terminate!(reason = T.unsafe(nil)); end
# @return [Boolean]
# @see Termination#terminated?
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/internal_delegations.rb#21
def terminated?; end
end
# Provides publicly exposable methods from {Core}.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/public_delegations.rb#5
module Concurrent::Actor::PublicDelegations
# @see Core#context_class
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/public_delegations.rb#32
def actor_class; end
# @see Core#context_class
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/public_delegations.rb#32
def context_class; end
# @see Core#executor
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/public_delegations.rb#27
def executor; end
# @see Core#name
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/public_delegations.rb#7
def name; end
# @see Core#parent
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/public_delegations.rb#17
def parent; end
# @see Core#path
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/public_delegations.rb#12
def path; end
# @see Core#reference
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/public_delegations.rb#22
def ref; end
# @see Core#reference
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/public_delegations.rb#22
def reference; end
end
# {Reference} is the public interface of Actor instances. It is used for sending messages and can
# be freely passed around the application. It also provides some basic information about the actor;
# see {PublicDelegations}.
#
# AdHoc.spawn('printer') { -> message { puts message } }
# # => #<Concurrent::Actor::Reference:0x7fd0d2883218 /printer (Concurrent::Actor::Utils::AdHoc)>
# # ^object_id ^path ^context class
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/reference.rb#14
class Concurrent::Actor::Reference
include ::Concurrent::Actor::TypeCheck
include ::Concurrent::Actor::PublicDelegations
# @return [Reference] a new instance of Reference
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/reference.rb#22
def initialize(core); end
# Sends the message asynchronously to the actor and immediately returns
# `self` (the reference), allowing message tells to be chained.
#
# @example
# printer = AdHoc.spawn('printer') { -> message { puts message } }
# printer.tell('ping').tell('pong')
# printer << 'ping' << 'pong'
# # => 'ping'\n'pong'\n'ping'\n'pong'\n
# @param message [Object]
# @return [Reference] self
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/reference.rb#35
def <<(message); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/reference.rb#98
def ==(other); end
# @example
# adder = AdHoc.spawn('adder') { -> message { message + 1 } }
# adder.ask(1).value # => 2
# adder.ask(nil).wait.reason # => #<NoMethodError: undefined method `+' for nil:NilClass>
# @note it's a good practice to use {#tell} whenever possible. Results can be sent back with other messages.
# Ask should be used only for testing and when it returns very shortly. It can lead to deadlock if all threads in
# the global_io_executor block while asking. It's fine to use it from outside of actors and the
# global_io_executor.
# @param message [Object]
# @param future [Promises::Future] to be fulfilled by the message's processing result
# @return [Promises::Future] supplied future
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/reference.rb#52
def ask(message, future = T.unsafe(nil)); end
# Sends the message synchronously and blocks until the message
# is processed. Raises on error.
#
# @example
# adder = AdHoc.spawn('adder') { -> message { message + 1 } }
# adder.ask!(1) # => 2
# @note it's a good practice to use {#tell} whenever possible. Results can be sent back with other messages.
# Ask should be used only for testing and when it returns very shortly. It can lead to deadlock if all threads in
# the global_io_executor block while asking. It's fine to use it from outside of actors and the
# global_io_executor.
# @param message [Object]
# @param future [Promises::Future] to be fulfilled by the message's processing result
# @raise [Exception] future.reason if future is #rejected?
# @return [Object] message's processing result
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/reference.rb#73
def ask!(message, future = T.unsafe(nil)); end
# @example
# adder = AdHoc.spawn('adder') { -> message { message + 1 } }
# adder.ask(1).value # => 2
# adder.ask(nil).wait.reason # => #<NoMethodError: undefined method `+' for nil:NilClass>
# @note it's a good practice to use {#tell} whenever possible. Results can be sent back with other messages.
# Ask should be used only for testing and when it returns very shortly. It can lead to deadlock if all threads in
# the global_io_executor block while asking. It's fine to use it from outside of actors and the
# global_io_executor.
# @param message [Object]
# @param future [Promises::Future] to be fulfilled by the message's processing result
# @return [Promises::Future] supplied future
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/reference.rb#52
def ask_op(message, future = T.unsafe(nil)); end
# @see AbstractContext#dead_letter_routing
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/reference.rb#88
def dead_letter_routing; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/reference.rb#92
def inspect; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/reference.rb#77
def map(messages); end
# Behaves as {#tell} when no future is given and as {#ask} when a future is given
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/reference.rb#82
def message(message, future = T.unsafe(nil)); end
# Sends the message asynchronously to the actor and immediately returns
# `self` (the reference), allowing message tells to be chained.
#
# @example
# printer = AdHoc.spawn('printer') { -> message { puts message } }
# printer.tell('ping').tell('pong')
# printer << 'ping' << 'pong'
# # => 'ping'\n'pong'\n'ping'\n'pong'\n
# @param message [Object]
# @return [Reference] self
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/reference.rb#35
def tell(message); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/reference.rb#92
def to_s; end
private
# Returns the value of attribute core.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/reference.rb#18
def core; end
end
# Context of an Actor for robust systems. It supports supervision and linking, and pauses on error.
# Uses {Behaviour.restarting_behaviour_definition}
#
# @abstract implement {AbstractContext#on_message}
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/context.rb#162
class Concurrent::Actor::RestartingContext < ::Concurrent::Actor::AbstractContext
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/context.rb#163
def behaviour_definition; end
end
# Implements the root actor
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/root.rb#7
class Concurrent::Actor::Root < ::Concurrent::Actor::AbstractContext
# @return [Root] a new instance of Root
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/root.rb#9
def initialize; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/root.rb#33
def behaviour_definition; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/root.rb#29
def dead_letter_routing; end
# To allow spawning of new actors, spawn needs to be called inside the parent Actor
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/root.rb#18
def on_message(message); end
end
# Taken from Algebrick.
# Supplies type-checking helpers whenever included.
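#
# For example, from within a class that includes {TypeCheck} (a sketch assuming Type! returns
# the checked value when it matches one of the types and raises TypeError otherwise):
#
#   Type? 1, Integer          # => true
#   Type! 1, Integer          # => 1
#   Type! 1, String, Symbol   # raises TypeError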
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/type_check.rb#6
module Concurrent::Actor::TypeCheck
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/type_check.rb#33
def Child!(value, *types); end
# @return [Boolean]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/type_check.rb#28
def Child?(value, *types); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/type_check.rb#22
def Match!(value, *types); end
# @return [Boolean]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/type_check.rb#18
def Match?(value, *types); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/type_check.rb#12
def Type!(value, *types); end
# @return [Boolean]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/type_check.rb#8
def Type?(value, *types); end
class << self
# @raise [TypeError]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/type_check.rb#41
def error(value, message, types); end
end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/errors.rb#18
class Concurrent::Actor::UnknownMessage < ::Concurrent::Actor::Error
include ::Concurrent::Actor::TypeCheck
# @return [UnknownMessage] a new instance of UnknownMessage
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/errors.rb#23
def initialize(envelope); end
# Returns the value of attribute envelope.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/errors.rb#21
def envelope; end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/utils.rb#3
module Concurrent::Actor::Utils; end
# Allows quick creation of actors with behaviour defined by blocks.
#
# @example ping
# AdHoc.spawn :forward, an_actor do |where|
# # this block has to return proc defining #on_message behaviour
# -> message { where.tell message }
# end
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/utils/ad_hoc.rb#23
class Concurrent::Actor::Utils::AdHoc < ::Concurrent::Actor::Context
include ::Concurrent::Actor::Utils::AsAdHoc
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/utils/ad_hoc.rb#7
module Concurrent::Actor::Utils::AsAdHoc
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/utils/ad_hoc.rb#8
def initialize(*args, &initializer); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/utils/ad_hoc.rb#12
def on_message(message); end
end
# Distributes messages between subscribed actors. Each actor gets only one message, then
# it's unsubscribed. The actor needs to resubscribe when it's ready to receive the next message.
# Messages are buffered if no worker is registered.
#
# @see Pool
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/utils/balancer.rb#11
class Concurrent::Actor::Utils::Balancer < ::Concurrent::Actor::RestartingContext
# @return [Balancer] a new instance of Balancer
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/utils/balancer.rb#13
def initialize; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/utils/balancer.rb#37
def distribute; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/utils/balancer.rb#18
def on_message(message); end
end
# Allows pub/sub to be built easily.
#
# @example news
# news_channel = Concurrent::Actor::Utils::Broadcast.spawn :news
#
# 2.times do |i|
# Concurrent::Actor::Utils::AdHoc.spawn "listener-#{i}" do
# news_channel << :subscribe
# -> message { puts message }
# end
# end
#
# news_channel << 'Ruby rocks!'
# # prints: 'Ruby rocks!' twice
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/utils/broadcast.rb#21
class Concurrent::Actor::Utils::Broadcast < ::Concurrent::Actor::RestartingContext
# @return [Broadcast] a new instance of Broadcast
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/utils/broadcast.rb#23
def initialize; end
# Override to define different behaviour, filtering, etc.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/utils/broadcast.rb#46
def filtered_receivers; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/utils/broadcast.rb#27
def on_message(message); end
end
# Allows a pool of workers to be created and work distributed between them
#
# @example
# class Worker < Concurrent::Actor::RestartingContext
# def on_message(message)
# p message * 5
# end
# end
#
# pool = Concurrent::Actor::Utils::Pool.spawn! 'pool', 5 do |index|
# Worker.spawn name: "worker-#{index}", supervise: true, args: []
# end
#
# pool << 'asd' << 2
# # prints:
# # "asdasdasdasdasd"
# # 10
# @param size [Integer] number of workers
# @yield [balancer, index] a block spawning a worker instance, called +size+ times.
# The worker should be a descendant of AbstractWorker and supervised; see the example.
# @yieldparam balancer [Balancer] to pass to the worker
# @yieldparam index [Integer] of the worker, usually used in its name
# @yieldreturn [Reference] the reference of newly created worker
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/utils/pool.rb#30
class Concurrent::Actor::Utils::Pool < ::Concurrent::Actor::RestartingContext
# @return [Pool] a new instance of Pool
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/utils/pool.rb#31
def initialize(size, &worker_initializer); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/actor/utils/pool.rb#40
def on_message(message); end
end
class Concurrent::CAtomicBoolean
def initialize(*_arg0); end
def false?; end
def make_false; end
def make_true; end
def true?; end
def value; end
def value=(_arg0); end
end
class Concurrent::CAtomicFixnum
def initialize(*_arg0); end
def compare_and_set(_arg0, _arg1); end
def decrement(*_arg0); end
def down(*_arg0); end
def increment(*_arg0); end
def up(*_arg0); end
def update; end
def value; end
def value=(_arg0); end
end
# The Cancellation abstraction provides cooperative cancellation.
#
# The standard methods `Thread#raise` and `Thread#kill` available in Ruby
# are very dangerous (see the blog posts linked below).
# Therefore concurrent-ruby provides an alternative.
# * <https://jvns.ca/blog/2015/11/27/why-rubys-timeout-is-dangerous-and-thread-dot-raise-is-terrifying/>
# * <http://www.mikeperham.com/2015/05/08/timeout-rubys-most-dangerous-api/>
# * <http://blog.headius.com/2008/02/rubys-threadraise-threadkill-timeoutrb.html>
#
# It provides an object which represents a task that can be executed;
# the task has to get a reference to the object and periodically, cooperatively, check that it has not been cancelled.
# Good practices to make tasks cancellable:
# * check cancellation every cycle of a loop which does significant work,
# * do all blocking actions in a loop with a timeout, then on timeout check cancellation
# and, if not cancelled, block again with the timeout
#
# The idea was inspired by <https://msdn.microsoft.com/en-us/library/dd537607(v=vs.110).aspx>
# {include:file:docs-source/cancellation.out.md}
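#
# A minimal sketch of the intended usage, assuming the default resolvable-event origin:
#
#     cancellation, origin = Concurrent::Cancellation.new
#
#     task = Concurrent::Promises.future(cancellation) do |c|
#       steps = 0
#       until c.canceled?
#         steps += 1 # one small unit of work, then re-check cooperatively
#         sleep 0.01
#       end
#       steps
#     end
#
#     origin.resolve # resolving the origin cancels the task cooperatively
#     task.value!    # number of steps completed before cancellation was observed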
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/cancellation.rb#36
class Concurrent::Cancellation < ::Concurrent::Synchronization::Object
extend ::Concurrent::Synchronization::SafeInitialization
# Creates the cancellation object.
#
# @example
# cancellation, origin = Concurrent::Cancellation.new
# @param origin [Promises::Future, Promises::Event] of the cancellation.
# When it is resolved the cancellation is canceled.
# @return [Cancellation] a new instance of Cancellation
# @see #to_ary
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/cancellation.rb#54
def initialize(origin = T.unsafe(nil)); end
# Is the cancellation cancelled?
# In other words, was the origin of the cancellation resolved?
#
# @return [true, false]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/cancellation.rb#77
def canceled?; end
# Raises the error when cancelled
#
# @param error [#exception] to be raised
# @raise the error
# @return [self]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/cancellation.rb#85
def check!(error = T.unsafe(nil)); end
# Short string representation.
#
# @return [String]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/cancellation.rb#101
def inspect; end
# Creates a new Cancellation which is cancelled when the first
# of the supplied cancellations or self is cancelled.
#
# @param cancellations [Cancellation] to combine
# @return [Cancellation] new cancellation
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/cancellation.rb#95
def join(*cancellations); end
# The event or future which is the origin of the cancellation
#
# @return [Promises::Future, Promises::Event]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/cancellation.rb#70
def origin; end
# Allows multi-assignment of the Cancellation object
#
# @example
# cancellation = Concurrent::Cancellation.new
# cancellation, origin = Concurrent::Cancellation.new
# @return [Array(Cancellation, Promises::Future), Array(Cancellation, Promises::Event)]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/cancellation.rb#64
def to_ary; end
# Short string representation.
#
# @return [String]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/cancellation.rb#101
def to_s; end
class << self
# Creates a Cancellation which will cancel itself in the given time
#
# @return [Cancellation]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/cancellation.rb#43
def timeout(intended_time); end
end
end
# {include:file:docs-source/channel.md}
#
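# A minimal buffered-channel sketch (assuming the `capacity:` option creates a buffered
# channel, as in the channel documentation):
#
#     ch = Concurrent::Channel.new(capacity: 2)
#     Concurrent::Channel.go { ch.put 1; ch.put 2 }
#     ch.take # => 1
#     ch.take # => 2
#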
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#4
class Concurrent::Channel
include ::Enumerable
extend ::Forwardable
# @return [Channel] a new instance of Channel
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#47
def initialize(opts = T.unsafe(nil)); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#75
def <<(item); end
# source://forwardable/1.3.3/forwardable.rb#231
def blocking?(*args, **_arg1, &block); end
# source://forwardable/1.3.3/forwardable.rb#231
def capacity(*args, **_arg1, &block); end
# source://forwardable/1.3.3/forwardable.rb#231
def close(*args, **_arg1, &block); end
# source://forwardable/1.3.3/forwardable.rb#231
def closed?(*args, **_arg1, &block); end
# @raise [ArgumentError]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#193
def each; end
# source://forwardable/1.3.3/forwardable.rb#231
def empty?(*args, **_arg1, &block); end
# source://forwardable/1.3.3/forwardable.rb#231
def full?(*args, **_arg1, &block); end
# source://forwardable/1.3.3/forwardable.rb#231
def length(*args, **_arg1, &block); end
# @example
#
# jobs = Channel.new
#
# Channel.go do
# loop do
# j, more = jobs.next
# if more
# print "received job #{j}\n"
# else
# print "received all jobs\n"
# break
# end
# end
# end
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#159
def next; end
# @return [Boolean]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#165
def next?; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#99
def offer(item); end
# @raise [Error]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#104
def offer!(item); end
# @return [Boolean]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#111
def offer?(item); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#175
def poll; end
# @raise [Error]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#179
def poll!; end
# @return [Boolean]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#185
def poll?; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#75
def put(item); end
# @raise [Error]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#82
def put!(item); end
# @return [Boolean]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#89
def put?(item); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#121
def receive; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#75
def send(item); end
# source://forwardable/1.3.3/forwardable.rb#231
def size(*args, **_arg1, &block); end
# source://forwardable/1.3.3/forwardable.rb#231
def stop(*args, **_arg1, &block); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#121
def take; end
# @raise [Error]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#128
def take!; end
# @return [Boolean]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#134
def take?; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#121
def ~; end
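# A hedged usage sketch of the instance API. The `capacity:` option is
# an assumption based on the channel docs referenced above; everything
# else follows the methods defined here.
#
# @example Buffered put/take, non-blocking poll, and iteration
#   chan = Concurrent::Channel.new(capacity: 2)
#   chan.put :a  # => true, does not block (capacity 2)
#   chan.put :b  # => true
#   chan.poll    # => :a (non-blocking take)
#   chan.take    # => :b (blocking take)
#
#   chan.put :c
#   chan.close   # no further puts are accepted
#   chan.each { |item| puts item } # drains :c, then stops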
private
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#257
def buffer; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#261
def buffer=(value); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#290
def do_next; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#282
def do_offer(item); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#294
def do_poll; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#278
def do_put(item); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#286
def do_take; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#265
def validate(value, allow_nil, raise_error); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#249
def validator; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#253
def validator=(value); end
class << self
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#206
def after(seconds); end
# @raise [ArgumentError]
# @yield [selector, args]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#216
def alt(*args); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#224
def go(*args, &block); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#233
def go_loop(*args, &block); end
# @raise [ArgumentError]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#237
def go_loop_via(executor, *args, &block); end
# @raise [ArgumentError]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#228
def go_via(executor, *args, &block); end
# @raise [ArgumentError]
# @yield [selector, args]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#216
def select(*args); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#211
def tick(interval); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#211
def ticker(interval); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#206
def timer(seconds); end
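# A hedged sketch of the goroutine-style class helpers. The worker-pool
# shape mirrors the `#next` example above; the job values and pool size
# are illustrative.
#
# @example Fan work out to goroutines over channels
#   jobs    = Concurrent::Channel.new(capacity: 5)
#   results = Concurrent::Channel.new(capacity: 5)
#
#   3.times do
#     Concurrent::Channel.go do
#       jobs.each { |j| results.put(j * 2) } # runs until jobs is closed
#     end
#   end
#
#   (1..5).each { |j| jobs.put(j) }
#   jobs.close
#   5.times { puts results.take }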
end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#21
Concurrent::Channel::BUFFER_TYPES = T.let(T.unsafe(nil), Hash)
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#5
module Concurrent::Channel::Buffer; end
# Abstract base class for all Channel buffers.
#
# {Concurrent::Channel} objects maintain an internal, queue-like
# object called a buffer. It's the storage bin for values put onto or
# taken from the channel. Different buffer types have different
# characteristics. Subsequently, the behavior of any given channel is
# highly dependent upon the type of its buffer. This is the base class
# which defines the common buffer interface. Any class intended to be
# used as a channel buffer should extend this class.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#16
class Concurrent::Channel::Buffer::Base < ::Concurrent::Synchronization::LockableObject
# Creates a new buffer.
#
# @return [Base] a new instance of Base
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#27
def initialize(*args); end
# Predicate indicating if this buffer will block {#put} operations
# once it reaches its maximum capacity.
#
# @return [Boolean] true if this buffer blocks else false
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#44
def blocking?; end
# The maximum number of values which can be {#put} onto the buffer
# before it becomes full.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#22
def capacity; end
# Close the buffer, preventing new items from being added. Once a
# buffer is closed it cannot be opened again.
#
# @return [Boolean] true if the buffer was open and successfully
# closed else false.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#176
def close; end
# Predicate indicating if this buffer is closed.
#
# @return [Boolean] true when closed else false.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#187
def closed?; end
# Predicate indicating if the buffer is empty.
#
# @raise [NotImplementedError] until overridden in a subclass.
# @return [Boolean] true if this buffer is empty else false
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#62
def empty?; end
# Predicate indicating if the buffer is full.
#
# @raise [NotImplementedError] until overridden in a subclass.
# @return [Boolean] true if this buffer is full else false
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#73
def full?; end
# Take the next "item" from the buffer and also return a boolean
# indicating if "more" items can be taken. Used for iterating
# over a buffer until it is closed and empty.
#
# If the buffer is open but no items remain the calling thread will
# block until an item is available. The second of the two return
# values, "more" (a boolean), will always be `true` when the buffer is
# open. The "more" value will be `false` when the channel has been
# closed and all values have already been received. When "more" is
# false the returned item will be `Concurrent::NULL`.
#
# Note that when multiple threads access the same channel a race
# condition can occur when using this method. A call to `next` from
# one thread may return `true` for the second return value, but
# another thread may `take` the last value before the original
# thread makes another call. Code which iterates over a channel
# must be programmed to properly handle these race conditions.
#
# @raise [NotImplementedError] until overridden in a subclass.
# @return [Object, Boolean] the first return value will be the item
# taken from the buffer and the second return value will be a
# boolean indicating whether or not more items remain.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#151
def next; end
# Put an item onto the buffer if possible. If the buffer is open but
# unable to add an item, probably due to being full, the method will
# return immediately. Similarly, the method will return immediately
# when the buffer is closed. A return value of `false` does not
# necessarily indicate that the buffer is closed, just that the item
# could not be added.
#
# @param item [Object] the item/value to put onto the buffer.
# @raise [NotImplementedError] until overridden in a subclass.
# @return [Boolean] true if the item was added to the buffer else
# false (always false when closed).
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#106
def offer(item); end
# Take the next item from the buffer if one is available else return
# immediately. Failing to return a value does not necessarily
# indicate that the buffer is closed, just that it is empty.
#
# @raise [NotImplementedError] until overridden in a subclass.
# @return [Object] the next item from the buffer or `Concurrent::NULL` if
# the buffer is empty.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#165
def poll; end
# Put an item onto the buffer if possible. If the buffer is open
# but not able to accept the item the calling thread will block
# until the item can be put onto the buffer.
#
# @param item [Object] the item/value to put onto the buffer.
# @raise [NotImplementedError] until overridden in a subclass.
# @return [Boolean] true if the item was added to the buffer else
# false (always false when closed).
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#88
def put(item); end
# The number of items currently in the buffer.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#51
def size; end
# Take an item from the buffer if one is available. If the buffer
# is open and no item is available the calling thread will block
# until an item is available. If the buffer is closed but items
# are available the remaining items can still be taken. Once the
# buffer closes, no remaining items can be taken.
#
# @raise [NotImplementedError] until overridden in a subclass.
# @return [Object] the item removed from the buffer; `Concurrent::NULL` once
# the buffer has closed.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#122
def take; end
private
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#193
def buffer; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#197
def buffer=(value); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#205
def capacity=(value); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#201
def closed=(value); end
# Predicate indicating if this buffer is closed.
#
# @return [Boolean] true when closed else false.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#232
def ns_closed?; end
# Predicate indicating if the buffer is empty.
#
# @raise [NotImplementedError]
# @return [Boolean] true if this buffer is empty else false
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#222
def ns_empty?; end
# Predicate indicating if the buffer is full.
#
# @raise [NotImplementedError]
# @return [Boolean] true if this buffer is full else false
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#227
def ns_full?; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#213
def ns_initialize(*args); end
# The number of items currently in the buffer.
#
# @raise [NotImplementedError]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#217
def ns_size; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/base.rb#209
def size=(value); end
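# A hedged sketch of the buffer contract described above, shown with the
# concrete {Buffered} subclass since this class is abstract. The return
# values follow the {#offer}/{#poll}/{#close} docs; the capacity of 1 is
# illustrative.
#
# @example Non-blocking offer/poll against a buffered buffer
#   buf = Concurrent::Channel::Buffer::Buffered.new(1)
#   buf.offer(:a)   # => true  (stored)
#   buf.offer(:b)   # => false (full, item not stored)
#   buf.poll        # => :a
#   buf.poll        # => Concurrent::NULL (empty)
#   buf.close       # => true
#   buf.closed?     # => true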
end
# A buffer with a fixed internal capacity. Items can be put onto the
# buffer without blocking until the internal capacity is reached. Once
# the buffer is at capacity, subsequent calls to {#put} will block until
# an item is removed from the buffer, creating spare capacity.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/buffered.rb#12
class Concurrent::Channel::Buffer::Buffered < ::Concurrent::Channel::Buffer::Base
# Take the next "item" from the buffer and also return a boolean
# indicating if "more" items can be taken. Used for iterating
# over a buffer until it is closed and empty.
#
# If the buffer is open but no items remain the calling thread will
# block until an item is available. The second of the two return
# values, "more" (a boolean), will always be `true` when the buffer is
# open. The "more" value will be `false` when the channel has been
# closed and all values have already been received. When "more" is
# false the returned item will be `Concurrent::NULL`.
#
# Note that when multiple threads access the same channel a race
# condition can occur when using this method. A call to `next` from
# one thread may return `true` for the second return value, but
# another thread may `take` the last value before the original
# thread makes another call. Code which iterates over a channel
# must be programmed to properly handle these race conditions.
#
# @return [Object, Boolean] the first return value will be the item
# taken from the buffer and the second return value will be a
# boolean indicating whether or not more items remain.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/buffered.rb#56
def next; end
# Put an item onto the buffer if possible. If the buffer is open but
# unable to add an item, probably due to being full, the method will
# return immediately. Similarly, the method will return immediately
# when the buffer is closed. A return value of `false` does not
# necessarily indicate that the buffer is closed, just that the item
# could not be added.
#
# New items can be put onto the buffer until the number of items in
# the buffer reaches the {#size} value specified during
# initialization.
#
# @param item [Object] the item/value to put onto the buffer.
# @return [Boolean] true if the item was added to the buffer else
# false (always false when closed).
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/buffered.rb#38
def offer(item); end
# Take the next item from the buffer if one is available else return
# immediately. Failing to return a value does not necessarily
# indicate that the buffer is closed, just that it is empty.
#
# @return [Object] the next item from the buffer or `Concurrent::NULL` if
# the buffer is empty.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/buffered.rb#71
def poll; end
# Put an item onto the buffer if possible. If the buffer is open
# but not able to accept the item the calling thread will block
# until the item can be put onto the buffer.
#
# New items can be put onto the buffer until the number of items in
# the buffer reaches the {#size} value specified during
# initialization.
#
# @param item [Object] the item/value to put onto the buffer.
# @return [Boolean] true if the item was added to the buffer else
# false (always false when closed).
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/buffered.rb#19
def put(item); end
# Take an item from the buffer if one is available. If the buffer
# is open and no item is available the calling thread will block
# until an item is available. If the buffer is closed but items
# are available the remaining items can still be taken. Once the
# buffer closes, no remaining items can be taken.
#
# @return [Object] the item removed from the buffer; `Concurrent::NULL` once
# the buffer has closed.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/buffered.rb#50
def take; end
private
# Predicate indicating if the buffer is empty.
#
# @return [Boolean] true if this buffer is empty else false
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/buffered.rb#100
def ns_empty?; end
# Predicate indicating if the buffer is full.
#
# @return [Boolean] true if this buffer is full else false
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/buffered.rb#105
def ns_full?; end
# Creates a new buffer.
#
# @param size [Integer] the maximum capacity of the buffer; must be
# greater than zero.
# @raise [ArgumentError] when the size is zero (0) or less.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/buffered.rb#88
def ns_initialize(size); end
# Put an item onto the buffer if possible. If the buffer is open
# but not able to accept the item the calling thread will block
# until the item can be put onto the buffer.
#
# @param item [Object] the item/value to put onto the buffer.
# @return [Boolean] true if the item was added to the buffer else
# false (always false when closed).
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/buffered.rb#110
def ns_put_onto_buffer(item); end
# The number of items currently in the buffer.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/buffered.rb#95
def ns_size; end
end
# A non-blocking, buffered buffer of fixed maximum capacity. When the
# maximum capacity is reached subsequent {#put} and {#offer} operations
# will complete but the `put` item will be discarded; no transfer will
# occur.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/dropping.rb#12
class Concurrent::Channel::Buffer::Dropping < ::Concurrent::Channel::Buffer::Buffered
# Predicate indicating if this buffer will block {#put} operations
# once it reaches its maximum capacity.
#
# Always returns `false`.
#
# @return [Boolean] true if this buffer blocks else false
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/dropping.rb#36
def blocking?; end
private
# Predicate indicating if the buffer is full.
#
# @return [Boolean] true if this buffer is full else false
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/dropping.rb#43
def ns_full?; end
# Put an item onto the buffer if possible. If the buffer is open
# but not able to accept the item the calling thread will block
# until the item can be put onto the buffer.
#
# @param item [Object] the item/value to put onto the buffer.
# @return [Boolean] true if the item was added to the buffer else
# false (always false when closed).
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/dropping.rb#48
def ns_put_onto_buffer(item); end
end
# A non-blocking, buffered buffer of fixed maximum capacity. When the
# maximum capacity is reached subsequent {#put} and {#offer} operations
# will complete and the item will be `put`, but the oldest elements in
# the buffer will be discarded (not transferred).
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/sliding.rb#12
class Concurrent::Channel::Buffer::Sliding < ::Concurrent::Channel::Buffer::Buffered
# Predicate indicating if this buffer will block {#put} operations
# once it reaches its maximum capacity.
#
# Always returns `false`.
#
# @return [Boolean] true if this buffer blocks else false
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/sliding.rb#36
def blocking?; end
private
# Predicate indicating if the buffer is full.
#
# @return [Boolean] true if this buffer is full else false
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/sliding.rb#43
def ns_full?; end
# Put an item onto the buffer if possible. If the buffer is open
# but not able to accept the item the calling thread will block
# until the item can be put onto the buffer.
#
# @param item [Object] the item/value to put onto the buffer.
# @return [Boolean] true if the item was added to the buffer else
# false (always false when closed).
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/sliding.rb#48
def ns_put_onto_buffer(item); end
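# A hedged sketch contrasting {Dropping} and {Sliding} buffers of
# capacity 2: both accept every put, but they differ in which item
# gets discarded once full.
#
# @example Dropping discards the new item, Sliding discards the oldest
#   dropping = Concurrent::Channel::Buffer::Dropping.new(2)
#   [1, 2, 3].each { |i| dropping.put(i) } # 3 is discarded
#   dropping.poll # => 1
#   dropping.poll # => 2
#
#   sliding = Concurrent::Channel::Buffer::Sliding.new(2)
#   [1, 2, 3].each { |i| sliding.put(i) }  # 1 is discarded
#   sliding.poll  # => 2
#   sliding.poll  # => 3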
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/ticker.rb#10
class Concurrent::Channel::Buffer::Ticker < ::Concurrent::Channel::Buffer::Timer
private
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/ticker.rb#20
def do_poll; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/ticker.rb#14
def ns_initialize(interval); end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/timer.rb#10
class Concurrent::Channel::Buffer::Timer < ::Concurrent::Channel::Buffer::Base
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/timer.rb#31
def next; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/timer.rb#16
def offer(item); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/timer.rb#39
def poll; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/timer.rb#12
def put(item); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/timer.rb#20
def take; end
private
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/timer.rb#64
def do_poll; end
# @return [Boolean]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/timer.rb#56
def ns_empty?; end
# @return [Boolean]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/timer.rb#60
def ns_full?; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/timer.rb#47
def ns_initialize(delay); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/timer.rb#52
def ns_size; end
end
# A blocking buffer with a size of zero. An item can only be put onto
# the buffer when a thread is waiting to take. Similarly, an item can
# only be taken from the buffer when a thread is waiting to put. When
# either {#put} or {#take} is called and there is no corresponding call
# in progress, the call will block indefinitely. Any other calls to the
# same method will queue behind the first call and block as well. As
# soon as a corresponding put/take call is made an exchange will occur
# and the first blocked call will return.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/unbuffered.rb#17
class Concurrent::Channel::Buffer::Unbuffered < ::Concurrent::Channel::Buffer::Base
# Predicate indicating if the buffer is empty.
#
# @return [Boolean] true if this buffer is empty else false
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/unbuffered.rb#27
def empty?; end
# Predicate indicating if the buffer is full.
#
# @return [Boolean] true if this buffer is full else false
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/unbuffered.rb#32
def full?; end
# Take the next "item" from the buffer and also return a boolean
# indicating if "more" items can be taken. Used for iterating
# over a buffer until it is closed and empty.
#
# If the buffer is open but no items remain the calling thread will
# block until an item is available. The second of the two return
# values, "more" (a boolean), will always be `true` when the buffer is
# open. The "more" value will be `false` when the channel has been
# closed and all values have already been received. When "more" is
# false the returned item will be `Concurrent::NULL`.
#
# Note that when multiple threads access the same channel a race
# condition can occur when using this method. A call to `next` from
# one thread may return `true` for the second return value, but
# another thread may `take` the last value before the original
# thread makes another call. Code which iterates over a channel
# must be programmed to properly handle these race conditions.
#
# Items can only be taken from the buffer when one or more threads are
# waiting to {#put} items onto the buffer. This method exhibits the
# same blocking behavior as {#take}.
#
# @return [Object, Boolean] the first return value will be the item
# taken from the buffer and the second return value will be a
# boolean indicating whether or not more items remain.
# @see #take
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/unbuffered.rb#135
def next; end
# Put an item onto the buffer if possible. If the buffer is open but
# unable to add an item, probably due to being full, the method will
# return immediately. Similarly, the method will return immediately
# when the buffer is closed. A return value of `false` does not
# necessarily indicate that the buffer is closed, just that the item
# could not be added.
#
# Items can only be put onto the buffer when one or more threads are
# waiting to {#take} items off the buffer. When there is a thread
# waiting to take an item this method will give its item and return
# `true` immediately. When there are no threads waiting to take or the
# buffer is closed, this method will return `false` immediately.
#
# @param item [Object] the item/value to put onto the buffer.
# @return [Boolean] true if the item was added to the buffer else
# false (always false when closed).
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/unbuffered.rb#71
def offer(item); end
# Take the next item from the buffer if one is available else return
# immediately. Failing to return a value does not necessarily
# indicate that the buffer is closed, just that it is empty.
#
# Items can only be taken off the buffer when one or more threads are
# waiting to {#put} items onto the buffer. When there is a thread
# waiting to put an item this method will take the item and return
# it immediately. When there are no threads waiting to put or the
# buffer is closed, this method will return `Concurrent::NULL` immediately.
#
# @return [Object] the next item from the buffer or `Concurrent::NULL` if
# the buffer is empty.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/unbuffered.rb#117
def poll; end
# Put an item onto the buffer if possible. If the buffer is open
# but not able to accept the item the calling thread will block
# until the item can be put onto the buffer.
#
# Items can only be put onto the buffer when one or more threads are
# waiting to {#take} items off the buffer. When there is a thread
# waiting to take an item this method will give its item and return
# immediately. When there are no threads waiting to take, this method
# will block. As soon as a thread calls `take` the exchange will
# occur and this method will return.
#
# @param item [Object] the item/value to put onto the buffer.
# @return [Boolean] true if the item was added to the buffer else
# false (always false when closed).
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/unbuffered.rb#44
def put(item); end
# The number of items currently in the buffer.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/unbuffered.rb#20
def size; end
# Take an item from the buffer if one is available. If the buffer
# is open and no item is available the calling thread will block
# until an item is available. If the buffer is closed but items
# are available the remaining items can still be taken. Once the
# buffer closes, no remaining items can be taken.
#
# Items can only be taken from the buffer when one or more threads are
# waiting to {#put} items onto the buffer. When there is a thread
# waiting to put an item this method will take that item and return it
# immediately. When there are no threads waiting to put, this method
# will block. As soon as a thread calls `put` the exchange will occur
# and this method will return.
#
# @return [Object] the item removed from the buffer; `Concurrent::NULL` once
# the buffer has closed.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/unbuffered.rb#89
def take; end
private
# Creates a new buffer.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/unbuffered.rb#148
def ns_initialize; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/unbuffered.rb#143
def putting; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/buffer/unbuffered.rb#145
def taking; end
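# A hedged sketch of the rendezvous behaviour described above: a put
# blocks until a matching take arrives. The goroutine started with
# {Channel.go} and the assumption that a plain `Channel.new` is
# unbuffered come from the channel docs, not this file.
#
# @example Put/take rendezvous on an unbuffered channel
#   chan = Concurrent::Channel.new # unbuffered by default
#
#   Concurrent::Channel.go do
#     puts "got #{chan.take}"      # releases the pending put
#   end
#
#   chan.put(:ping)                # blocks until the goroutine takes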
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#29
Concurrent::Channel::DEFAULT_VALIDATOR = T.let(T.unsafe(nil), Proc)
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#32
class Concurrent::Channel::Error < ::StandardError; end
# NOTE: Move to global IO pool once stable
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#18
Concurrent::Channel::GOROUTINES = T.let(T.unsafe(nil), Concurrent::CachedThreadPool)
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector/after_clause.rb#6
class Concurrent::Channel::Selector
# @return [Selector] a new instance of Selector
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector.rb#14
def initialize; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector.rb#40
def after(seconds, &block); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector.rb#19
def case(channel, action, message = T.unsafe(nil), &block); end
# @raise [ArgumentError]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector.rb#45
def default(&block); end
# @raise [ArgumentError]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector.rb#50
def error(&block); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector.rb#56
def execute; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector.rb#35
def put(channel, message, &block); end
# @raise [ArgumentError]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector.rb#29
def receive(channel, &block); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector.rb#35
def send(channel, message, &block); end
# @raise [ArgumentError]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector.rb#29
def take(channel, &block); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector.rb#40
def timeout(seconds, &block); end
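# A hedged sketch of selector usage through {Channel.select}, which
# yields a Selector to its block (see the `@yield [selector, args]`
# note on {Channel.select}). The channels and values are illustrative.
#
# @example Run the first ready clause, with a default fallback
#   c1 = Concurrent::Channel.new(capacity: 1)
#   c2 = Concurrent::Channel.new(capacity: 1)
#   c2.put :from_c2
#
#   Concurrent::Channel.select do |s|
#     s.take(c1)  { |msg| puts "c1: #{msg}" }
#     s.take(c2)  { |msg| puts "c2: #{msg}" } # ready, so this clause fires
#     s.default   { puts "nothing ready" }
#   end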
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector/after_clause.rb#8
class Concurrent::Channel::Selector::AfterClause
# @raise [ArgumentError]
# @return [AfterClause] a new instance of AfterClause
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector/after_clause.rb#10
def initialize(seconds, block); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector/after_clause.rb#16
def execute; end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector/default_clause.rb#7
class Concurrent::Channel::Selector::DefaultClause
# @return [DefaultClause] a new instance of DefaultClause
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector/default_clause.rb#9
def initialize(block); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector/default_clause.rb#13
def execute; end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector/put_clause.rb#7
class Concurrent::Channel::Selector::PutClause
# @return [PutClause] a new instance of PutClause
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector/put_clause.rb#9
def initialize(channel, message, block); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector/put_clause.rb#15
def execute; end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector/take_clause.rb#7
class Concurrent::Channel::Selector::TakeClause
# @return [TakeClause] a new instance of TakeClause
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector/take_clause.rb#9
def initialize(channel, block); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/selector/take_clause.rb#14
def execute; end
end
# A convenience class representing a single moment in monotonic time.
# Returned by {Concurrent::Channel} tickers and timers when they
# resolve.
#
# Includes `Comparable` and can be compared to monotonic_time, UTC
# time, or epoch time.
#
# @see Concurrent.monotonic_time
# @see Concurrent::Channel.ticker
# @see Concurrent::Channel.timer
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/tick.rb#17
class Concurrent::Channel::Tick < ::Concurrent::Synchronization::Object
include ::Comparable
extend ::Concurrent::Synchronization::SafeInitialization
# @return [Tick] a new instance of Tick
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/tick.rb#25
def initialize(tick = T.unsafe(nil)); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/tick.rb#38
def <=>(other); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/tick.rb#30
def epoch; end
# Returns the value of attribute monotonic.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/tick.rb#23
def monotonic; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/tick.rb#34
def to_s; end
# Returns the value of attribute utc.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/tick.rb#23
def utc; end
private
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/tick.rb#52
def monotonic_to_utc(tick); end
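# A hedged sketch: values taken from {Channel.ticker} and {Channel.timer}
# channels are Tick instances (per the class docs above); the 0.1 s
# interval below is illustrative.
#
# @example Reading ticks from a ticker channel
#   ticker = Concurrent::Channel.ticker(0.1) # a Tick roughly every 100 ms
#   3.times do
#     tick = ticker.take
#     puts "#{tick.utc} (monotonic #{tick.monotonic})"
#   end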
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel/tick.rb#21
Concurrent::Channel::Tick::STRING_FORMAT = T.let(T.unsafe(nil), String)
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#34
class Concurrent::Channel::ValidationError < ::Concurrent::Channel::Error
# @return [ValidationError] a new instance of ValidationError
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/channel.rb#35
def initialize(message = T.unsafe(nil)); end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/version.rb#2
Concurrent::EDGE_VERSION = T.let(T.unsafe(nil), String)
# A submodule for unstable, highly experimental features that are likely to
# change often and which may never become part of the core gem. Also for
# new, experimental versions of abstractions already in the core gem.
#
# Most new features should start in this module, clearly indicating the
# experimental and unstable nature of the feature. Once a feature becomes
# more stable and is a candidate for inclusion in the core gem it should
# be moved up to the `Concurrent` module, where it would reside once merged
# into the core gem.
#
# The only exception to this is for features which *replace* features from
# the core gem in ways that are breaking and not backward compatible. These
# features should remain in this module until merged into the core gem. This
# will prevent namespace collisions.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/node.rb#4
module Concurrent::Edge; end
# This class implements a lock-free linked set. The general idea of this
# implementation is this: each node has a successor which is an Atomic
# Markable Reference. This is used to ensure that all modifications to the
# list are atomic, preserving the structure of the linked list under _any_
# circumstance in a multithreaded application.
#
# One interesting aspect of this algorithm occurs with removing a node.
# Instead of physically removing a node when remove is called, a node is
# logically removed, by 'marking it.' By doing this, we prevent calls to
# `remove` from traversing the list twice to perform a physical removal.
# Instead, we have calls to `add` and `remove` clean up all marked
# nodes they encounter while traversing the list.
#
# This algorithm is a variation of the Nonblocking Linked Set found in
# 'The Art of Multiprocessor Programming' by Herlihy and Shavit.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/node.rb#5
class Concurrent::Edge::LockFreeLinkedSet
include ::Enumerable
# @param initial_size [Fixnum] the size of the linked_list to initialize
# @return [LockFreeLinkedSet] a new instance of LockFreeLinkedSet
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set.rb#28
def initialize(initial_size = T.unsafe(nil), val = T.unsafe(nil)); end
# Atomically adds the item to the set if it does not yet exist.
#
# @param item [Object] the item you wish to insert
# @return [Object] the set on which the :<< method was invoked
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set.rb#72
def <<(item); end
# Atomically adds the item to the set if it does not yet exist. Note:
# internally the set uses `Object#hash` to compare equality of items,
# meaning that Strings and other objects will be considered equal
# despite being different objects.
#
# @param item [Object] the item you wish to insert
# @return [Boolean] `true` if successful. A `false` return indicates
#   that the item was already in the set.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set.rb#48
def add(item); end
# Atomically checks to see if the set contains an item. This method
# compares equality based on the `Object#hash` method, meaning that the
# hashed contents of an object is what determines equality instead of
# `Object#object_id`
#
# @param item [Object] the item you wish to check for presence in the set
# @return [Boolean] whether or not the item is in the set
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set.rb#87
def contains?(item); end
# An iterator to loop through the set.
#
# @return [Object] self: the linked set on which each was called
# @yield [item] each item in the set
# @yieldparam item [Object] each item in the set
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set.rb#132
def each; end
# Atomically attempts to remove an item, comparing using `Object#hash`.
#
# @param item [Object] the item you wish to remove from the set
# @return [Boolean] whether or not the item was removed from the set
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set.rb#105
def remove(item); end
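# A hedged usage sketch of the set API documented above. Equality is
# based on `Object#hash`, so the string literals below compare equal.
#
# @example Basic add / contains? / remove
#   set = Concurrent::Edge::LockFreeLinkedSet.new
#   set.add 'a'           # => true
#   set << 'b'            # returns the set itself
#   set.contains? 'a'     # => true
#   set.add 'a'           # => false, already present
#   set.remove 'a'        # => true
#   set.each { |item| puts item }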
end
# Internal sentinel node for the Head of the set. Head is always smaller
# than any other node.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/node.rb#74
class Concurrent::Edge::LockFreeLinkedSet::Head < ::Concurrent::Edge::LockFreeLinkedSet::Node
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/node.rb#75
def <=>(_other); end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/node.rb#6
class Concurrent::Edge::LockFreeLinkedSet::Node < ::Concurrent::Synchronization::Object
include ::Comparable
extend ::Concurrent::Synchronization::SafeInitialization
# @return [Node] a new instance of Node
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/node.rb#11
def initialize(data = T.unsafe(nil), successor = T.unsafe(nil)); end
# We use `Object#hash` as a way to enforce ordering on the nodes. This
# can be configurable in the future; for example, you could enforce a
# split-ordering on the nodes in the set.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/node.rb#51
def <=>(other); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/node.rb#18
def data; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/node.rb#26
def key; end
# This method provides a unique key for the data which will be used for
# ordering. This is configurable, and changes depending on how you wish
# the nodes to be ordered.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/node.rb#44
def key_for(data); end
# Check to see if the node is the last in the list.
#
# @return [Boolean]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/node.rb#31
def last?; end
# Next node in the list. Note: this is not the AtomicMarkableReference
# of the next node, this is the actual Node itself.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/node.rb#37
def next_node; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/node.rb#22
def successor_reference; end
end
# Internal sentinel node for the Tail. It is always greater than all
# other nodes, and it is self-referential; meaning its successor is
# a self-loop.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/node.rb#59
class Concurrent::Edge::LockFreeLinkedSet::Tail < ::Concurrent::Edge::LockFreeLinkedSet::Node
# @return [Tail] a new instance of Tail
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/node.rb#60
def initialize(_data = T.unsafe(nil), _succ = T.unsafe(nil)); end
# Always greater than other nodes. This means that traversal will end
# at the tail node since we are comparing node size in the traversal.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/node.rb#66
def <=>(_other); end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/window.rb#4
class Concurrent::Edge::LockFreeLinkedSet::Window
# @return [Window] a new instance of Window
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/window.rb#7
def initialize(pred, curr); end
# Returns the value of attribute curr.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/window.rb#5
def curr; end
# Sets the attribute curr
#
# @param value the value to set the attribute curr to.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/window.rb#5
def curr=(_arg0); end
# Returns the value of attribute pred.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/window.rb#5
def pred; end
# Sets the attribute pred
#
# @param value the value to set the attribute pred to.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/window.rb#5
def pred=(_arg0); end
class << self
# This method is used to find a 'window' for which `add` and `remove`
# methods can use to know where to add and remove from the list. However,
# it has another responsibility, which is to physically unlink any
# nodes marked for removal in the set. This prevents adds/removes from
# having to retraverse the list to physically unlink nodes.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_linked_set/window.rb#16
def find(head, item); end
end
end
# This module provides actor abstraction that has same behaviour as Erlang actor.
#
# {include:file:docs-source/erlang_actor.out.md}
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#15
module Concurrent::ErlangActor
include ::Concurrent::ErlangActor::EnvironmentConstants
extend ::Concurrent::ErlangActor::Functions
extend ::Concurrent::ErlangActor::FunctionShortcuts
extend ::Logger::Severity
extend ::Concurrent::Concern::Logging
class << self
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1535
def create(type, channel, environment, name, executor); end
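# A hedged sketch of spawning and messaging an actor, based on the
# erlang_actor docs referenced above rather than on this generated file.
# The module-level spawn helper, its `type: :on_thread` option, and the
# bare `receive` call inside the body are assumptions taken from those docs.
#
# @example Spawn an actor on its own thread and send it a message
#   actor = Concurrent::ErlangActor.spawn(type: :on_thread) do
#     message = receive            # blocks until a message arrives
#     "received #{message}"
#   end
#   actor.tell :hello
#   actor.terminated.value!        # => "received hello"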
end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#645
class Concurrent::ErlangActor::AbstractActor < ::Concurrent::Synchronization::Object
include ::Concurrent::ErlangActor::EnvironmentConstants
include ::Logger::Severity
include ::Concurrent::Concern::Logging
extend ::Concurrent::Synchronization::SafeInitialization
# @param mailbox [Promises::Channel]
# @return [AbstractActor] a new instance of AbstractActor
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#652
def initialize(mailbox, environment, name, executor); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#695
def ask(message, timeout, timeout_value); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#726
def ask_op(message, probe); end
# @raise [ArgumentError]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#812
def demonitor(reference, *options); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#766
def link(pid); end
# @return [Boolean]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#788
def linked?(pid); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#792
def monitor(pid); end
# @return [Boolean]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#839
def monitoring?(reference); end
# @raise [ArgumentError]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#757
def on(matcher, value = T.unsafe(nil), &block); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#742
def pid; end
# @raise [NotImplementedError]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#762
def receive(*rules, timeout: T.unsafe(nil), timeout_value: T.unsafe(nil), **options, &block); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#860
def reply_resolution(fulfilled, value, reason); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#843
def spawn(*args, type:, channel:, environment:, name:, link:, monitor:, executor:, &body); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#685
def tell(message, timeout = T.unsafe(nil)); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#676
def tell_op(message); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#865
def terminate(pid = T.unsafe(nil), reason, value: T.unsafe(nil)); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#738
def terminated; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#750
def trap(value = T.unsafe(nil)); end
# @return [Boolean]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#746
def traps?; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#783
def unlink(pid); end
private
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1031
def after_termination(final_reason); end
# @return [Boolean]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#930
def asked?; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#880
def canonical_rules(rules, timeout, timeout_value, given_block); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#934
def clean_reply(reason = T.unsafe(nil)); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#987
def consume_exit(exit_message); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#941
def consume_signal(message); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#913
def eval_task(message, job); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1019
def initial_signal_consumption; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#921
def send_exit_messages(reason); end
# @raise [NotImplementedError]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1027
def terminate_self(reason, value); end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1251
class Concurrent::ErlangActor::AbstractSignal < ::Concurrent::Synchronization::Object
extend ::Concurrent::Synchronization::SafeInitialization
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1257
class Concurrent::ErlangActor::Ask < ::Concurrent::ErlangActor::AbstractSignal
# @raise [ArgumentError]
# @return [Ask] a new instance of Ask
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1260
def initialize(message, probe); end
# Returns the value of attribute message.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1258
def message; end
# Returns the value of attribute probe.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1258
def probe; end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1398
class Concurrent::ErlangActor::DeMonitor < ::Concurrent::ErlangActor::AbstractSignal
include ::Concurrent::ErlangActor::HasFrom
include ::Concurrent::ErlangActor::HasReference
end
# A message send by a monitored actor when terminated.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1467
class Concurrent::ErlangActor::Down
# @return [Down] a new instance of Down
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1476
def initialize(from, reference, info); end
# @return [true, false]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1488
def ==(o); end
# @return [true, false]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1488
def eql?(o); end
# @return [Pid]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1469
def from; end
# @return [Integer]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1495
def hash; end
# @return [Object]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1473
def info; end
# @return [Reference]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1471
def reference; end
# @return [::Array(Pid, Reference, Object)]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1483
def to_ary; end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1436
class Concurrent::ErlangActor::DownSignal < ::Concurrent::ErlangActor::AbstractSignal
include ::Concurrent::ErlangActor::HasFrom
include ::Concurrent::ErlangActor::HasReference
# @return [DownSignal] a new instance of DownSignal
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1443
def initialize(from, reference, info); end
# @return [true, false]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1454
def ==(o); end
# @return [Integer]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1459
def hash; end
# @return [Object]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1440
def info; end
# @return [::Array(Pid, Reference, Object)]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1449
def to_ary; end
end
# A class providing environment and methods for actor bodies to run in.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#140
class Concurrent::ErlangActor::Environment < ::Concurrent::Synchronization::Object
extend ::Concurrent::Synchronization::SafeInitialization
# @return [Environment] a new instance of Environment
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#471
def initialize(actor, executor); end
# @return [ExecutorService] a default executor which is picked by spawn call
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#465
def default_executor; end
# If MonitorRef is a reference which the calling actor obtained by calling {#monitor},
# this monitoring is turned off.
# If the monitoring is already turned off, nothing happens.
#
# Once demonitor has returned it is guaranteed that no {DownSignal} message
# due to the monitor will be placed in the caller's message queue in the future.
# A {DownSignal} message might have been placed in the caller's message queue prior to the call, though.
# Therefore, in most cases, it is advisable to remove such a 'DOWN' message from the message queue
# after monitoring has been stopped.
# `demonitor(reference, :flush)` can be used if this cleanup is wanted.
#
# The behavior of this method can be viewed as two combined operations:
# asynchronously send a "demonitor signal" to the monitored actor and
# ignore any future results of the monitor.
#
# Failure: It is an error if reference refers to a monitoring started by another actor.
# In that case it may raise an ArgumentError or go unnoticed.
#
# Options:
# * `:flush` - Remove (one) {DownSignal} message,
# if there is one, from the caller's message queue after monitoring has been stopped.
# Calling `demonitor(pid, :flush)` is equivalent to the following, but more efficient:
# ```ruby
# demonitor(pid)
# receive on(And[DownSignal, -> d { d.reference == reference}], true), timeout: 0, timeout_value: true
# ```
#
# * `:info`
# The returned value is one of the following:
#
# - `true` - The monitor was found and removed.
#     In this case no {DownSignal} message due to this monitor has been,
#     nor will be, placed in the message queue of the caller.
# - `false` - The monitor was not found and could not be removed.
#     This is probably because someone has already placed a {DownSignal} message
# corresponding to this monitor in the caller's message queue.
#
# If the info option is combined with the flush option,
# `false` will be returned if a flush was needed; otherwise, `true`.
#
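# A minimal sketch (not from the upstream docs), assuming `peer` holds the pid
# of another running actor:
#
# @example
#   ref = monitor(peer)
#   # ... later, once the DownSignal is no longer of interest:
#   demonitor(ref, :flush, :info) # => true, or false when a flush was needed
#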
# @param reference [Reference]
# @param options [:flush, :info]
# @return [true, false]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#332
def demonitor(reference, *options); end
# Creates a link between the calling actor and another actor,
# if there is no such link already.
# If an actor attempts to create a link to itself, nothing is done. Returns true.
#
# If pid does not exist,
# the behavior of the method depends on
# whether the calling actor is trapping exits (see {#trap}):
# * If the calling actor is not trapping exits, link raises {NoActor}.
# * Otherwise, if the calling actor is trapping exits, link returns true,
# but an exit signal with reason noproc is sent to the calling actor.
#
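# An illustrative sketch (not from the upstream docs), assuming `peer` holds
# the pid of another running actor:
#
# @example
#   trap true   # optional: receive peer's failure as a {Terminated} message
#   link peer   # => true
#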
# @raise [NoActor]
# @return [true]
# @see http://www1.erlang.org/doc/man/erlang.html#link-1
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#236
def link(pid); end
# @return [true, false]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#268
def linked?(pid); end
# The calling actor starts monitoring actor with given pid.
#
# A {DownSignal} message will be sent to the monitoring actor
# if the actor with given pid dies,
# or if the actor with given pid does not exist.
#
# The monitoring is turned off either
# when the {DownSignal} message is sent, or when {#demonitor} is called.
#
# Making several calls to monitor for the same pid is not an error;
# it results in as many, completely independent, monitorings.
#
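# A small sketch (not from the upstream docs) of monitoring another actor and
# waiting for its {DownSignal}; `peer` is assumed to hold the other actor's pid.
#
# @example
#   ref = monitor(peer)
#   receive on(And[DownSignal, -> d { d.reference == ref }]) { |down| down.info }
#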
# @return [Reference]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#285
def monitor(pid); end
# @return [Boolean]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#337
def monitoring?(reference); end
# @return [#to_s] the name of the actor if provided to the spawn method
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#154
def name; end
# Helper for constructing a {#receive} rules
#
# @example
# receive on(Numeric) { |v| v.succ },
# on(ANY) { terminate :bad_message }
# @see #receive
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#185
def on(matcher, value = T.unsafe(nil), &block); end
# @return [Pid] the pid of this actor
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#149
def pid; end
# Receive a message.
#
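# A brief sketch (not from the upstream docs) of the three documented calling
# styles; `do_command` stands for any handler of yours:
#
# @example
#   receive                              # next message, blocking on an on-thread actor
#   receive(Numeric) { |v| v.succ }      # single rule combined with a block
#   receive on(Symbol)  { |c| do_command c },
#           on(TIMEOUT) { :no_message },
#           timeout: 1                   # several rules plus a timeout
#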
# @option options
# @param rules [::Array(), ::Array(#===), ::Array<::Array(#===, Proc)>] * No rule - `receive`, `receive {|m| m.to_s}`
# * or single rule which can be combined with the supplied block -
# `receive(Numeric)`, `receive(Numeric) {|v| v.succ}`
# * or array of matcher-proc pairs -
# `receive on(Numeric) { |v| v*2 }, on(Symbol) { |c| do_command c }`
# @param timeout [Numeric] how long it should wait for the message
# @param timeout_value [Object] if rule `on(TIMEOUT) { do_something }` is not specified
# then timeout_value is returned.
# @param options [Hash] other options specific by type of the actor
# @return [Object, nothing] depends on the type of the actor.
#   For the on-thread actor it blocks until a message is available,
#   then it returns the message (or the result of a called block).
#   For the on-pool actor it stops executing and continues with a given block
#   when a message becomes available.
# @see ErlangActor Receiving chapter in the ErlangActor examples
# @yield [message] block
# to process the message
# if single matcher is supplied
# @yieldparam message [Object] the received message
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#218
def receive(*rules, timeout: T.unsafe(nil), timeout_value: T.unsafe(nil), **options, &block); end
# Shortcut for fulfilling the reply, same as `reply_resolution true, value, nil`.
#
# @example
# actor = Concurrent::ErlangActor.spawn(:on_thread) { reply receive * 2 }
# actor.ask 2 #=> 4
# @param value [Object]
# @return [true, false] did the sender ask, and was it resolved
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#405
def reply(value); end
# Reply to the sender of the message currently being processed
# if the actor was asked instead of told.
# The reply is stored in a {Promises::ResolvableFuture}
# so the arguments are same as for {Promises::ResolvableFuture#resolve} method.
#
# The reply may timeout, then this will fail with false.
#
# @example
# actor = Concurrent::ErlangActor.spawn(:on_thread) { reply_resolution true, receive * 2, nil }
# actor.ask 2 #=> 4
# @param fulfilled [true, false]
# @param value [Object]
# @param reason [Object]
# @return [true, false] did the sender ask, and was it resolved before it timed out?
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#426
def reply_resolution(fulfilled = T.unsafe(nil), value = T.unsafe(nil), reason = T.unsafe(nil)); end
# Creates an actor.
#
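# An illustrative sketch (not from the upstream docs) of spawning a linked
# child actor from within an actor body; the child's body is a placeholder.
#
# @example
#   child = spawn(type: :on_thread, link: true, name: 'child') do
#     receive on(ANY) { |message| message }
#   end
#   child.tell :hello
#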
# @param args [Object] arguments for the actor body
# @param type [:on_thread, :on_pool] of the actor to be created.
# @param channel [Channel] The mailbox of the actor, by default it has unlimited capacity.
#   Creating the actor with a bounded queue is useful to create backpressure.
# The channel can be shared with other abstractions
# but actor has to be the only consumer
# otherwise internal signals could be lost.
# @param environment [Environment, Module] A class which is used to run the body of the actor in.
# It can either be a child of {Environment} or a module.
# Module is extended to a new instance of environment,
#   therefore if there are many actors with this module
# it is better to create a class and use it instead.
# @param name [#to_s] of the actor.
# Available by {Pid#name} or {Environment#name} and part of {Pid#to_s}.
# @param link [true, false] the created actor is atomically created and linked with the calling actor
# @param monitor [true, false] the created actor is atomically created and monitored by the calling actor
# @param executor [ExecutorService] The executor service to use to execute the actor on.
# Applies only to :on_pool actor type.
# @return [Pid, ::Array(Pid, Reference)] a pid or a pid-reference pair when monitor is true
# @see http://www1.erlang.org/doc/man/erlang.html#spawn-1
# @see http://www1.erlang.org/doc/man/erlang.html#spawn_link-1
# @see http://www1.erlang.org/doc/man/erlang.html#spawn_monitor-1
# @yield [*args] the body of the actor.
# When actor is spawned this block is evaluated
# until it terminates.
# The on-thread actor requires a block.
#   The on-pool actor has a default `-> { start }`,
#   therefore if no block is given it executes a #start method
#   which needs to be provided by the environment.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#378
def spawn(*args, type: T.unsafe(nil), channel: T.unsafe(nil), environment: T.unsafe(nil), name: T.unsafe(nil), executor: T.unsafe(nil), link: T.unsafe(nil), monitor: T.unsafe(nil), &body); end
# If pid **is not** provided, it stops the execution of the calling actor
# with the exit reason.
#
# If pid **is** provided,
# it sends an exit signal with exit reason to the actor identified by pid.
#
# The following behavior applies
# if `reason` is any object except `:normal` or `:kill`.
# If pid is not trapping exits,
# pid itself will exit with exit reason.
# If pid is trapping exits,
# the exit signal is transformed into a message {Terminated}
# and delivered to the message queue of pid.
#
# If reason is the Symbol `:normal`, pid will not exit.
# If it is trapping exits, the exit signal is transformed into a message {Terminated}
# and delivered to its message queue.
#
# If reason is the Symbol `:kill`, that is if `exit(pid, :kill)` is called,
# an untrappable exit signal is sent to pid which will unconditionally exit
# with exit reason `:killed`.
#
# Since evaluating this function causes the process to terminate, it has no return value.
#
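# A short sketch (not from the upstream docs) of the two documented call forms,
# assuming `peer` holds another actor's pid:
#
# @example
#   terminate :normal      # stop the calling actor normally
#   # or
#   terminate peer, :kill  # send an untrappable exit signal to peer
#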
# @param pid [Pid]
# @param reason [Object, :normal, :kill]
# @param value [Object]
# @return [nothing]
# @see http://www1.erlang.org/doc/man/erlang.html#error-1
# @see http://www1.erlang.org/doc/man/erlang.html#error-2
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#460
def terminate(pid = T.unsafe(nil), reason, value: T.unsafe(nil)); end
# @return [Promises::Future] a future which is resolved with
# the final result of the actor that is either the reason for
# termination or a value if terminated normally.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#144
def terminated; end
# When trap is set to true,
# exit signals arriving at an actor are converted to {Terminated} messages,
# which can be received as ordinary messages.
# If trap is set to false,
# the actor exits
# if it receives an exit signal other than normal
# and the exit signal is propagated to its linked actors.
# Application actors should normally not trap exits.
#
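# A small sketch (not from the upstream docs): trap exits so a linked actor's
# failure arrives as a {Terminated} message; `peer` is assumed to be a pid.
#
# @example
#   trap true
#   link peer
#   receive on(Terminated) { |t| t.reason }
#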
# @param value [true, false]
# @return [true, false] the old value of the flag
# @see http://www1.erlang.org/doc/man/erlang.html#process_flag-2
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#176
def trap(value = T.unsafe(nil)); end
# @return [true, false] does this actor trap exit messages?
# @see http://www1.erlang.org/doc/man/erlang.html#process_flag-2
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#160
def traps?; end
# Removes the link, if there is one,
# between the calling actor and the actor referred to by pid.
#
# Returns true and does not fail, even if there is no link to pid, or if pid does not exist.
#
# Once unlink(pid) has returned
# it is guaranteed
# that the link between the caller and the actor referred to by pid
# has no effect on the caller in the future (unless the link is setup again).
# If the caller is trapping exits,
# a {Terminated} message due to the link might have been placed
# in the caller's message queue prior to the call, though.
#
# Note, the {Terminated} message can be the result of the link,
# but can also be the result of calling the #terminate method externally.
# Therefore, it may be appropriate to clean up the message queue
# when trapping exits after the call to unlink, as follows:
# ```ruby
# receive on(And[Terminated, -> e { e.pid == pid }], true), timeout: 0
# ```
#
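# A compact sketch (not from the upstream docs) combining unlink with the
# cleanup above; `peer` is assumed to hold the pid of a previously linked actor.
#
# @example
#   unlink peer # => true
#   receive on(And[Terminated, -> t { t.from == peer }], true), timeout: 0 if traps?
#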
# @return [true]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#262
def unlink(pid); end
end
# These constants are useful
# where the body of an actor is defined.
# For convenience they are provided in this module for including.
#
# @example
# include Concurrent::ErlangActor::EnvironmentConstants
# actor = Concurrent::ErlangActor.spawn(:on_thread) do
# receive on(Numeric) { |v| v.succ },
# on(ANY) { terminate :bad_message },
# on(TIMEOUT) { terminate :no_message },
# timeout: 1
# end
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#588
module Concurrent::ErlangActor::EnvironmentConstants; end
# A singleton which matches anything using #=== method
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#592
Concurrent::ErlangActor::EnvironmentConstants::ANY = T.let(T.unsafe(nil), Object)
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#594
class Concurrent::ErlangActor::EnvironmentConstants::AbstractLogicOperationMatcher
# @return [AbstractLogicOperationMatcher] a new instance of AbstractLogicOperationMatcher
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#599
def initialize(*matchers); end
class << self
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#595
def [](*matchers); end
end
end
# Combines matchers into one which matches if all match.
#
# @example
# And[Numeric, -> v { v >= 0 }] === 1 # => true
# And[Numeric, -> v { v >= 0 }] === -1 # => false
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#608
class Concurrent::ErlangActor::EnvironmentConstants::And < ::Concurrent::ErlangActor::EnvironmentConstants::AbstractLogicOperationMatcher
# @return [true, false]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#610
def ===(v); end
end
# Combines matchers into one which matches if any matches.
#
# @example
# Or[Symbol, String] === :v # => true
# Or[Symbol, String] === 'v' # => true
# Or[Symbol, String] === 1 # => false
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#620
class Concurrent::ErlangActor::EnvironmentConstants::Or < ::Concurrent::ErlangActor::EnvironmentConstants::AbstractLogicOperationMatcher
# @return [true, false]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#622
def ===(v); end
end
# Unique identifier of a timeout, singleton.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#590
Concurrent::ErlangActor::EnvironmentConstants::TIMEOUT = T.let(T.unsafe(nil), Concurrent::ErlangActor::Token)
# Abstract error class for ErlangActor errors.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1501
class Concurrent::ErlangActor::Error < ::Concurrent::Error; end
# Contains shortcuts for methods in {Functions}.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#535
module Concurrent::ErlangActor::FunctionShortcuts
# Optionally included shortcut method for {Functions#spawn_actor}
#
# @return [Pid]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#538
def spawn(*args, **kwargs, &body); end
# Optionally included shortcut method for {Functions#terminate_actor}
#
# @return [true]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#544
def terminate(pid, reason); end
end
# A module containing entry functions to actors like spawn_actor, terminate_actor.
# It can be included in environments working with actors.
#
# @example
# include Concurrent::ErlangActors::Functions
# actor = spawn_actor :on_pool do
# receive { |data| process data }
# end
# @see FunctionShortcuts
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#486
module Concurrent::ErlangActor::Functions
# @return [ExecutorService] the default executor service for actors
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#523
def default_actor_executor; end
# @return [ExecutorService] the default executor service,
# may be shared by other abstractions
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#529
def default_executor; end
# Creates an actor. Same as {Environment#spawn} but lacks link and monitor options.
#
# @param args [Object]
# @param type [:on_thread, :on_pool]
# @param channel [Channel]
# @param environment [Environment, Module]
# @param name [#to_s] of the actor
# @param executor [ExecutorService] of the actor
# @return [Pid]
# @see Environment#spawn
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#496
def spawn_actor(*args, type:, channel: T.unsafe(nil), environment: T.unsafe(nil), name: T.unsafe(nil), executor: T.unsafe(nil), &body); end
# Same as {Environment#terminate}, but it requires pid.
#
# @param pid [Pid]
# @param reason [Object, :normal, :kill]
# @return [true]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#513
def terminate_actor(pid, reason); end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1270
module Concurrent::ErlangActor::HasFrom
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1276
def initialize(from); end
# @return [true, false]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1283
def ==(o); end
# @return [true, false]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1283
def eql?(o); end
# @return [Pid]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1273
def from; end
# @return [Integer]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1290
def hash; end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1297
module Concurrent::ErlangActor::HasReason
include ::Concurrent::ErlangActor::HasFrom
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1304
def initialize(from, reason); end
# @return [true, false]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1316
def ==(o); end
# @return [Integer]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1322
def hash; end
# @return [Object]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1301
def reason; end
# @return [::Array(Pid, Object)]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1311
def to_ary; end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1329
module Concurrent::ErlangActor::HasReference
include ::Concurrent::ErlangActor::HasFrom
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1336
def initialize(from, reference); end
# @return [true, false]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1348
def ==(o); end
# @return [Integer]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1354
def hash; end
# @return [Reference]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1333
def reference; end
# @return [::Array(Pid, Reference)]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1343
def to_ary; end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#567
Concurrent::ErlangActor::JUMP = T.let(T.unsafe(nil), Concurrent::ErlangActor::Token)
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1541
Concurrent::ErlangActor::KLASS_MAP = T.let(T.unsafe(nil), Hash)
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1374
class Concurrent::ErlangActor::Kill < ::Concurrent::ErlangActor::AbstractSignal
include ::Concurrent::ErlangActor::HasFrom
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1380
class Concurrent::ErlangActor::Link < ::Concurrent::ErlangActor::AbstractSignal
include ::Concurrent::ErlangActor::HasFrom
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1392
class Concurrent::ErlangActor::Monitor < ::Concurrent::ErlangActor::AbstractSignal
include ::Concurrent::ErlangActor::HasFrom
include ::Concurrent::ErlangActor::HasReference
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#570
Concurrent::ErlangActor::NOTHING = T.let(T.unsafe(nil), Concurrent::ErlangActor::Token)
# An error used when an actor tries to link or monitor a terminated actor.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1505
class Concurrent::ErlangActor::NoActor < ::Concurrent::ErlangActor::Error
# @param pid [Pid]
# @return [self]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1511
def initialize(pid = T.unsafe(nil)); end
# @return [true, false]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1517
def ==(o); end
# @return [true, false]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1517
def eql?(o); end
# @return [Integer]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1524
def hash; end
# @return [Pid]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1507
def pid; end
end
# An error used when an actor is asked but no reply was given or
# when the actor terminates before it gives a reply.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1531
class Concurrent::ErlangActor::NoReply < ::Concurrent::ErlangActor::Error; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1058
class Concurrent::ErlangActor::OnPool < ::Concurrent::ErlangActor::AbstractActor
# @return [OnPool] a new instance of OnPool
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1060
def initialize(channel, environment, name, executor); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1077
def receive(*rules, timeout: T.unsafe(nil), timeout_value: T.unsafe(nil), keep: T.unsafe(nil), &given_block); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1067
def run(*args, &body); end
private
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1178
def apply_behaviour(message); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1094
def inner_run(*args, &body); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1139
def internal_receive; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1090
def terminate_self(reason, value); end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1191
class Concurrent::ErlangActor::OnThread < ::Concurrent::ErlangActor::AbstractActor
# @return [OnThread] a new instance of OnThread
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1192
def initialize(channel, environment, name, executor); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1223
def receive(*rules, timeout: T.unsafe(nil), timeout_value: T.unsafe(nil), &given_block); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1200
def run(*args, &body); end
private
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1244
def terminate_self(reason, value); end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1197
module Concurrent::ErlangActor::OnThread::TERMINATE; end
# The public reference of the actor which can be stored and passed around.
# Nothing else of the actor should be exposed.
# {Functions.spawn_actor} and {Environment#spawn} return the pid.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#43
class Concurrent::ErlangActor::Pid < ::Concurrent::Synchronization::Object
extend ::Concurrent::Synchronization::SafeInitialization
# @return [Pid] a new instance of Pid
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#129
def initialize(actor, name); end
# The actor is asked the message and blocks until a reply is available,
# which is returned by the method.
# If the reply is a rejection then the method raises it.
#
# If the actor does not call {Environment#reply} or {Environment#reply_resolution}
# the method will raise a NoReply error.
# If the actor is terminated it will raise NoActor.
# Therefore the ask is never left unanswered and blocking.
#
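# A minimal sketch (not from the upstream docs) of a blocking request-reply
# round trip, including the timeout form:
#
# @example
#   doubler = Concurrent::ErlangActor.spawn(type: :on_thread) { reply receive * 2 }
#   doubler.ask 21 # => 42
#   slow = Concurrent::ErlangActor.spawn(type: :on_thread) { sleep 1; reply receive }
#   slow.ask :hi, 0.1, :no_reply_yet # => :no_reply_yet
#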
# @param message [Object]
# @param timeout [Numeric] the maximum time in seconds to wait
# @param timeout_value [Object] the value returned on timeout
# @raise [NoReply, NoActor]
# @return [Object, timeout_value] reply to the message
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#81
def ask(message, timeout = T.unsafe(nil), timeout_value = T.unsafe(nil)); end
# Same as {#ask} but represented as a {Promises::Future}.
#
# @param message [Object]
# @param probe [Promises::ResolvableFuture] a resolvable future which is resolved with the reply.
# @return [Promises::Future(Object)] reply to the message
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#90
def ask_op(message, probe = T.unsafe(nil)); end
# @return [String] string representation
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#108
def inspect; end
# @return [#to_s, nil] optional name of the actor
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#103
def name; end
# The actor is asynchronously told a message.
# The method returns immediately unless
# the actor has a bounded mailbox and there is no more space for the message.
# Then the method blocks the current thread until there is space available.
# This is useful for backpressure.
#
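# A sketch (not from the upstream docs) of backpressure through a bounded
# mailbox; the capacity and timeout are illustrative values.
#
# @example
#   mailbox = Concurrent::Promises::Channel.new 2
#   actor   = Concurrent::ErlangActor.spawn(type: :on_thread, channel: mailbox) do
#     loop { receive }
#   end
#   actor.tell :m1      # returns self when no timeout is given
#   actor.tell :m2, 0.1 # => true if pushed within 0.1s, false otherwise
#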
# @param message [Object]
# @param timeout [Numeric] the maximum time in seconds to wait
# @return [self, true, false] self if timeout was nil, false on timing out and true if told in time.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#56
def tell(message, timeout = T.unsafe(nil)); end
# Same as {#tell} but represented as a {Promises::Future}.
#
# @param message [Object]
# @return [Promises::Future(self)]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#63
def tell_op(message); end
# @return [Promises::Future] a future which is resolved with
# the final result of the actor that is either the reason for
# termination or a value if terminated normally.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#98
def terminated; end
# @return [String] string representation
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#108
def to_s; end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#569
Concurrent::ErlangActor::RECEIVE = T.let(T.unsafe(nil), Concurrent::ErlangActor::Token)
# An object representing instance of a monitor, created with {Environment#monitor}.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#136
class Concurrent::ErlangActor::Reference; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#630
class Concurrent::ErlangActor::Run
# @return [Run] a new instance of Run
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#637
def initialize(future); end
# Returns the value of attribute future.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#631
def future; end
class << self
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#633
def [](future); end
end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#641
Concurrent::ErlangActor::Run::TEST = T.let(T.unsafe(nil), Proc)
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#568
Concurrent::ErlangActor::TERMINATE = T.let(T.unsafe(nil), Concurrent::ErlangActor::Token)
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1361
class Concurrent::ErlangActor::Terminate < ::Concurrent::ErlangActor::AbstractSignal
include ::Concurrent::ErlangActor::HasFrom
include ::Concurrent::ErlangActor::HasReason
# @return [Terminate] a new instance of Terminate
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1366
def initialize(from, reason, link_terminated = T.unsafe(nil)); end
# Returns the value of attribute link_terminated.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1364
def link_terminated; end
end
# A message sent when an actor terminates.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1405
class Concurrent::ErlangActor::Terminated
# @return [Terminated] a new instance of Terminated
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1412
def initialize(from, reason); end
# @return [true, false]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1424
def ==(o); end
# @return [true, false]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1424
def eql?(o); end
# @return [Pid]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1407
def from; end
# @return [Integer]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1431
def hash; end
# @return [Object]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1409
def reason; end
# @return [::Array(Pid, Object)]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1419
def to_ary; end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#553
class Concurrent::ErlangActor::Token
# @return [Token] a new instance of Token
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#554
def initialize(name); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#558
def inspect; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#558
def to_s; end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/erlang_actor.rb#1386
class Concurrent::ErlangActor::UnLink < ::Concurrent::ErlangActor::AbstractSignal
include ::Concurrent::ErlangActor::HasFrom
end
# Hash-like collection that store lazy evaluated values.
#
# @example
# register = Concurrent::LazyRegister.new
# #=> #<Concurrent::LazyRegister:0x007fd7ecd5e230 @Data=#<Concurrent::AtomicReference:0x007fd7ecd5e1e0>>
# register[:key]
# #=> nil
# register.add(:key) { Concurrent::Actor.spawn!(Actor::AdHoc, :ping) { -> message { message } } }
# #=> #<Concurrent::LazyRegister:0x007fd7ecd5e230 @Data=#<Concurrent::AtomicReference:0x007fd7ecd5e1e0>>
# register[:key]
# #=> #<Concurrent::Actor::Reference /ping (Concurrent::Actor::AdHoc)>
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/lazy_register.rb#20
class Concurrent::LazyRegister < ::Concurrent::Synchronization::Object
extend ::Concurrent::Synchronization::SafeInitialization
# @return [LazyRegister] a new instance of LazyRegister
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/lazy_register.rb#25
def initialize; end
# Element reference. Retrieves the value object corresponding to the
# key object. Returns nil if the key is not found. Raises an exception
# if the stored item raised an exception when the block was evaluated.
#
# @param key [Object]
# @raise Exception when the initialization block fails
# @return [Object] value stored for the key or nil if the key is not found
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/lazy_register.rb#38
def [](key); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#137
def __initialize_atomic_fields__; end
# Element assignment. Associates the value given by value with the
# key given by key.
#
# @param key [Object]
# @return [LazyRegister] self
# @yield the object to store under the key
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/lazy_register.rb#61
def add(key, &block); end
# Un-registers the object under key, realized or not.
#
# @param key [Object]
# @return [LazyRegister] self
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/lazy_register.rb#75
def delete(key); end
# Returns true if the given key is present.
#
# @param key [Object]
# @return [true, false] if the key is registered
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/lazy_register.rb#47
def has_key?(key); end
# Returns true if the given key is present.
#
# @param key [Object]
# @return [true, false] if the key is registered
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/lazy_register.rb#47
def key?(key); end
# Element assignment. Associates the value given by value with the
# key given by key.
#
# @param key [Object]
# @return [LazyRegister] self
# @yield the object to store under the key
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/lazy_register.rb#61
def register(key, &block); end
# Returns true if the given key is present.
#
# @param key [Object]
# @return [true, false] if the key is registered
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/lazy_register.rb#47
def registered?(key); end
# Un-registers the object under key, realized or not.
#
# @param key [Object]
# @return [LazyRegister] self
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/lazy_register.rb#75
def remove(key); end
# Element assignment. Associates the value given by value with the
# key given by key.
#
# @param key [Object]
# @return [LazyRegister] self
# @yield the object to store under the key
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/lazy_register.rb#61
def store(key, &block); end
# Un-registers the object under key, realized or not.
#
# @param key [Object]
# @return [LazyRegister] self
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/lazy_register.rb#75
def unregister(key); end
private
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#105
def compare_and_set_data(expected, value); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#93
def data; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#97
def data=(value); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#101
def swap_data(value); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#109
def update_data(&block); end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_queue.rb#6
class Concurrent::LockFreeQueue < ::Concurrent::Synchronization::Object
extend ::Concurrent::Synchronization::SafeInitialization
# @return [LockFreeQueue] a new instance of LockFreeQueue
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_queue.rb#27
def initialize; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#137
def __initialize_atomic_fields__; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#105
def compare_and_set_head(expected, value); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#105
def compare_and_set_tail(expected, value); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#93
def head; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#97
def head=(value); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_queue.rb#65
def pop; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_queue.rb#35
def push(item); end
# approximate
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_queue.rb#104
def size; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#101
def swap_head(value); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#101
def swap_tail(value); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#93
def tail; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#97
def tail=(value); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#109
def update_head(&block); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#109
def update_tail(&block); end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_queue.rb#8
class Concurrent::LockFreeQueue::Node < ::Concurrent::Synchronization::Object
extend ::Concurrent::Synchronization::SafeInitialization
# @return [Node] a new instance of Node
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_queue.rb#11
def initialize(item, successor); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#137
def __initialize_atomic_fields__; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#105
def compare_and_set_successor(expected, value); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/lock_free_queue.rb#18
def item; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#93
def successor; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#97
def successor=(value); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#101
def swap_successor(value); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#109
def update_successor(&block); end
end
# A new implementation of an actor which also simulates the process, therefore it can be used
# in the same way as Erlang's actors but **without** occupying a thread. Tens of thousands of
# ProcessingActors can run at the same time sharing a thread pool.
#
# @example
# # Runs on a pool, does not consume 50_000 threads
# actors = 50_000.times.map do |i|
# Concurrent::ProcessingActor.act(i) { |a, i| a.receive.then_on(:fast, i) { |m, i| m + i } }
# end
#
# actors.each { |a| a.tell 1 }
# values = actors.map(&:termination).map(&:value)
# values[0,5] # => [1, 2, 3, 4, 5]
# values[-5, 5] # => [49996, 49997, 49998, 49999, 50000]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/processing_actor.rb#21
class Concurrent::ProcessingActor < ::Concurrent::Synchronization::Object
extend ::Concurrent::Synchronization::SafeInitialization
# @return [ProcessingActor] a new instance of ProcessingActor
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/processing_actor.rb#177
def initialize(channel, *args, &process); end
# actor.ask2 { |a| [:count, a] }
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/processing_actor.rb#157
def ask_op(answer = T.unsafe(nil), &message_provider); end
# @return [String] string representation.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/processing_actor.rb#165
def inspect; end
# @return [Promises::Channel] actor's mailbox.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/processing_actor.rb#29
def mailbox; end
# # Receives a message when available, used in the actor's process.
# # @return [Promises::Future(Object)] a future which will be fulfilled with a message from
# # mailbox when it is available.
# def receive(*channels)
# channels = [@Mailbox] if channels.empty?
# Promises::Channel.select(*channels)
# # TODO (pitr-ch 27-Dec-2016): support patterns
# # - put any received message aside if it does not match
# # - on each receive call check the messages put aside
# # - track where the message came from, cannot later receive m form other channel only because it matches
# end
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/processing_actor.rb#83
def receive(channel = T.unsafe(nil)); end
# Tells a message to the actor. May block the current thread if the mailbox is full.
# {#tell_op} is a better option since it does not block. It's usually used to integrate with
# threading code.
#
# @example
# Thread.new(actor) do |actor|
# # ...
# actor.tell! :a_message # blocks until the message is told
# # (there is a space for it in the channel)
# # ...
# end
# @param message [Object]
# @return [self]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/processing_actor.rb#99
def tell!(message); end
# Tells a message to the actor.
#
# @param message [Object]
# @return [Promises::Future(ProcessingActor)] a future which will be fulfilled with the actor
# when the message is pushed to mailbox.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/processing_actor.rb#108
def tell_op(message); end
# @return [Promises::Future(Object)] a future which is resolved when the actor ends its processing.
# It can either be fulfilled with a value when actor ends normally or rejected with
# a reason (exception) when actor fails.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/processing_actor.rb#36
def termination; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/processing_actor.rb#171
def to_ary; end
# @return [String] string representation.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/processing_actor.rb#165
def to_s; end
class << self
# Creates an actor.
#
# @example
# actor = Concurrent::ProcessingActor.act do |actor|
# actor.receive.then do |message|
# # the actor ends normally with message
# message
# end
# end
#
# actor.tell :a_message
# # => <#Concurrent::ProcessingActor:0x7fff11280560 termination:pending>
# actor.termination.value! # => :a_message
# @return [ProcessingActor]
# @see #act_listening Behaves the same way, but does not take mailbox as a first argument.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/processing_actor.rb#54
def act(*args, &process); end
# Creates an actor listening to a specified channel (mailbox).
#
# @param args [Object] Arguments passed to the process.
# @param channel [Promises::Channel] which serves as mailing box. The channel can have limited
# size to achieve backpressure.
# @return [ProcessingActor]
# @yield [actor, *args] to the process to get back a future which represents the actors execution.
# @yieldparam actor [ProcessingActor]
# @yieldparam *args [Object]
# @yieldreturn [Promises::Future(Object)] a future representing next step of execution
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/processing_actor.rb#67
def act_listening(channel, *args, &process); end
end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/promises.rb#6
module Concurrent::Promises
extend ::Concurrent::Promises::FactoryMethods::Configuration
end
# A first in first out channel that accepts messages with push family of methods and returns
# messages with pop family of methods.
# Pop and push operations can be represented as futures, see {#pop_op} and {#push_op}.
# The capacity of the channel can be limited to support back pressure, use capacity option in {#initialize}.
# The {#pop} method blocks and {#pop_op} returns a pending future if there is no message in the channel.
# If the capacity is limited, the {#push} method blocks and {#push_op} returns a pending future.
#
# {include:file:docs-source/channel.out.md}
#
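# A short sketch (not from the upstream docs) of the blocking and future-based
# APIs on a channel with capacity 1:
#
# @example
#   channel = Concurrent::Promises::Channel.new 1
#   channel.push 1     # => the channel itself (there was space)
#   channel.try_push 2 # => false, the channel is full
#   channel.pop        # => 1
#   channel.pop_op     # => a pending future, fulfilled by the next push
#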
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#17
class Concurrent::Promises::Channel < ::Concurrent::Synchronization::Object
extend ::Concurrent::Synchronization::SafeInitialization
# Create channel.
#
# @param capacity [Integer, UNLIMITED_CAPACITY] the maximum number of messages which can be stored in the channel.
# @return [Channel] a new instance of Channel
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#64
def initialize(capacity = T.unsafe(nil)); end
# @return [Integer] Maximum capacity of the Channel.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#295
def capacity; end
# @return [String] Short string representation.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#300
def inspect; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#346
def partial_select_op(matcher, probe); end
# Behaves as {#try_pop} but it does not remove the message from the channel
#
# @param no_value [Object] returned when there is no message available
# @return [Object, no_value] the message, or no_value when there is no message
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#209
def peek(no_value = T.unsafe(nil)); end
# Behaves as {#try_pop} but it does not remove the message from the channel
#
# @param no_value [Object] returned when there is no message available
# @param matcher [#===] only consider message which matches `matcher === a_message`
# @return [Object, no_value] the message, or no_value when there is no message
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#215
def peek_matching(matcher, no_value = T.unsafe(nil)); end
# Blocks current thread until a message is available in the channel for popping.
#
# @note This function potentially blocks current thread until it can continue.
# Be careful it can deadlock.
# @param timeout [Numeric] the maximum time in seconds to wait.
# @param timeout_value [Object] a value returned by the method when it times out
# @return [Object, nil] message or nil when timed out
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#177
def pop(timeout = T.unsafe(nil), timeout_value = T.unsafe(nil)); end
# Blocks current thread until a message is available in the channel for popping.
#
# @note This function potentially blocks current thread until it can continue.
# Be careful it can deadlock.
# @param timeout [Numeric] the maximum time in seconds to wait.
# @param timeout_value [Object] a value returned by the method when it times out
# @param matcher [#===] only consider message which matches `matcher === a_message`
# @return [Object, nil] message or nil when timed out
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#183
def pop_matching(matcher, timeout = T.unsafe(nil), timeout_value = T.unsafe(nil)); end
# Returns a future which will become fulfilled with a value from the channel when one is available.
# If the operation is later waited on with a timeout, e.g. `channel.pop_op.wait(1)`,
# it will not prevent the channel from fulfilling the operation later after the timeout.
# The operation has to be either processed later
# ```ruby
# pop_op = channel.pop_op
# if pop_op.wait(1)
# process_message pop_op.value
# else
# pop_op.then { |message| log_unprocessed_message message }
# end
# ```
# or the operation can be prevented from completion after timing out by using
# `channel.pop_op.wait(1, [true, nil, nil])`.
# It will fulfill the operation on timeout preventing channel from doing the operation,
# e.g. popping a message.
#
# @param probe [ResolvableFuture] the future which will be fulfilled with a channel value
# @return [Future(Object)] the probe, its value will be the message when available.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#160
def pop_op(probe = T.unsafe(nil)); end
# Returns a future which will become fulfilled with a value from the channel when one is available.
# If the operation is later waited on with a timeout, e.g. `channel.pop_op.wait(1)`,
# it will not prevent the channel from fulfilling the operation later after the timeout.
# The operation has to be either processed later
# ```ruby
# pop_op = channel.pop_op
# if pop_op.wait(1)
# process_message pop_op.value
# else
# pop_op.then { |message| log_unprocessed_message message }
# end
# ```
# or the operation can be prevented from completion after timing out by using
# `channel.pop_op.wait(1, [true, nil, nil])`.
# It will fulfill the operation on timeout preventing channel from doing the operation,
# e.g. popping a message.
#
# @param probe [ResolvableFuture] the future which will be fulfilled with a channel value
# @param matcher [#===] only consider message which matches `matcher === a_message`
# @return [Future(Object)] the probe, its value will be the message when available.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#166
def pop_op_matching(matcher, probe = T.unsafe(nil)); end
# Blocks current thread until the message is pushed into the channel.
#
# @note This function potentially blocks current thread until it can continue.
# Be careful it can deadlock.
# @param message [Object]
# @param timeout [Numeric] the maximum time in seconds to wait.
# @return [self, true, false] self implies timeout was not used, true implies timeout was used
# and it was pushed, false implies it was not pushed within timeout.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#120
def push(message, timeout = T.unsafe(nil)); end
# Returns a future which will fulfill when the message is pushed to the channel.
# If the operation is later waited on with a timeout, e.g. `channel.pop_op.wait(1)`,
# it will not prevent the channel from fulfilling the operation later after the timeout.
# The operation has to be either processed later
# ```ruby
# pop_op = channel.pop_op
# if pop_op.wait(1)
# process_message pop_op.value
# else
# pop_op.then { |message| log_unprocessed_message message }
# end
# ```
# or the operation can be prevented from completion after timing out by using
# `channel.pop_op.wait(1, [true, nil, nil])`.
# It will fulfill the operation on timeout preventing channel from doing the operation,
# e.g. popping a message.
#
# @param message [Object]
# @return [ResolvableFuture(self)]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#101
def push_op(message); end
# As {#select_op} but does not return a future,
# it blocks the current thread instead until there is a message available
# in the receiver or in any of the channels.
#
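# An illustrative sketch (not from the upstream docs) of waiting on whichever
# of two channels produces a message first:
#
# @example
#   a = Concurrent::Promises::Channel.new
#   b = Concurrent::Promises::Channel.new
#   b.push :from_b
#   a.select(b)      # => [b, :from_b]
#   a.select(b, 0.1) # => nil, neither channel had a message within 0.1s
#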
# @note This function potentially blocks current thread until it can continue.
# Be careful it can deadlock.
# @param channels [Channel, ::Array<Channel>]
# @param timeout [Numeric] the maximum time in seconds to wait.
# @return [::Array(Channel, Object), nil] message or nil when timed out
# @see #select_op
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#278
def select(channels, timeout = T.unsafe(nil)); end
# As {#select_op} but does not return a future,
# it blocks the current thread instead until there is a message available
# in the receiver or in any of the channels.
#
# @note This function potentially blocks current thread until it can continue.
# Be careful it can deadlock.
# @param channels [Channel, ::Array<Channel>]
# @param timeout [Numeric] the maximum time in seconds to wait.
# @param matcher [#===] only consider message which matches `matcher === a_message`
# @return [::Array(Channel, Object), nil] message or nil when timed out
# @see #select_op
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#284
def select_matching(matcher, channels, timeout = T.unsafe(nil)); end
# When a message is available in the receiver or any of the provided channels,
# the future is fulfilled with a channel-message pair.
# The returned channel is the origin of the message.
# If the operation is later waited on with a timeout, e.g. `channel.pop_op.wait(1)`,
# it will not prevent the channel from fulfilling the operation later after the timeout.
# The operation has to be either processed later
# ```ruby
# pop_op = channel.pop_op
# if pop_op.wait(1)
# process_message pop_op.value
# else
# pop_op.then { |message| log_unprocessed_message message }
# end
# ```
# or the operation can be prevented from completion after timing out by using
# `channel.pop_op.wait(1, [true, nil, nil])`.
# It will fulfill the operation on timeout preventing channel from doing the operation,
# e.g. popping a message.
#
# @param channels [Channel, ::Array<Channel>]
# @param probe [ResolvableFuture] the future which will be fulfilled with the message
# @return [ResolvableFuture(::Array(Channel, Object))] a future which is fulfilled with
# pair [channel, message] when one of the channels is available for reading
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#257
def select_op(channels, probe = T.unsafe(nil)); end
# When a message is available in the receiver or any of the provided channels,
# the future is fulfilled with a channel-message pair.
# The returned channel is the origin of the message.
# If the operation is later waited on with a timeout, e.g. `channel.pop_op.wait(1)`,
# it will not prevent the channel from fulfilling the operation later after the timeout.
# The operation has to be either processed later
# ```ruby
# pop_op = channel.pop_op
# if pop_op.wait(1)
# process_message pop_op.value
# else
# pop_op.then { |message| log_unprocessed_message message }
# end
# ```
# or the operation can be prevented from completion after timing out by using
# `channel.pop_op.wait(1, [true, nil, nil])`.
# It will fulfill the operation on timeout preventing channel from doing the operation,
# e.g. popping a message.
#
# @param channels [Channel, ::Array<Channel>]
# @param probe [ResolvableFuture] the future which will be fulfilled with the message
# @param matcher [#===] only consider message which matches `matcher === a_message`
# @return [ResolvableFuture(::Array(Channel, Object))] a future which is fulfilled with
# pair [channel, message] when one of the channels is available for reading
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#263
def select_op_matching(matcher, channels, probe = T.unsafe(nil)); end
# @return [Integer] The number of messages currently stored in the channel.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#290
def size; end
# @return [String] Short string representation.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#300
def to_s; end
# Pop a message from the channel if there is one available.
#
# @param no_value [Object] returned when there is no message available
# @return [Object, no_value] the message, or no_value when there is no message
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#138
def try_pop(no_value = T.unsafe(nil)); end
# Pop a message from the channel if there is one available.
#
# @param no_value [Object] returned when there is no message available
# @param matcher [#===] only consider message which matches `matcher === a_message`
# @return [Object, no_value] the message, or no_value when there is no message
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#145
def try_pop_matching(matcher, no_value = T.unsafe(nil)); end
# Push the message into the channel if there is space available.
#
# @param message [Object]
# @return [true, false]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#77
def try_push(message); end
# If a message is available in the receiver or any of the provided channels,
# the channel-message pair is returned. If there is no message, nil is returned.
# The returned channel is the origin of the message.
#
# @param channels [Channel, ::Array<Channel>]
# @return [::Array(Channel, Object), nil] pair [channel, message] if one of the channels is available for reading
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#232
def try_select(channels); end
# If a message is available in the receiver or any of the provided channels,
# the channel-message pair is returned. If there is no message, nil is returned.
# The returned channel is the origin of the message.
#
# @param channels [Channel, ::Array<Channel>]
# @param matcher [#===] only consider message which matches `matcher === a_message`
# @return [::Array(Channel, Object), nil] pair [channel, message] if one of the channels is available for reading
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#238
def try_select_matching(matcher, channels); end
private
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#397
def ns_consume_pending_push(matcher, remove = T.unsafe(nil)); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#352
def ns_pop_op(matcher, probe, include_channel); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#437
def ns_shift_message(matcher, remove = T.unsafe(nil)); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#417
def ns_try_push(message); end
class << self
# @return [::Array(Channel, Object), nil]
# @see #select
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#322
def select(channels, timeout = T.unsafe(nil)); end
# @return [::Array(Channel, Object), nil]
# @see #select_matching
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#340
def select_matching(matcher, channels, timeout = T.unsafe(nil)); end
# @return [Future(::Array(Channel, Object))]
# @see #select_op
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#316
def select_op(channels, probe = T.unsafe(nil)); end
# @return [Future(::Array(Channel, Object))]
# @see #select_op_matching
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#334
def select_op_matching(matcher, channels, probe = T.unsafe(nil)); end
# @return [::Array(Channel, Object)]
# @see #try_select
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#310
def try_select(channels); end
# @return [::Array(Channel, Object)]
# @see #try_select_matching
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#328
def try_select_matching(matcher, channels); end
end
end
# An object which matches anything (with #===)
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#52
Concurrent::Promises::Channel::ANY = T.let(T.unsafe(nil), Object)
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#48
Concurrent::Promises::Channel::NOTHING = T.let(T.unsafe(nil), Object)
# Default capacity of the Channel, makes it accept unlimited number of messages.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/channel.rb#35
Concurrent::Promises::Channel::UNLIMITED_CAPACITY = T.let(T.unsafe(nil), Object)
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/promises.rb#68
module Concurrent::Promises::FactoryMethods
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#278
def any(*futures_and_or_events); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#319
def any_event(*futures_and_or_events); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#329
def any_event_on(default_executor, *futures_and_or_events); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#300
def any_fulfilled_future(*futures_and_or_events); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#313
def any_fulfilled_future_on(default_executor, *futures_and_or_events); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#278
def any_resolved_future(*futures_and_or_events); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#294
def any_resolved_future_on(default_executor, *futures_and_or_events); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#190
def delay(*args, &task); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#207
def delay_on(default_executor, *args, &task); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#127
def fulfilled_future(value, default_executor = T.unsafe(nil)); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#94
def future(*args, &task); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#106
def future_on(default_executor, *args, &task); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#174
def make_future(argument = T.unsafe(nil), default_executor = T.unsafe(nil)); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#136
def rejected_future(reason, default_executor = T.unsafe(nil)); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#63
def resolvable_event; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#72
def resolvable_event_on(default_executor = T.unsafe(nil)); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#78
def resolvable_future; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#88
def resolvable_future_on(default_executor = T.unsafe(nil)); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#144
def resolved_event(default_executor = T.unsafe(nil)); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#118
def resolved_future(fulfilled, value, reason, default_executor = T.unsafe(nil)); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#214
def schedule(intended_time, *args, &task); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#233
def schedule_on(default_executor, intended_time, *args, &task); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#240
def zip(*futures_and_or_events); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#262
def zip_events(*futures_and_or_events); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#272
def zip_events_on(default_executor, *futures_and_or_events); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#240
def zip_futures(*futures_and_or_events); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#254
def zip_futures_on(default_executor, *futures_and_or_events); end
# @return [Future]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/promises.rb#72
def zip_futures_over(enumerable, &future_factory); end
# Creates a new future which is resolved after all the futures created by future_factory from
# the enumerable's elements are resolved. Simplified, it does:
# `zip(*enumerable.map { |e| future e, &future_factory })`
#
# @example
# # `#succ` calls are executed in parallel
# zip_futures_over_on(:io, [1, 2], &:succ).value! # => [2, 3]
# @param enumerable [Enumerable]
# @return [Future]
# @yield a task to be executed in future
# @yieldparam element [Object] from enumerable
# @yieldreturn [Object] a value of the future
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/promises.rb#90
def zip_futures_over_on(default_executor, enumerable, &future_factory); end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/promises.rb#8
class Concurrent::Promises::Future < ::Concurrent::Promises::AbstractEventFuture
include ::Concurrent::Promises::Future::ActorIntegration
include ::Concurrent::Promises::Future::FlatShortcuts
include ::Concurrent::Promises::Future::NewChannelIntegration
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1070
def &(other); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1085
def any(event_or_future); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1215
def apply(args, block); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1095
def delay; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1013
def exception(*args); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1120
def flat(level = T.unsafe(nil)); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1130
def flat_event; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1120
def flat_future(level = T.unsafe(nil)); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#921
def fulfilled?; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1235
def inspect; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1136
def on_fulfillment(*args, &callback); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1147
def on_fulfillment!(*args, &callback); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1159
def on_fulfillment_using(executor, *args, &callback); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1165
def on_rejection(*args, &callback); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1176
def on_rejection!(*args, &callback); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1188
def on_rejection_using(executor, *args, &callback); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#966
def reason(timeout = T.unsafe(nil), timeout_value = T.unsafe(nil)); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#928
def rejected?; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1052
def rescue(*args, &task); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1064
def rescue_on(executor, *args, &task); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#981
def result(timeout = T.unsafe(nil)); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1210
def run(run_test = T.unsafe(nil)); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1102
def schedule(intended_time); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1034
def then(*args, &task); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1046
def then_on(executor, *args, &task); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1222
def to_event; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1230
def to_future; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1235
def to_s; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#950
def value(timeout = T.unsafe(nil), timeout_value = T.unsafe(nil)); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#997
def value!(timeout = T.unsafe(nil), timeout_value = T.unsafe(nil)); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#987
def wait!(timeout = T.unsafe(nil)); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1111
def with_default_executor(executor); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1070
def zip(other); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1085
def |(event_or_future); end
private
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1272
def async_callback_on_fulfillment(state, executor, args, callback); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1278
def async_callback_on_rejection(state, executor, args, callback); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1284
def callback_on_fulfillment(state, args, callback); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1288
def callback_on_rejection(state, args, callback); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1292
def callback_on_resolution(state, args, callback); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1251
def rejected_resolution(raise_on_reassign, state); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1247
def run_test(v); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/promises.rb#1266
def wait_until_resolved!(timeout = T.unsafe(nil)); end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/promises.rb#11
module Concurrent::Promises::Future::ActorIntegration
# Asks the actor with the future's value as the message once the future is fulfilled.
#
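# @example Hypothetical sketch, assuming Concurrent::Actor::Utils::AdHoc is available (not from the generated docs)
#   adder = Concurrent::Actor::Utils::AdHoc.spawn(:adder) { -> v { v + 1 } }
#   Concurrent::Promises.future { 41 }.then_ask(adder).value! # => 42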
# @return [Future] new future with the response from the actor
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/promises.rb#14
def then_ask(actor); end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/promises.rb#22
module Concurrent::Promises::Future::FlatShortcuts
# @return [Future]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/promises.rb#25
def then_flat(*args, &block); end
# @return [Event]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/promises.rb#39
def then_flat_event(*args, &block); end
# @return [Event]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/promises.rb#44
def then_flat_event_on(executor, *args, &block); end
# @return [Future]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/promises.rb#25
def then_flat_future(*args, &block); end
# @return [Future]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/promises.rb#32
def then_flat_future_on(executor, *args, &block); end
# @return [Future]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/promises.rb#32
def then_flat_on(executor, *args, &block); end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/promises.rb#54
module Concurrent::Promises::Future::NewChannelIntegration
# @param channel [Channel] to push to.
# @return [Future] a future which is fulfilled after the message is pushed to the channel.
# May take a moment if the channel is full.
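# @example Hypothetical usage sketch (not from the generated docs)
#   channel = Concurrent::Promises::Channel.new 1
#   Concurrent::Promises.future { 6 * 7 }.then_channel_push(channel)
#   channel.pop # => 42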
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/promises.rb#59
def then_channel_push(channel); end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/promises.rb#96
module Concurrent::Promises::Resolvable
include ::Concurrent::Promises::InternalStates
# @return [true, false] on successful release of the reservation
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/promises.rb#121
def release; end
# Reserves the event or future; once reserved, others are prevented from resolving it.
# Advanced feature.
# Be careful about the order of reservations to avoid deadlocks;
# the method blocks if the future or event is already reserved,
# until it is released or resolved.
#
# @example
# f = Concurrent::Promises.resolvable_future
# reserved = f.reserve
# Thread.new { f.resolve true, :val, nil } # fails
# f.resolve true, :val, nil, true if reserved # must be called only if reserved
# @return [true, false] on successful reservation
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/promises.rb#111
def reserve; end
class << self
# Resolves all passed events and futures to the given resolutions
# if possible (all are unresolved) or none.
#
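# @example Hypothetical all-or-nothing resolution (usage sketch, not from the generated docs)
#   f1 = Concurrent::Promises.resolvable_future
#   f2 = Concurrent::Promises.resolvable_future
#   Concurrent::Promises::Resolvable.atomic_resolution(f1 => [true, :val, nil],
#                                                      f2 => [false, nil, :err]) # => true
#   f1.fulfilled? # => true
#   f2.rejected?  # => true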
# @param resolvable_map [Hash{Resolvable=>resolve_arguments}, Array<Array(Resolvable, resolve_arguments)>] collection of resolvable events and futures which should be resolved all at once
#   and what they should be resolved to, examples:
# ```ruby
# { a_resolvable_future1 => [true, :val, nil],
# a_resolvable_future2 => [false, nil, :err],
# a_resolvable_event => [] }
# ```
# or
# ```ruby
# [[a_resolvable_future1, [true, :val, nil]],
# [a_resolvable_future2, [false, nil, :err]],
# [a_resolvable_event, []]]
# ```
# @return [true, false] if success
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/promises.rb#150
def atomic_resolution(resolvable_map); end
# @return [Comparable] an item to sort the resolvable events or futures
# by to get the right global locking order of resolvable events or futures
# @see .atomic_resolution
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/promises.rb#128
def locking_order_by(resolvable); end
end
end
# A tool managing concurrency level of tasks.
# The maximum capacity is set in constructor.
# Each acquire will lower the available capacity and release will increase it.
# When no capacity is available, the current thread is either blocked or
# an event is returned which will be resolved when capacity becomes available.
#
# The more common usage of the Throttle is with a proxy executor
# `a_throttle.on(Concurrent.global_io_executor)`.
# Anything executed on the proxy executor will be throttled and
# execute on the given executor. There can be more than one proxy executor.
# All abstractions which execute tasks have an option to specify an executor,
# therefore the proxy executor can be injected into any abstraction,
# throttling its concurrency level.
#
# {include:file:docs-source/throttle.out.md}
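#
# @example Hypothetical throttled execution (usage sketch; `do_work` is a placeholder, not from the generated docs)
#   throttle = Concurrent::Throttle.new 2
#   executor = throttle.on Concurrent.global_io_executor
#   # at most two of the tasks run at any given time
#   futures = Array.new(10) { Concurrent::Promises.future_on(executor) { do_work } }
#   Concurrent::Promises.zip(*futures).wait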
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/throttle.rb#23
class Concurrent::Throttle < ::Concurrent::Synchronization::Object
include ::Concurrent::Promises::FactoryMethods::Configuration
include ::Concurrent::Promises::FactoryMethods
extend ::Concurrent::Synchronization::SafeInitialization
# Create throttle.
#
# @param capacity [Integer] How many tasks using this throttle can run at the same time.
# @return [Throttle] a new instance of Throttle
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/throttle.rb#37
def initialize(capacity); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#137
def __initialize_atomic_fields__; end
# Blocks the current thread until there is capacity available in the throttle.
# The acquired capacity has to be returned to the throttle by calling {#release}.
# If a block is passed, it is called after the capacity is acquired and
# the capacity is automatically released after the block is executed.
#
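# @example Hypothetical blocking and timed acquisition (usage sketch; `critical_work` is a placeholder)
#   throttle = Concurrent::Throttle.new 1
#   throttle.acquire { critical_work }      # blocks until capacity is available, then runs the block
#   throttle.acquire(0.1) { critical_work } # returns nil when capacity cannot be acquired within 0.1s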
# @param timeout [Numeric] the maximum time in seconds to wait.
# @return [Object, self, true, false] * When no timeout and no block it returns self
# * When no timeout and with block it returns the result of the block
# * When with timeout and no block it returns true when acquired and false when timed out
#   * When with timeout and with block it returns the result of the block or nil on timing out
# @see #release
# @yield [] block to execute after the capacity is acquired
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/throttle.rb#63
def acquire(timeout = T.unsafe(nil), &block); end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/throttle.rb#140
def acquire_or_event; end
# @return [Integer] The available capacity.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/throttle.rb#30
def available_capacity; end
# Uses the executor provided by {#on}, therefore
# all events and futures created using factory methods on this object will be throttled.
# Overrides {Promises::FactoryMethods#default_executor}.
#
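# @example Hypothetical usage sketch (`a_throttled_task` is a placeholder, not from the generated docs)
#   throttle = Concurrent::Throttle.new 2
#   throttle.future { a_throttled_task } # runs on the throttle's proxy executor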
# @return [ExecutorService]
# @see Promises::FactoryMethods#default_executor
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/throttle.rb#183
def default_executor; end
# @return [String] Short string representation.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/throttle.rb#133
def inspect; end
# @return [Integer] The maximum capacity.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/throttle.rb#46
def max_capacity; end
# @example throttling future
# a_future.then_on(a_throttle.on(:io)) { a_throttled_task }
# @param executor [ExecutorService]
# @return [ExecutorService] An executor which wraps the given executor and allows tasks to be posted
#   only as the throttle's available capacity allows.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/throttle.rb#162
def on(executor = T.unsafe(nil)); end
# Releases previously acquired capacity back to the throttle.
# Has to be called exactly once for each acquired capacity.
#
# @return [self]
# @see #acquire_operation, #acquire, #try_acquire
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/throttle.rb#118
def release; end
# @return [String] Short string representation.
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/throttle.rb#133
def to_s; end
# Tries to acquire capacity from the throttle.
# Returns true when there is capacity available.
# The acquired capacity has to be returned to the throttle by calling {#release}.
#
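# @example Hypothetical non-blocking acquisition (usage sketch; `work` is a placeholder)
#   if throttle.try_acquire
#     begin
#       work
#     ensure
#       throttle.release
#     end
#   end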
# @return [true, false]
# @see #release
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/throttle.rb#102
def try_acquire; end
private
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#93
def capacity; end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#97
def capacity=(value); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#105
def compare_and_set_capacity(expected, value); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#101
def swap_capacity(value); end
# source://concurrent-ruby/1.3.4/lib/concurrent-ruby/concurrent/synchronization/object.rb#109
def update_capacity(&block); end
end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/throttle.rb#187
class Concurrent::Throttle::ProxyExecutor < ::Concurrent::Synchronization::Object
include ::Logger::Severity
include ::Concurrent::Concern::Logging
include ::Concurrent::ExecutorService
extend ::Concurrent::Synchronization::SafeInitialization
# @return [ProxyExecutor] a new instance of ProxyExecutor
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/throttle.rb#192
def initialize(throttle, executor); end
# @return [Boolean]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/throttle.rb#206
def can_overflow?; end
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/throttle.rb#198
def post(*args, &task); end
# @return [Boolean]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/throttle.rb#210
def serialized?; end
private
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/edge/throttle.rb#216
def inner_post(*arguments, &task); end
end
# A delegating executor which modifies each task and its arguments
# before the task is given to the target executor it delegates to.
#
# @example Count task executions
# counter = AtomicFixnum.new
# count_executions = WrappingExecutor.new Concurrent.global_io_executor do |*args, &task|
# [*args, -> *args { counter.increment; task.call *args }]
# end
# 10.times { count_executions.post { :do_something } }
# sleep 0.01
# counter.value #=> 10
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/executor/wrapping_executor.rb#16
class Concurrent::WrappingExecutor < ::Concurrent::Synchronization::Object
include ::Logger::Severity
include ::Concurrent::Concern::Logging
include ::Concurrent::ExecutorService
extend ::Concurrent::Synchronization::SafeInitialization
# @param executor [Executor] an executor to delegate the tasks to
# @return [WrappingExecutor] a new instance of WrappingExecutor
# @yield [*args, &task] A function which can modify the task and its arguments
# @yieldparam *args [Array<Object>] the arguments submitted with the task
# @yieldparam &task [block] the task submitted to the executor to be modified
# @yieldreturn [Array<Object>] the new arguments and task `[*args, task]` which are submitted to the target executor
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/executor/wrapping_executor.rb#26
def initialize(executor, &wrapper); end
# @return [Boolean]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/executor/wrapping_executor.rb#41
def can_overflow?; end
# @see #initialize how the tasks can be modified
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/executor/wrapping_executor.rb#35
def post(*args, &task); end
# @return [Boolean]
#
# source://concurrent-ruby-edge//lib/concurrent-ruby-edge/concurrent/executor/wrapping_executor.rb#46
def serialized?; end
end