Return a normal Promise object from GraphQL::Batch::Loader#load #65

Merged (2 commits, Sep 1, 2017)
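In short, GraphQL::Batch::Loader#load now returns a plain ::Promise from the promise.rb gem instead of an instance of the GraphQL::Batch::Promise subclass. A minimal usage sketch of what that means for callers; the RecordLoader class and Product model below are illustrative, not part of this PR:

class RecordLoader < GraphQL::Batch::Loader
  def initialize(model)
    @model = model
  end

  def perform(ids)
    # Fulfill each requested key; unmatched ids resolve to nil.
    @model.where(id: ids).each { |record| fulfill(record.id, record) }
    ids.each { |id| fulfill(id, nil) unless fulfilled?(id) }
  end
end

GraphQL::Batch.batch do
  promise = RecordLoader.for(Product).load(1)
  promise.class # => Promise (promise.rb), no longer GraphQL::Batch::Promise
  promise.then { |product| product && product.title }
end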
1 change: 1 addition & 0 deletions .travis.yml
@@ -2,4 +2,5 @@ language: ruby
 rvm:
 - 2.1
 - 2.2
+- 2.3
 before_install: gem install bundler -v 1.13.3
2 changes: 1 addition & 1 deletion lib/graphql/batch.rb
@@ -13,7 +13,7 @@ def self.batch
     raise NestedError if GraphQL::Batch::Executor.current
     begin
       GraphQL::Batch::Executor.current = GraphQL::Batch::Executor.new
-      Promise.sync(yield)
+      ::Promise.sync(yield)
     ensure
       GraphQL::Batch::Executor.current = nil
     end
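The :: prefix is the substance of these one-line changes: now that GraphQL::Batch::Promise is only a deprecated alias (see lib/graphql/batch/promise.rb below), a bare Promise inside the GraphQL::Batch namespace would still resolve to that alias and trigger the deprecation warning on Ruby 2.3+, so the code refers to the top-level promise.rb class explicitly.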
2 changes: 1 addition & 1 deletion lib/graphql/batch/execution_strategy.rb
@@ -13,7 +13,7 @@ def execute(_, _, query)

     # Needed for MutationExecutionStrategy
     def deep_sync(result) #:nodoc:
-      Promise.sync(as_promise_unless_resolved(result))
+      ::Promise.sync(as_promise_unless_resolved(result))
     end

     private
45 changes: 21 additions & 24 deletions lib/graphql/batch/executor.rb
@@ -11,8 +11,6 @@ def self.current=(executor)
       Thread.current[THREAD_KEY] = executor
     end

-    attr_reader :loaders
-
     # Set to true when performing a batch query, otherwise, it is false.
     #
     # Can be used to detect unbatched queries in an ActiveSupport::Notifications.subscribe block.
@@ -23,42 +21,41 @@ def initialize
       @loading = false
     end

-    def resolve(loader)
-      with_loading(true) { loader.resolve }
+    def loader(key)
+      @loaders[key] ||= yield.tap do |loader|
+        loader.executor = self
+        loader.loader_key = key
+      end
     end

-    def shift
-      @loaders.shift.last
+    def resolve(loader)
+      was_loading = @loading
+      @loading = true
+      loader.resolve
+    ensure
+      @loading = was_loading
     end

     def tick
-      resolve(shift)
+      resolve(@loaders.shift.last)
     end

     def wait_all
-      tick until loaders.empty?
+      tick until @loaders.empty?
     end

     def clear
-      loaders.clear
+      @loaders.clear
     end

-    def defer
-      # Since we aren't actually deferring callbacks, we need to set #loading to false so that any queries
-      # that happen in the callback aren't interpreted as being performed in GraphQL::Batch::Loader#perform
-      with_loading(false) { yield }
-    end
-
-    private
-
-    def with_loading(loading)
+    def around_promise_callbacks
+      # We need to set #loading to false so that any queries that happen in the promise
+      # callback aren't interpreted as being performed in GraphQL::Batch::Loader#perform
       was_loading = @loading
-      begin
-        @loading = loading
-        yield
-      ensure
-        @loading = was_loading
-      end
+      @loading = false
+      yield
+    ensure
+      @loading = was_loading
     end
   end
 end
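The renamed around_promise_callbacks is what Loader#fulfill and the new Loader#reject (below) wrap promise callbacks in: loading stays true only while a loader's perform runs, so work triggered from a .then callback is not attributed to perform. A rough sketch of the kind of instrumentation the loading flag is meant for, as suggested by the comment in this diff; the Rails/ActiveRecord wiring here is an assumption, not part of this PR:

ActiveSupport::Notifications.subscribe("sql.active_record") do |*args|
  executor = GraphQL::Batch::Executor.current
  payload = args.last
  # Warn about queries that run during a batch but outside any Loader#perform.
  if executor && !executor.loading
    Rails.logger.warn("Unbatched query: #{payload[:sql]}")
  end
end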
41 changes: 25 additions & 16 deletions lib/graphql/batch/loader.rb
@@ -12,10 +12,7 @@ def self.for(*group_args)
           " `GraphQL::Batch::Setup` as a query instrumenter if using with `graphql-ruby`"
       end

-      executor.loaders[loader_key] ||= new(*group_args).tap do |loader|
-        loader.loader_key = loader_key
-        loader.executor = executor
-      end
+      executor.loader(loader_key) { new(*group_args) }
     end

     def self.loader_key_for(*group_args)
@@ -35,7 +32,7 @@ def self.load_many(keys)
     def load(key)
       cache[cache_key(key)] ||= begin
         queue << key
-        Promise.new.tap { |promise| promise.source = self }
+        ::Promise.new.tap { |promise| promise.source = self }
       end
     end

@@ -50,9 +47,7 @@ def resolve #:nodoc:
       perform(load_keys)
       check_for_broken_promises(load_keys)
     rescue => err
-      each_pending_promise(load_keys) do |key, promise|
-        promise.reject(err)
-      end
+      reject_pending_promises(load_keys, err)
     end

     # For Promise#sync
@@ -72,7 +67,15 @@ def resolved?

     # Fulfill the key with provided value, for use in #perform
     def fulfill(key, value)
-      promise_for(key).fulfill(value)
+      finish_resolve(key) do |promise|
+        promise.fulfill(value)
+      end
     end

+    def reject(key, reason)
+      finish_resolve(key) do |promise|
+        promise.reject(reason)
+      end
+    end
+
     # Returns true when the key has already been fulfilled, otherwise returns false
@@ -92,6 +95,14 @@ def cache_key(load_key)

     private

+    def finish_resolve(key)
+      promise = promise_for(key)
+      return yield(promise) unless executor
+      executor.around_promise_callbacks do
+        yield promise
+      end
+    end
+
     def cache
       @cache ||= {}
     end

Review thread on `return yield(promise) unless executor`:

Contributor: Is this line necessary, since `for` will raise if the loader isn't wrapped in GraphQL::Batch.batch?

Author: The executor is mostly just used for storing loaders so they can be looked up for grouping; however, an alternative would be to store loaders in a context. We started supporting using loaders without an executor in #35.

Contributor: oh yes, of course! 👍
@@ -104,18 +115,16 @@ def promise_for(load_key)
       cache.fetch(cache_key(load_key))
     end

-    def each_pending_promise(load_keys)
+    def reject_pending_promises(load_keys, err)
       load_keys.each do |key|
-        promise = promise_for(key)
-        if promise.pending?
-          yield key, promise
-        end
+        # promise.rb ignores reject if promise isn't pending
+        reject(key, err)
       end
     end

     def check_for_broken_promises(load_keys)
-      each_pending_promise(load_keys) do |key, promise|
-        promise.reject(::Promise::BrokenError.new("#{self.class} didn't fulfill promise for key #{key.inspect}"))
+      load_keys.each do |key|
+        reject(key, ::Promise::BrokenError.new("#{self.class} didn't fulfill promise for key #{key.inspect}"))
       end
     end
   end
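For illustration, a sketch of how the new public Loader#reject might be used from perform; the FindLoader class and the ActiveRecord-style model are assumptions, not part of this PR. Like fulfill, reject goes through finish_resolve, so any promise callbacks run with the executor's loading flag cleared:

class FindLoader < GraphQL::Batch::Loader
  def initialize(model)
    @model = model
  end

  def perform(ids)
    records = @model.where(id: ids).map { |record| [record.id, record] }.to_h
    ids.each do |id|
      if records.key?(id)
        fulfill(id, records[id])
      else
        # Reject missing ids so callers get a rejected promise instead of nil.
        reject(id, ActiveRecord::RecordNotFound.new("#{@model.name} not found: id=#{id}"))
      end
    end
  end
end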
2 changes: 1 addition & 1 deletion lib/graphql/batch/mutation_execution_strategy.rb
@@ -12,7 +12,7 @@ def get_finished_value(raw_value)
       GraphQL::Batch::Executor.current.clear
       begin
         strategy.enable_batching = true
-        strategy.deep_sync(Promise.sync(super))
+        strategy.deep_sync(::Promise.sync(super))
       ensure
         strategy.enable_batching = false
         GraphQL::Batch::Executor.current.clear
8 changes: 3 additions & 5 deletions lib/graphql/batch/promise.rb
@@ -1,8 +1,6 @@
 module GraphQL::Batch
-  class Promise < ::Promise
-    def defer
-      executor = Executor.current
-      executor ? executor.defer { super } : super
-    end
+  Promise = ::Promise
+  if Gem::Version.new(RUBY_VERSION) >= Gem::Version.new("2.3")
+    deprecate_constant :Promise
   end
 end
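As a small illustration of the deprecation path introduced here (this is the behavior of Ruby's Module#deprecate_constant, not code from this PR): on Ruby 2.3+ the old constant still resolves to ::Promise but warns when referenced.

GraphQL::Batch::Promise.new
# warning: constant GraphQL::Batch::Promise is deprecated
# The object is an ordinary ::Promise; no GraphQL::Batch-specific subclass remains.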
2 changes: 1 addition & 1 deletion lib/graphql/batch/setup.rb
@@ -17,7 +17,7 @@ def instrument_field(schema, type, field)
       resolve ->(obj, args, ctx) {
         GraphQL::Batch::Executor.current.clear
         begin
-          Promise.sync(old_resolve_proc.call(obj, args, ctx))
+          ::Promise.sync(old_resolve_proc.call(obj, args, ctx))
         ensure
           GraphQL::Batch::Executor.current.clear
         end