Merge pull request #3 from absinthe-graphql/ecto-caching
Ecto Caching
benwilson512 authored Oct 26, 2017
2 parents b804d46 + ab9952b commit b527d2d
Showing 5 changed files with 54 additions and 21 deletions.
lib/dataloader.ex: 10 changes (8 additions, 2 deletions)
@@ -101,14 +101,20 @@ defmodule Dataloader do
   def get(loader, source, batch_key, item_key) do
     loader
     |> get_source(source)
-    |> Source.get(batch_key, item_key)
+    |> Source.fetch(batch_key, item_key)
+    |> do_get
   end
 
+  defp do_get({:ok, val}), do: val
+  defp do_get(:error), do: nil
+
   @spec get_many(t, source_name, any, any) :: [any] | no_return()
   def get_many(loader, source, batch_key, item_keys) when is_list(item_keys) do
     source = get_source(loader, source)
     for key <- item_keys do
-      Source.get(source, batch_key, key)
+      source
+      |> Source.fetch(batch_key, key)
+      |> do_get
     end
   end
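
For orientation, the public API keeps its shape: `Dataloader.get/4` and `Dataloader.get_many/4` now go through `Source.fetch/3` and unwrap via `do_get/1`, so a missing item still surfaces as `nil`. A minimal usage sketch; the `Accounts` source name, the ids, and the `source` value are placeholders, not part of this commit:

```elixir
# Sketch only: `Accounts` is a hypothetical source name and `source` is any
# configured Dataloader source (for example Dataloader.Ecto.new(Repo)).
loader =
  Dataloader.new()
  |> Dataloader.add_source(Accounts, source)
  |> Dataloader.load(Accounts, User, 1)
  |> Dataloader.run()

# get/4 now pipes through Source.fetch/3 and do_get/1:
#   {:ok, value} -> value
#   :error       -> nil
user  = Dataloader.get(loader, Accounts, User, 1)
users = Dataloader.get_many(loader, Accounts, User, [1, 2])
```
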
lib/dataloader/ecto.ex: 23 changes (15 additions, 8 deletions)
@@ -160,19 +160,26 @@ if Code.ensure_loaded?(Ecto) do
       %{source | results: results, batches: %{}}
     end
 
-    def get(%{results: results} = source, batch, item) do
+    def fetch(%{results: results} = source, batch, item) do
       batch = normalize_key(batch, source.default_params)
       {batch_key, item_key, _item} = get_keys(batch, item)
-      results[batch_key][item_key]
+      with {:ok, batch} <- Map.fetch(results, batch_key) do
+        Map.fetch(batch, item_key)
+      end
     end
 
     def load(source, batch, item) do
-      batch = normalize_key(batch, source.default_params)
-      {batch_key, item_key, item} = get_keys(batch, item)
-      entry = {item_key, item}
-      update_in(source.batches, fn batches ->
-        Map.update(batches, batch_key, [entry], &[entry | &1])
-      end)
+      case fetch(source, batch, item) do
+        :error ->
+          batch = normalize_key(batch, source.default_params)
+          {batch_key, item_key, item} = get_keys(batch, item)
+          entry = {item_key, item}
+          update_in(source.batches, fn batches ->
+            Map.update(batches, batch_key, [entry], &[entry | &1])
+          end)
+        _ ->
+          source
+      end
     end
 
     def pending_batches?(%{batches: batches}) do
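
Sketched at the `Dataloader.Source` protocol level, the new `load/3` clause means a repo-backed source only queues batches for keys it has not already resolved. `Repo`, `User`, and the id are stand-ins; treat this as an illustration of the intended behaviour rather than code from the commit:

```elixir
# Sketch: Repo and User are assumed to exist as an Ecto repo and schema.
source = Dataloader.Ecto.new(Repo)

# Before run/1 nothing is cached, so fetch/3 misses and load/3 queues a batch.
source = Dataloader.Source.load(source, User, 1)
:error = Dataloader.Source.fetch(source, User, 1)

source = Dataloader.Source.run(source)
{:ok, _user} = Dataloader.Source.fetch(source, User, 1)

# A second load of the same key hits the cache: the source comes back
# unchanged and no new batch is queued for the next run/1.
source = Dataloader.Source.load(source, User, 1)
false = Dataloader.Source.pending_batches?(source)
```
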
lib/dataloader/kv.ex: 16 changes (9 additions, 7 deletions)
@@ -7,7 +7,7 @@ defmodule Dataloader.KV do
   keyed by id.
   ## Examples
   """
 
   defstruct [
@@ -30,19 +30,21 @@ defmodule Dataloader.KV do
 
   defimpl Dataloader.Source do
     def load(source, batch_key, id) do
-      case get(source, batch_key, id) do
-        nil ->
+      case fetch(source, batch_key, id) do
+        :error ->
           update_in(source.batches[batch_key], fn
             nil -> [id]
             ids -> [id | ids]
           end)
-        _ ->
-          source # cached
+        _ ->
+          source
       end
     end
 
-    def get(source, batch_key, id) do
-      source.results[batch_key][id]
+    def fetch(source, batch_key, id) do
+      with {:ok, batch} <- Map.fetch(source.results, batch_key) do
+        Map.fetch(batch, id)
+      end
     end
 
     def run(source) do
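
One motivation for replacing `get/3` (bare value) with `fetch/3` (`{:ok, value} | :error`) shows up here: a key whose loaded value is legitimately `nil` can now be distinguished from a key that was never loaded. A small sketch, assuming the KV source is built with `Dataloader.KV.new/1`; the `:users` batch key and the batch function are made up for illustration:

```elixir
# Sketch: a KV source whose batch function deliberately resolves every id to nil.
source =
  Dataloader.KV.new(fn _batch_key, ids ->
    Map.new(ids, fn id -> {id, nil} end)
  end)

source =
  source
  |> Dataloader.Source.load(:users, 1)
  |> Dataloader.Source.run()

{:ok, nil} = Dataloader.Source.fetch(source, :users, 1) # loaded, value is nil
:error = Dataloader.Source.fetch(source, :users, 2)     # never loaded
```
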
lib/dataloader/source.ex: 4 changes (2 additions, 2 deletions)
@@ -8,8 +8,8 @@ defprotocol Dataloader.Source do
   @spec run(t) :: t
   def run(source)
 
-  @spec get(t, batch_key, item_key) :: term
-  def get(source, batch_key, item_key)
+  @spec fetch(t, batch_key, item_key) :: {:ok, term} | :error
+  def fetch(source, batch_key, item_key)
 
   @spec pending_batches?(t) :: boolean
   def pending_batches?(source)
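
Because this is the protocol itself, any third-party source needs the same change: `fetch/3` replaces `get/3` and must return `{:ok, value}` or `:error`, never a bare value. Below is a minimal sketch of a conforming implementation; only the callbacks visible in this diff are implemented, and the struct, field names, and batch-resolution logic are assumptions for illustration:

```elixir
# Hypothetical source, not part of this commit.
defmodule MapSource do
  defstruct results: %{}, batches: %{}
end

defimpl Dataloader.Source, for: MapSource do
  def fetch(source, batch_key, item_key) do
    # {:ok, value} on a cache hit, :error otherwise.
    with {:ok, batch} <- Map.fetch(source.results, batch_key) do
      Map.fetch(batch, item_key)
    end
  end

  def load(source, batch_key, item_key) do
    update_in(source.batches[batch_key], fn
      nil -> [item_key]
      keys -> [item_key | keys]
    end)
  end

  def run(source) do
    # Resolve every pending batch; here each key trivially maps to itself.
    resolved =
      Map.new(source.batches, fn {batch_key, keys} ->
        {batch_key, Map.new(keys, &{&1, &1})}
      end)

    %{source | results: Map.merge(source.results, resolved), batches: %{}}
  end

  def pending_batches?(source), do: source.batches != %{}
end
```
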
test/dataloader/ecto_test.exs: 22 changes (20 additions, 2 deletions)
@@ -11,7 +11,8 @@ defmodule Dataloader.EctoTest do
     ]
     TestRepo.insert_all(User, users)
 
-    source = Dataloader.Ecto.new(TestRepo)
+    test_pid = self()
+    source = Dataloader.Ecto.new(TestRepo, query: &query(&1, &2, test_pid))
 
     loader =
       Dataloader.new()
@@ -24,17 +25,34 @@
     users = TestRepo.all(User)
     user_ids = users |> Enum.map(&(&1.id))
 
-    loaded_users =
+    loader =
       loader
       |> Dataloader.load_many(Test, User, user_ids)
      |> Dataloader.run
 
+    loaded_users =
+      loader
+      |> Dataloader.get_many(Test, User, user_ids)
+
+    assert_receive(:querying)
+
     assert length(loaded_users) == 1
     assert users == loaded_users
+
+    # loading again doesn't query again due to caching
+    loader
+    |> Dataloader.load_many(Test, User, user_ids)
+    |> Dataloader.run
+
+    refute_receive(:querying)
   end
 
   test "association loading works" do
 
   end
+
+  defp query(queryable, _args, test_pid) do
+    send(test_pid, :querying)
+    queryable
+  end
 end
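
A side note on the test plumbing: `Dataloader.Ecto.new/2` accepts a `query:` option, a two-argument callback that can rewrite the queryable before a batch is executed. The test wraps it only so the callback can message the test process, which is what makes `assert_receive`/`refute_receive` a reliable probe for whether the repo was actually hit. The same trick inlined, as a sketch (names follow the test above):

```elixir
# Sketch of the observation trick used in the caching test.
test_pid = self()

source =
  Dataloader.Ecto.new(TestRepo, query: fn queryable, _params ->
    send(test_pid, :querying) # runs when a batch is actually executed
    queryable                 # leave the query itself untouched
  end)
```
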
