diff --git a/.envrc b/.envrc new file mode 100644 index 00000000..ce92b19d --- /dev/null +++ b/.envrc @@ -0,0 +1 @@ +export ECTO=elixir-ecto diff --git a/lib/dx.ex b/lib/dx.ex index 31acb388..d3f1082a 100644 --- a/lib/dx.ex +++ b/lib/dx.ex @@ -37,7 +37,7 @@ defmodule Dx do doesn't need to be loaded again. Can be initialized using `Loaders.Dataloader.init/0`. """ - alias Dx.{Engine, Result, Util} + alias Dx.{Engine, Result, Schema, Util} alias Dx.Evaluation, as: Eval @doc """ @@ -46,9 +46,15 @@ defmodule Dx do Does not load any additional data. """ def get(records, predicates, opts \\ []) do - eval = Eval.from_options(opts) + type = type(records) + eval = Eval.from_options(opts) |> Map.put(:root_type, type) + + {expanded, _type} = + predicates + |> expand() + |> Schema.expand_mapping(type, eval) - do_get(records, predicates, eval) + do_get(records, expanded, eval) |> Result.to_simple_if(not eval.return_cache?) end @@ -57,16 +63,12 @@ defmodule Dx do end defp do_get(record, predicates, eval) when is_list(predicates) do - Result.map(predicates, &Engine.resolve_predicate(&1, record, eval)) + Result.map(predicates, &Engine.execute(&1, record, eval)) |> Result.transform(&Util.Map.zip(predicates, &1)) end - defp do_get(record, predicate, eval) when is_atom(predicate) do - Engine.resolve_predicate(predicate, record, eval) - end - defp do_get(record, result, eval) do - Engine.map_result(result, %{eval | root_subject: record}) + Engine.execute(result, record, eval) end @doc """ @@ -81,12 +83,30 @@ defmodule Dx do Like `get/3`, but loads additional data if needed. """ def load(records, predicates, opts \\ []) do - eval = Eval.from_options(opts) + type = type(records) + eval = Eval.from_options(opts) |> Map.put(:root_type, type) + + {expanded, _type} = + predicates + |> expand() + |> Schema.expand_mapping(type, eval) - do_load(records, predicates, eval) + do_load(records, expanded, eval) |> Result.to_simple_if(not eval.return_cache?) end + defp expand(predicates) when is_list(predicates) do + predicates + |> Map.new(&{&1, {:ref, &1}}) + end + + defp expand(other) do + other + end + + defp type([%type{} | _]), do: type + defp type(%type{}), do: type + defp do_load(records, predicates, eval) do load_all_data_reqs(eval, fn eval -> do_get(records, predicates, eval) diff --git a/lib/dx/ecto/query.ex b/lib/dx/ecto/query.ex index f49c6f99..391b959d 100644 --- a/lib/dx/ecto/query.ex +++ b/lib/dx/ecto/query.ex @@ -3,7 +3,7 @@ defmodule Dx.Ecto.Query do Functions to dynamically generate Ecto query parts. """ - alias Dx.{Result, Util} + alias Dx.Result alias Dx.Evaluation, as: Eval alias __MODULE__.Builder @@ -29,7 +29,22 @@ defmodule Dx.Ecto.Query do @doc """ Add predicate-based filters to a queryable and return it. 
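  For example (illustrative only — `List` and `Rules` refer to the schema and
  rule modules used in this repo's test suite):

      Dx.Ecto.Query.where(List, %{created_by_id: 7})
      Dx.Ecto.Query.where(List, %{created_by: %{last_name: "Vega"}})
      Dx.Ecto.Query.where(List, %{state: :archived}, extra_rules: Rules)

  Conditions are first expanded via `Dx.Schema.expand_condition/3`; any part
  that cannot be translated to SQL raises a `TranslationError`.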
""" + def where(queryable, condition, opts \\ []) when is_list(opts) do + type = queryable + eval = Eval.from_options(opts) |> Map.put(:root_type, type) + + {expanded, _binds} = + condition + |> Dx.Schema.expand_condition(type, eval) + + case apply_condition(queryable, expanded, eval) do + {queryable, true} -> queryable + {queryable, condition} -> raise TranslationError, queryable: queryable, condition: condition + end + end + + def execute_where(queryable, condition, opts \\ []) when is_list(opts) do eval = Eval.from_options(opts) case apply_condition(queryable, condition, eval) do @@ -121,10 +136,6 @@ defmodule Dx.Ecto.Query do end end - defp map_condition(builder, conditions) when is_map(conditions) do - map_condition(builder, {:all, conditions}) - end - defp map_condition(builder, {:all, conditions}) do Enum.reduce_while(conditions, {builder, true}, fn condition, {builder, acc_query} -> case map_condition(builder, condition) do @@ -160,10 +171,10 @@ defmodule Dx.Ecto.Query do :error end - defp map_condition(builder, {key, val}) when is_atom(key) do - case field_info(key, builder) do - :field -> - left = Builder.field(builder, key) + defp map_condition(builder, {key, val}) do + case key do + {:field, key} -> + left = Builder.field(builder, {:field, key}) case val do vals when is_list(vals) -> @@ -183,7 +194,7 @@ defmodule Dx.Ecto.Query do Enum.reduce_while(grouped_vals, {builder, false}, fn val, {builder, acc_query} -> case val do vals when is_list(vals) -> {builder, compare(left, :eq, vals, builder)} - val -> map_condition(builder, {key, val}) + val -> map_condition(builder, {{:field, key}, val}) end |> case do :error -> {:halt, :error} @@ -210,19 +221,19 @@ defmodule Dx.Ecto.Query do end end - {:predicate, rules} -> + {:predicate, _meta, rules} -> case rules_for_value(rules, val, builder) do :error -> :error condition -> map_condition(builder, condition) end - {:assoc, :one, _assoc} -> + {:assoc, :one, _type, _assoc} -> Builder.with_join(builder, key, fn builder -> map_condition(builder, val) end) - {:assoc, :many, assoc} -> - %{queryable: queryable, related_key: related_key, owner_key: owner_key} = assoc + {:assoc, :many, queryable, assoc} -> + %{related_key: related_key, owner_key: owner_key} = assoc as = Builder.current_alias(builder) @@ -261,8 +272,8 @@ defmodule Dx.Ecto.Query do end defp do_ref(builder, [field | path]) do - case field_info(field, builder) do - {:assoc, :one, _assoc} -> Builder.with_join(builder, field, &do_ref(&1, path)) + case field do + {:assoc, :one, _type, _assoc} -> Builder.with_join(builder, field, &do_ref(&1, path)) _other -> :error end end @@ -321,38 +332,6 @@ defmodule Dx.Ecto.Query do defp compare(left, op, val, %{negate?: true}) when op in @gt_ops, do: dynamic(^left <= ^val) - defp field_info(predicate, %Builder{} = builder) do - type = Builder.current_type(builder) - - case Util.rules_for_predicate(predicate, type, builder.eval) do - [] -> - case Util.Ecto.association_details(type, predicate) do - %_{cardinality: :one} = assoc -> - {:assoc, :one, assoc} - - %_{cardinality: :many} = assoc -> - {:assoc, :many, assoc} - - _other -> - case Util.Ecto.field_details(type, predicate) do - nil -> - raise ArgumentError, - """ - Unknown field #{inspect(predicate)} on #{inspect(type)}. 
- Path: #{inspect(builder.path)} - Types: #{inspect(builder.types)} - """ - - _other -> - :field - end - end - - rules -> - {:predicate, rules} - end - end - # maps a comparison of "predicate equals value" to a Dx condition defp rules_for_value(rules, val, %{negate?: false}) do vals = List.wrap(val) @@ -360,7 +339,7 @@ defmodule Dx.Ecto.Query do rules |> Enum.reverse() |> Enum.reduce_while(false, fn - {condition, val}, acc when is_simple(val) -> + {val, condition}, acc when is_simple(val) -> if val in vals do {:cont, [condition, acc]} else @@ -459,12 +438,29 @@ defmodule Dx.Ecto.Query do end end + def apply_expanded_options(queryable, opts) do + Enum.reduce(opts, {queryable, []}, fn + {:where, conditions}, {query, opts} -> {execute_where(query, conditions), opts} + {:limit, limit}, {query, opts} -> {limit(query, limit), opts} + {:order_by, order}, {query, opts} -> {order_by(query, order), opts} + other, {query, opts} -> {query, [other | opts]} + end) + |> case do + {queryable, opts} -> {queryable, Enum.reverse(opts)} + end + end + @doc "Apply all options to the given `queryable`, raise on any unknown option." def from_options(queryable, opts) do {queryable, []} = apply_options(queryable, opts) queryable end + def execute_options(queryable, opts) do + {queryable, []} = apply_expanded_options(queryable, opts) + queryable + end + def limit(queryable, limit) do from(q in queryable, limit: ^limit) end diff --git a/lib/dx/ecto/query/builder.ex b/lib/dx/ecto/query/builder.ex index 478bcd66..c8419917 100644 --- a/lib/dx/ecto/query/builder.ex +++ b/lib/dx/ecto/query/builder.ex @@ -70,14 +70,16 @@ defmodule Dx.Ecto.Query.Builder do def root_type(%Builder{aliases: {_, type, _}}), do: type def field(builder, key, maybe_parent? \\ false) - def field(%{path: [{:parent, as} | _]}, key, _), do: dynamic(field(parent_as(^as), ^key)) - def field(%{path: [as | _], in_subquery?: true}, key, true), + def field(%{path: [{:parent, as} | _]}, {:field, key}, _), do: dynamic(field(parent_as(^as), ^key)) - def field(%{path: [as | _]}, key, _), do: dynamic(field(as(^as), ^key)) + def field(%{path: [as | _], in_subquery?: true}, {:field, key}, true), + do: dynamic(field(parent_as(^as), ^key)) + + def field(%{path: [as | _]}, {:field, key}, _), do: dynamic(field(as(^as), ^key)) - def field(%{path: [], aliases: {as, _, _}}, key, _), do: dynamic(field(as(^as), ^key)) + def field(%{path: [], aliases: {as, _, _}}, {:field, key}, _), do: dynamic(field(as(^as), ^key)) def current_alias(%{path: [as | _]}), do: as def current_alias(%{path: [], aliases: {as, _, _}}), do: as @@ -181,16 +183,13 @@ defmodule Dx.Ecto.Query.Builder do {builder, as} end - def add_aliased_join(builder, key) do + def add_aliased_join(builder, assoc) do {builder, as} = next_alias(builder) left = current_alias(builder) - type = - case Dx.Util.Ecto.association_details(current_type(builder), key) do - %_{related: type} -> type - end + {:assoc, _, type, %{name: name}} = assoc - builder = update_query(builder, &aliased_join(&1, left, key, as)) + builder = update_query(builder, &aliased_join(&1, left, name, as)) %{builder | path: [as | builder.path], types: [type | builder.types]} end diff --git a/lib/dx/engine.ex b/lib/dx/engine.ex index 88118733..c15044dc 100644 --- a/lib/dx/engine.ex +++ b/lib/dx/engine.ex @@ -7,38 +7,50 @@ defmodule Dx.Engine do alias Dx.Evaluation, as: Eval @doc """ - Returns the result of evaluating a predicate. + Returns the result of evaluating a plan. 
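  A plan is the expanded form produced by `Dx.Schema.expand_mapping/3`, e.g.
  `{:field, :archived_at}`, `{:assoc, :one, List, meta}`, or
  `{:predicate, %{name: :completed?}, rules}` (the names here are illustrative).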
""" - @spec resolve_predicate(atom(), struct(), Eval.t()) :: Result.v() - def resolve_predicate(predicate, %type{} = subject, %Eval{} = eval) do - predicate - |> Util.rules_for_predicate(type, eval) - |> match_rules(subject, predicate, eval) + def execute(plan, subject, eval) do + eval = %{eval | root_subject: subject} + # map_result(plan, eval) + resolve(plan, subject, eval) end @doc """ Returns the result of evaluating a field or predicate. """ @spec resolve(atom(), map(), Eval.t()) :: Result.v() - def resolve(field, subject, %Eval{resolve_predicates?: false} = eval) do - fetch(subject, field, eval) + def resolve({:assoc, _, _, %{name: name}}, subject, %Eval{} = eval) do + fetch(subject, name, eval) end - def resolve(field_or_predicate, %type{} = subject, %Eval{} = eval) do - eval = %{eval | resolve_predicates?: true} + def resolve({:field, name}, subject, %Eval{} = eval) do + fetch(subject, name, eval) + end - field_or_predicate - |> Util.rules_for_predicate(type, eval) - |> case do - [] -> fetch(subject, field_or_predicate, eval) - rules -> match_rules(rules, subject, field_or_predicate, eval) - end + def resolve({:predicate, _, _} = predicate, subject, eval) do + match_rules(predicate, subject, eval) end - def resolve(field, map, %Eval{} = eval) do + def resolve(field, map, %Eval{} = eval) when is_atom(field) do fetch(map, field, eval) end + def resolve(field, map, %Eval{} = eval) do + map_result(field, %{eval | root_subject: map}) + end + + def resolve_source({:assoc, _, _, %{name: name}}, %Eval{} = eval) do + fetch(eval.root_subject, name, eval) + end + + def resolve_source({:field, name}, %Eval{} = eval) do + fetch(eval.root_subject, name, eval) + end + + def resolve_source({:predicate, _, _} = predicate, eval) do + match_rules(predicate, eval.root_subject, eval) + end + def resolve_source(field_or_predicate, eval) when is_atom(field_or_predicate) do resolve(field_or_predicate, eval.root_subject, eval) end @@ -58,15 +70,14 @@ defmodule Dx.Engine do # - {:ok, true} -> stop here and return rule assigns # - {:not_loaded, data_reqs} -> collect and move on, return {:not_loaded, all_data_reqs} at the end # - {:error, e} -> return right away - @spec match_rules(list(Rule.t()), any(), atom(), Eval.t()) :: Result.v() - defp match_rules(rules, subject, predicate, %Eval{} = eval) do + @spec match_rules(list(Rule.t()), any(), Eval.t()) :: Result.v() + defp match_rules({:predicate, %{name: predicate}, rules}, subject, %Eval{} = eval) do eval = %{eval | root_subject: subject} result = - Result.find(rules, &match_next(&1, subject, eval), fn {_condition, val}, binds -> + Result.find(rules, &match_next(&1, subject, eval), fn {result, _condition}, binds -> eval = %{eval | binds: binds} - - map_result(val, eval) + map_result(result, eval) end) if eval.debug? 
== :trace do @@ -88,7 +99,11 @@ defmodule Dx.Engine do result end - defp match_next({condition, _val}, subject, eval) do + defp match_next({_result, true}, _subject, _eval) do + Result.ok(true) + end + + defp match_next({_result, condition}, subject, eval) do evaluate_condition(condition, subject, eval) end @@ -393,8 +408,9 @@ defmodule Dx.Engine do other -> Result.ok(other) end - rescue - e in KeyError -> {:error, e} + + # rescue + # e in KeyError -> {:error, e} end defp resolve_path(val, [], _eval), do: Result.ok(val) diff --git a/lib/dx/evaluation.ex b/lib/dx/evaluation.ex index 66c38f0f..9cf3cc65 100644 --- a/lib/dx/evaluation.ex +++ b/lib/dx/evaluation.ex @@ -7,17 +7,21 @@ defmodule Dx.Evaluation do use TypedStruct typedstruct do + # Schema + field(:root_type, Dx.Type.t()) + field(:binds, map(), default: %{}) + + # Engine field(:root_subject, map()) field(:cache, any()) - field(:return_cache?, boolean(), default: false) - field(:binds, map()) field(:negate?, boolean(), default: false) - field(:resolve_predicates?, boolean(), default: true) # Options field(:loader, module(), default: Dx.Loaders.Dataloader) field(:args, map(), default: %{}) + field(:resolve_predicates?, boolean(), default: true) field(:debug?, boolean(), default: false) + field(:return_cache?, boolean(), default: false) field(:extra_rules, list(module()), default: []) field(:select, any()) end diff --git a/lib/dx/loaders/dataloader.ex b/lib/dx/loaders/dataloader.ex index 8a63e545..aa4cf29d 100644 --- a/lib/dx/loaders/dataloader.ex +++ b/lib/dx/loaders/dataloader.ex @@ -31,19 +31,19 @@ defmodule Dx.Loaders.Dataloader do [:assoc, key, subject] end - defp args_for({:query_one, type, [main_condition | other_conditions], opts}) do + defp args_for({:query_one, type, [{{:field, field}, val} | other_conditions], opts}) do opts = opts |> where(other_conditions) - [:assoc, {:one, type, opts}, [main_condition]] + [:assoc, {:one, type, opts}, [{field, val}]] end - defp args_for({:query_first, type, [main_condition | other_conditions], opts}) do + defp args_for({:query_first, type, [{{:field, field}, val} | other_conditions], opts}) do opts = opts |> where(other_conditions) |> Keyword.put(:limit, 1) - [:assoc, {:one, type, opts}, [main_condition]] + [:assoc, {:one, type, opts}, [{field, val}]] end - defp args_for({:query_all, type, [main_condition | other_conditions], opts}) do + defp args_for({:query_all, type, [{{:field, field}, val} | other_conditions], opts}) do opts = opts |> where(other_conditions) - [:assoc, {:many, type, opts}, [main_condition]] + [:assoc, {:many, type, opts}, [{field, val}]] end defp where(opts, []), do: opts @@ -58,7 +58,7 @@ defmodule Dx.Loaders.Dataloader do source = Dataloader.Ecto.new(repo, - query: &Dx.Ecto.Query.from_options/2, + query: &Dx.Ecto.Query.execute_options/2, async: run_concurrently? 
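        # The :query function above, execute_options/2, applies options whose
        # conditions were already expanded by Dx.Schema, delegating to execute_where/3.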
) diff --git a/lib/dx/schema.ex b/lib/dx/schema.ex new file mode 100644 index 00000000..07b9bc0a --- /dev/null +++ b/lib/dx/schema.ex @@ -0,0 +1,381 @@ +defmodule Dx.Schema do + alias Dx.Schema.Type + + @lt_ops ~w(< lt less_than before)a + @lte_ops ~w(<= lte less_than_or_equal on_or_before at_or_before)a + @gte_ops ~w(>= gte greater_than_or_equal on_or_after at_or_after)a + @gt_ops ~w(> gt greater_than after)a + @all_ops @lt_ops ++ @lte_ops ++ @gte_ops ++ @gt_ops + @primitives_1 @all_ops ++ ~w[not]a + + def expand_mapping(name, type, eval) when is_atom(name) do + expand_atom(name, type, eval) + end + + def expand_mapping(name, type, eval) do + expand_result(name, type, eval) + end + + def expand_result(%type{} = struct, parent_type, eval) do + {expanded, _} = + struct + |> Map.from_struct() + |> expand_result(parent_type, eval) + + {struct(type, expanded), type} + end + + def expand_result(mapping, type, eval) when is_map(mapping) do + {expanded, types} = + Enum.reduce(mapping, {%{}, %{}}, fn pair, {map, types} -> + {{key, expanded}, type} = expand_result(pair, type, eval) + map = Map.put(map, key, expanded) + types = Map.put(types, key, type) + {map, types} + end) + + {expanded, {:map, types}} + end + + def expand_result(list, type, eval) when is_list(list) do + {list, type} = + Enum.map_reduce(list, [], fn elem, list_type -> + {expanded, elem_type} = expand_result(elem, type, eval) + {expanded, Type.merge(list_type, elem_type)} + end) + + {list, {:array, type}} + end + + def expand_result({:ref, path}, _type, eval) do + {path, type} = expand_ref_path(path, eval.root_type, eval) + {{:ref, path}, type} + end + + def expand_result({:bound, key}, _type, eval) do + type = get_bound(key, eval) + {{:bound, key}, type} + end + + def expand_result({:map, source, each_key, each_val}, type, eval) do + {source, source_type} = expand_mapping(source, type, eval) + + each_type = + case source_type do + {:array, type} -> + type + + type -> + raise ArgumentError, + "{:map, source, ...} must evaluate to an array type. 
Got: #{inspect(type)}" + end + + {each_key, eval} = + case each_key do + {:bind, key, conditions} when is_atom(key) -> + {conditions, _binds} = expand_condition(conditions, type, eval) + each_key = {:bind, key, conditions} + eval = put_in(eval.binds[key], each_type) + {each_key, eval} + + {:bind, key} when is_atom(key) -> + eval = put_in(eval.binds[key], each_type) + {each_key, eval} + + key when is_atom(key) -> + eval = put_in(eval.binds[key], each_type) + {each_key, eval} + end + + {each_val, type} = expand_result(each_val, source_type, eval) + {{:map, source, each_key, each_val}, type} + end + + # def expand_result({query_type, type, conditions}, type, eval) + # when query_type in [:query_one, :query_first] do + # conditions = expand_condition(conditions, type, eval) + # {{query_type, type, conditions}, Type.merge(type, nil)} + # end + + def expand_result({function, args}, type, eval) when is_function(function) do + args = + Enum.map(List.wrap(args), fn arg -> + {arg, _type} = expand_result(arg, type, eval) + arg + end) + + {{function, args}, :any} + end + + def expand_result({function, args, opts}, type, eval) when is_function(function) do + result_type = + case Keyword.pop(opts, :type) do + {type, []} -> + type + + {_, other_opts} -> + raise ArgumentError, "Unknown options: #{inspect(other_opts, pretty: true)}" + end + + args = + Enum.map(List.wrap(args), fn arg -> + {arg, _type} = expand_result(arg, type, eval) + arg + end) + + {{function, args}, result_type} + end + + def expand_result(tuple, type, eval) when is_tuple(tuple) do + case tuple do + {query_type, type, conditions} when query_type in [:query_one, :query_first] -> + {conditions, _binds} = expand_condition(conditions, type, eval) + {{query_type, type, conditions}, Type.merge(type, nil)} + + {query_type, type, conditions, opts} when query_type in [:query_one, :query_first] -> + {conditions, _binds} = expand_condition(conditions, type, eval) + {{query_type, type, conditions, opts}, Type.merge(type, nil)} + + {:query_all, type, conditions} -> + {conditions, _binds} = expand_condition(conditions, type, eval) + {{:query_all, type, conditions}, {:array, type}} + + {:query_all, type, conditions, opts} -> + {conditions, _binds} = expand_condition(conditions, type, eval) + {{:query_all, type, conditions, opts}, {:array, type}} + + tuple -> + {expanded, type} = + tuple + |> Tuple.to_list() + |> expand_result(type, eval) + + {List.to_tuple(expanded), type} + end + end + + def expand_result(other, _type, _eval) do + {other, Type.of(other)} + end + + defp expand_ref_path(path, type, eval) do + do_expand_ref_path(List.wrap(path), type, eval, []) + end + + defp do_expand_ref_path([], type, _eval, acc) do + {Enum.reverse(acc), type} + end + + defp do_expand_ref_path([:args, name | path], _type, eval, acc) when is_atom(name) do + arg = eval.args[name] + type = Type.of(arg) + do_expand_ref_path(path, type, eval, [name, :args | acc]) + end + + defp do_expand_ref_path([map | path], type, eval, acc) when is_map(map) do + {expanded, types} = + Enum.reduce(map, {%{}, %{}}, fn {key, val}, {map, types} -> + {expanded, type} = expand_ref_path(val, type, eval) + map = Map.put(map, key, expanded) + types = Map.put(types, key, type) + {map, types} + end) + + do_expand_ref_path(path, {:map, types}, eval, [expanded | acc]) + end + + defp do_expand_ref_path([list | path], type, eval, acc) when is_list(list) do + {expanded, types} = + Enum.reduce(list, {%{}, %{}}, fn + name, {map, types} when is_atom(name) -> + {expanded, type} = expand_atom(name, type, 
eval) + map = Map.put(map, name, expanded) + types = Map.put(types, name, type) + {map, types} + + other, _ -> + raise ArgumentError, + "A nested list in a {:ref, ...} can only contain atoms. Got " <> + inspect(other, pretty: true) + end) + + do_expand_ref_path(path, {:map, types}, eval, [expanded | acc]) + end + + defp do_expand_ref_path([name | path], type, eval, acc) when is_atom(name) do + {expanded, type} = expand_atom(name, type, eval) + do_expand_ref_path(path, type, eval, [expanded | acc]) + end + + defp expand_atom(name, {:array, type}, eval) do + {expanded, type} = expand_atom(name, type, eval) + {expanded, {:array, type}} + end + + defp expand_atom(name, [type, nil], eval) do + {expanded, type} = expand_atom(name, type, eval) + {expanded, Type.merge(type, nil)} + end + + defp expand_atom(:args, _type, eval) do + types = Map.new(eval.args, fn {key, val} -> {key, Type.of(val)} end) + {:args, {:map, types}} + end + + defp expand_atom(name, {:map, types}, _eval) when is_atom(name) do + type = + case Map.fetch(types, name) do + {:ok, type} -> + type + + :error -> + raise ArgumentError, "Type #{name} not found in {:map, #{inspect(types, pretty: true)}}" + end + + {name, type} + end + + defp expand_atom(name, type, eval) do + case Dx.Util.rules_for_predicate(name, type, eval) do + [] -> + case Dx.Util.Ecto.association_details(type, name) do + # can be Ecto.Association.Has or Ecto.Association.BelongsTo + %_{} = assoc -> + meta = + assoc + |> Map.from_struct() + |> Map.take([:ordered, :owner_key, :related_key, :unique]) + |> Map.put(:name, name) + + type = + case assoc.cardinality do + :one -> [assoc.queryable, nil] + :many -> {:array, assoc.queryable} + end + + {{:assoc, assoc.cardinality, assoc.queryable, meta}, type} + + _other -> + case Dx.Util.Ecto.field_details(type, name) do + nil -> + raise ArgumentError, + """ + Unknown field #{inspect(name)} on #{inspect(type)}.\ + """ + + field_type -> + {{:field, name}, field_type} + end + end + + rules -> + eval = %{eval | root_type: type} + + {expanded_rules, predicate_type} = + Enum.map_reduce(rules, [], fn + {condition, result}, types -> + {condition, binds} = expand_condition(condition, type, eval) + eval = %{eval | binds: Map.merge(eval.binds, binds)} + {result, result_type} = expand_result(result, type, eval) + {{result, condition}, Type.merge(types, result_type)} + + result, types -> + {result, result_type} = expand_result(result, type, eval) + {result, Type.merge(types, result_type)} + end) + + {{:predicate, %{name: name}, expanded_rules}, predicate_type} + end + end + + def expand_condition(map, type, eval) when is_map(map) do + expand_condition({:all, Enum.to_list(map)}, type, eval) + end + + def expand_condition({:all, []}, _type, _eval) do + {{:all, []}, %{}} + end + + def expand_condition({:all, [condition]}, type, eval) do + expand_condition(condition, type, eval) + end + + def expand_condition({:all, conditions}, type, eval) do + {conditions, binds} = expand_condition(Enum.to_list(conditions), type, eval) + {{:all, conditions}, binds} + end + + def expand_condition(list, type, eval) when is_list(list) do + Enum.map_reduce(list, %{}, fn condition, binds -> + {condition, new_binds} = expand_condition(condition, type, eval) + {condition, Map.merge(binds, new_binds)} + end) + end + + def expand_condition({:bind, bind_key, condition}, type, eval) do + {condition, binds} = expand_condition(condition, type, eval) + {{:bind, bind_key, condition}, Map.put(binds, bind_key, type)} + end + + def expand_condition({:bound, key}, _type, 
eval) do + {get_bound(key, eval), %{}} + end + + def expand_condition({left, {:bind, bind_key, right}}, type, eval) do + {left, left_type} = + case expand_mapping(left, type, eval) do + {left, {:array, elem_type}} -> {left, elem_type} + other -> other + end + + eval = put_in(eval.binds[bind_key], left_type) + {right, binds} = expand_condition(right, left_type, eval) + {{left, {:bind, bind_key, right}}, Map.put(binds, bind_key, left_type)} + end + + def expand_condition({key, other}, type, eval) when key in @primitives_1 do + {other, binds} = expand_condition(other, type, eval) + {{key, other}, binds} + end + + def expand_condition({left, right}, type, eval) when is_map(right) do + {left, left_type} = expand_mapping(left, type, eval) + {right, binds} = expand_condition(right, left_type, eval) + {{left, right}, binds} + end + + def expand_condition({left, right}, type, eval) do + {left, _type} = expand_mapping(left, type, eval) + {right, _type} = expand_result(right, eval.root_type, eval) + {{left, right}, %{}} + end + + # boolean shorthand + def expand_condition(name, type, eval) when is_atom(name) do + {expanded, _type} = expand_atom(name, type, eval) + {{expanded, true}, %{}} + end + + def expand_condition(other, type, _eval) do + raise ArgumentError, """ + Unknown condition syntax: + + #{inspect(other, pretty: true)} + Evaluated for type: #{inspect(type)} + """ + end + + defp get_bound(key, eval) do + if not is_atom(key) do + raise ArgumentError, "Binding reference can only be an atom. Got #{inspect(key)}" + end + + case Map.fetch(eval.binds, key) do + {:ok, bind} -> + bind + + :error -> + raise ArgumentError, "Unknown binding reference: #{inspect(key)}" + end + end +end diff --git a/lib/dx/schema/type.ex b/lib/dx/schema/type.ex new file mode 100644 index 00000000..666e0012 --- /dev/null +++ b/lib/dx/schema/type.ex @@ -0,0 +1,188 @@ +defmodule Dx.Schema.Type do + @moduledoc """ + Represents the type of a data structure or literal. + + ## Structure + + _(* = not implemented yet)_ + + ### Basic types + + | _Type_ | _Example_ | + |------------|-----------| + | `:any` | | + | `:integer` | `7` | + | `:float` | `3.14` | + | `:boolean` | `true` | + | `:string` | `"foo"` | + | `:atom` | `:foo` | + | `nil` | `nil` | + + ### Nested types + + | _Type_ | _Example_ | + |----------------------------------------------|---------------------------------| + | `{:array, :integer}` | `[1, 2, 3]` | + | `{:tuple, {:atom, :string}}` | `{:ok, "foo"}` | + | `{:map, {:atom, :float}}` * | `%{foo: 1.1, bar: 2.7}` | + | `{:map, {:atom, :float}, %{foo: :string}}` * | `%{foo: "bar", bar: 2.7}` | + | `{:map, %{foo: :string}}` | `%{foo: "bar"}` | + | `MyApp.Struct` | `%MyApp.Struct{foo: 1, bar: 2}` | + + ### Union types + + Represented by a list. 
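  Produced e.g. by `merge/2`, or for `:one` associations, which are typed
  `[RelatedSchema, nil]` (illustrative name) since they may be empty.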
+ + | _Type_ | _Example_ | + |--------------------------------------------|---------------------------------| + | `[:integer, :float, nil]` | `1.0` | + | `{:array, [:integer, :float, nil]}` | `[1.0, nil, 3, 4]` | + + ### Subset types + + | _Type_ | _Example_ | + |------------------------------------------------------------|-----------------------------------| + | `{:integer, [1, 2, 3]}` | `2` | + | `{:integer, {:gte, 0}}` * | `0` | + | `{:integer, {:all, [{:gte, 0}, {:lt, 7}]}}` * | `6` | + | `{:struct, MyApp.Struct, [:foo]}` * | `%MyApp.Struct{foo: 1}` | + | `{:struct, MyApp.Struct, [:foo, :bar], %{bar: :float}}` * | `%MyApp.Struct{foo: 1, bar: 1.7}` | + | `{:struct, MyApp.Struct, %{foo: :integer, bar: :float}}` * | `%MyApp.Struct{foo: 1, bar: 1.7}` | + + """ + + alias Dx.Util + + @doc """ + ## Examples + + iex> of(87) + {:integer, 87} + + iex> of(1.2) + {:float, 1.2} + + iex> of("foo") + {:string, "foo"} + + iex> of(:foo) + {:atom, :foo} + + iex> of(false) + {:boolean, false} + + iex> of(nil) + nil + + iex> of(%Ecto.Query{}) + Ecto.Query + + iex> of(%{foo: nil, bar: 1}) + {:map, %{foo: nil, bar: {:integer, 1}}} + """ + def of(integer) when is_integer(integer), do: {:integer, integer} + def of(float) when is_float(float), do: {:float, float} + def of(string) when is_binary(string), do: {:string, string} + def of(boolean) when is_boolean(boolean), do: {:boolean, boolean} + def of(nil), do: nil + def of(atom) when is_atom(atom), do: {:atom, atom} + def of(%type{}), do: type + def of(function) when is_function(function), do: :any + def of(map) when is_map(map), do: {:map, Map.new(map, fn {k, v} -> {k, of(v)} end)} + + @doc """ + Merges two types, returning a type that is a superset of both. + + ## Examples + + iex> merge(:string, :any) + :any + + iex> merge([], :integer) + :integer + + iex> merge(:boolean, []) + :boolean + + iex> merge(:string, :integer) + [:string, :integer] + + iex> merge(:string, :string) + :string + + iex> merge({:atom, :foo}, {:atom, :bar}) + {:atom, [:foo, :bar]} + + iex> merge({:boolean, true}, {:boolean, false}) + :boolean + + iex> merge(:atom, {:atom, [:foo, :bar]}) + {:atom, [:foo, :bar]} + + iex> merge({:atom, :foo}, :atom) + {:atom, :foo} + + iex> merge({:atom, [:bar, :baz]}, {:atom, [:foo, :bar]}) + {:atom, [:bar, :baz, :foo]} + """ + def merge(left, right) do + do_merge(left, right, true) + end + + defp do_merge([], right, _union) do + right + end + + defp do_merge(left, [], _union) do + left + end + + defp do_merge(:any, _right, _union) do + :any + end + + defp do_merge(_left, :any, _union) do + :any + end + + defp do_merge(same, same, _union) do + same + end + + defp do_merge(type, {type, condition}, _union) do + {type, condition} + end + + defp do_merge({type, condition}, type, _union) do + {type, condition} + end + + defp do_merge({:boolean, _}, {:boolean, _}, _union) do + :boolean + end + + defp do_merge({type, left}, {type, right}, union) do + {type, do_merge(left, right, union)} + end + + defp do_merge(list, [type | rest], union) when is_list(list) do + add(list, type) + |> do_merge(rest, union) + end + + defp do_merge(list, type, _union) when is_list(list) do + add(list, type) + end + + defp do_merge(left, right, true) do + [left, right] + end + + defp do_merge(_left, _right, false) do + :error + end + + defp add(list, type) do + Util.Enum.try_update_or_append(list, &do_merge(&1, type, false), type) + end +end diff --git a/lib/dx/util/enum.ex b/lib/dx/util/enum.ex index 13e4f575..6594b9ad 100644 --- a/lib/dx/util/enum.ex +++ b/lib/dx/util/enum.ex @@ 
-13,4 +13,63 @@ defmodule Dx.Util.Enum do def zip([], [], _fun, reverse_result) do Enum.reverse(reverse_result) end + + @doc """ + Finds an element in `enum` for which `matcher` returns a truthy value. + If an element is found, runs `updater` on it and replaces it in the `enum`. + If no element is found, appends `append` at the end of `enum`. + + ## Examples + + iex> update_or_append([1, 2, 3, 4], &(&1 > 2), &(&1 + 2), 0) + [1, 2, 5, 4] + + iex> update_or_append([1, 2, 3, 4], &(&1 > 4), &(&1 + 2), 0) + [1, 2, 3, 4, 0] + """ + def update_or_append(enum, matcher, updater, append) do + do_update_or_append(enum, matcher, updater, append, []) + end + + defp do_update_or_append([], _match, _update, append, acc) do + [append | acc] + |> Enum.reverse() + end + + defp do_update_or_append([elem | rest], match, update, append, acc) do + if match.(elem) do + elem = update.(elem) + Enum.reverse(acc, [elem | rest]) + else + do_update_or_append(rest, match, update, append, [elem | acc]) + end + end + + @doc """ + Finds an element in `enum` for which `updater` does not return `:error`. + If an element is found, replaces it with the `updater` result in the `enum`. + If no element is found, appends `append` at the end of `enum`. + + ## Examples + + iex> try_update_or_append([1, 2, 3, 4, 5], &if(&1 > 2, do: &1 + 3, else: :error), 0) + [1, 2, 6, 4, 5] + + iex> try_update_or_append([1, 2, 3, 4], fn _ -> :error end, 0) + [1, 2, 3, 4, 0] + """ + def try_update_or_append(enum, updater, append) do + do_try_update_or_append(enum, updater, append, []) + end + + defp do_try_update_or_append([], _updater, append, acc) do + Enum.reverse(acc, [append]) + end + + defp do_try_update_or_append([elem | rest], updater, append, acc) do + case updater.(elem) do + :error -> do_try_update_or_append(rest, updater, append, [elem | acc]) + elem -> Enum.reverse(acc, [elem | rest]) + end + end end diff --git a/mix.exs b/mix.exs index fd030d2f..30236d75 100644 --- a/mix.exs +++ b/mix.exs @@ -48,21 +48,38 @@ defmodule Dx.MixProject do defp test_apps(_), do: [] defp deps do - [ - # util - {:typed_struct, ">= 0.0.0"}, - {:dataloader, "~> 1.0.0"}, + ecto_deps() ++ + [ + # util + {:typed_struct, ">= 0.0.0"}, + {:dataloader, "~> 1.0.0"}, - # adapters - {:ecto, ">= 3.4.3 and < 4.0.0", optional: true}, - {:ecto_sql, "~> 3.0", optional: true}, + # dev & test + {:postgrex, "~> 0.14", only: :test, runtime: false}, + {:timex, "~> 3.6", only: :test, runtime: false}, + {:refinery, "~> 0.1.0", github: "dx-beam/refinery", only: :test}, + {:ex_doc, "~> 0.24", only: :dev, runtime: false} + # {:elixir_sense, github: "elixir-lsp/elixir_sense", only: [:dev, :test]} + ] + end - # dev & test - {:postgrex, "~> 0.14", only: :test, runtime: false}, - {:timex, "~> 3.6", only: :test, runtime: false}, - {:refinery, "~> 0.1.0", github: "dx-beam/refinery", only: :test}, - {:ex_doc, "~> 0.24", only: :dev, runtime: false} - ] + defp ecto_deps do + case System.get_env("ECTO") do + "." 
<> _ = path -> + [ + {:ecto, path: Path.join(path, "ecto"), override: true}, + {:ecto_sql, path: Path.join(path, "ecto_sql")} + ] + + nil -> + [{:ecto, ">= 3.4.3 and < 4.0.0", optional: true}, {:ecto_sql, "~> 3.0", optional: true}] + + github_user -> + [ + {:ecto, github: github_user <> "/ecto", override: true}, + {:ecto_sql, github: github_user <> "/ecto_sql"} + ] + end end def docs do @@ -82,8 +99,8 @@ defmodule Dx.MixProject do defp aliases do [ "ecto.setup": ["ecto.create", "ecto.migrate"], - "ecto.reset": ["ecto.drop", "ecto.setup"], - test: ["ecto.reset", "test"] + "ecto.reset": ["ecto.drop", "ecto.setup"] + # test: ["ecto.reset", "test"] ] end end diff --git a/mix.lock b/mix.lock index 206f7a06..c2b1c256 100644 --- a/mix.lock +++ b/mix.lock @@ -3,11 +3,11 @@ "combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"}, "connection": {:hex, :connection, "1.1.0", "ff2a49c4b75b6fb3e674bfc5536451607270aac754ffd1bdfe175abe4a6d7a68", [:mix], [], "hexpm", "722c1eb0a418fbe91ba7bd59a47e28008a189d47e37e0e7bb85585a016b2869c"}, "dataloader": {:hex, :dataloader, "1.0.10", "a42f07641b1a0572e0b21a2a5ae1be11da486a6790f3d0d14512d96ff3e3bbe9", [:mix], [{:ecto, ">= 3.4.3 and < 4.0.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:telemetry, "~> 1.0 or ~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "54cd70cec09addf4b2ace14cc186a283a149fd4d3ec5475b155951bf33cd963f"}, - "db_connection": {:hex, :db_connection, "2.4.1", "6411f6e23f1a8b68a82fa3a36366d4881f21f47fc79a9efb8c615e62050219da", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "ea36d226ec5999781a9a8ad64e5d8c4454ecedc7a4d643e4832bf08efca01f00"}, + "db_connection": {:hex, :db_connection, "2.4.2", "f92e79aff2375299a16bcb069a14ee8615c3414863a6fef93156aee8e86c2ff3", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "4fe53ca91b99f55ea249693a0229356a08f4d1a7931d8ffa79289b145fe83668"}, "decimal": {:hex, :decimal, "2.0.0", "a78296e617b0f5dd4c6caf57c714431347912ffb1d0842e998e9792b5642d697", [:mix], [], "hexpm", "34666e9c55dea81013e77d9d87370fe6cb6291d1ef32f46a1600230b1d44f577"}, "earmark_parser": {:hex, :earmark_parser, "1.4.25", "2024618731c55ebfcc5439d756852ec4e85978a39d0d58593763924d9a15916f", [:mix], [], "hexpm", "56749c5e1c59447f7b7a23ddb235e4b3defe276afc220a6227237f3efe83f51e"}, - "ecto": {:hex, :ecto, "3.7.1", "a20598862351b29f80f285b21ec5297da1181c0442687f9b8329f0445d228892", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "d36e5b39fc479e654cffd4dbe1865d9716e4a9b6311faff799b6f90ab81b8638"}, - "ecto_sql": {:hex, :ecto_sql, "3.7.2", "55c60aa3a06168912abf145c6df38b0295c34118c3624cf7a6977cd6ce043081", [:mix], [{:db_connection, "~> 2.2", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.7.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.4.0 or ~> 0.5.0 or ~> 0.6.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.15.0 or ~> 0.16.0 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: 
true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "3c218ea62f305dcaef0b915fb56583195e7b91c91dcfb006ba1f669bfacbff2a"}, + "ecto": {:git, "https://github.com/elixir-ecto/ecto.git", "4f541892e738a4dfad66601e19db3567e3335c2c", []}, + "ecto_sql": {:git, "https://github.com/elixir-ecto/ecto_sql.git", "c8686ae3edc4285361af89ee8875ffa73b255d4d", []}, "ex_doc": {:hex, :ex_doc, "0.28.3", "6eea2f69995f5fba94cd6dd398df369fe4e777a47cd887714a0976930615c9e6", [:mix], [{:earmark_parser, "~> 1.4.19", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "05387a6a2655b5f9820f3f627450ed20b4325c25977b2ee69bed90af6688e718"}, "gettext": {:hex, :gettext, "0.19.1", "564953fd21f29358e68b91634799d9d26989f8d039d7512622efb3c3b1c97892", [:mix], [], "hexpm", "10c656c0912b8299adba9b061c06947511e3f109ab0d18b44a866a4498e77222"}, "hackney": {:hex, :hackney, "1.18.1", "f48bf88f521f2a229fc7bae88cf4f85adc9cd9bcf23b5dc8eb6a1788c662c4f6", [:rebar3], [{:certifi, "~>2.9.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~>6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~>1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.3.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~>1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~>0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "a4ecdaff44297e9b5894ae499e9a070ea1888c84afdd1fd9b7b2bc384950128e"}, @@ -19,10 +19,10 @@ "mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm", "f278585650aa581986264638ebf698f8bb19df297f66ad91b18910dfc6e19323"}, "nimble_parsec": {:hex, :nimble_parsec, "1.2.3", "244836e6e3f1200c7f30cb56733fd808744eca61fd182f731eac4af635cc6d0b", [:mix], [], "hexpm", "c8d789e39b9131acf7b99291e93dae60ab48ef14a7ee9d58c6964f59efb570b0"}, "parse_trans": {:hex, :parse_trans, "3.3.1", "16328ab840cc09919bd10dab29e431da3af9e9e7e7e6f0089dd5a2d2820011d8", [:rebar3], [], "hexpm", "07cd9577885f56362d414e8c4c4e6bdf10d43a8767abb92d24cbe8b24c54888b"}, - "postgrex": {:hex, :postgrex, "0.16.1", "f94628a32c571266f53cd1e5fca705e626e2417bf1eee6f868985d14e874160a", [:mix], [{:connection, "~> 1.1", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "6b225df32c857b9430619dbe30200a7ae664e23415a771ae9209396ee8eeee64"}, + "postgrex": {:hex, :postgrex, "0.16.4", "26d998467b4a22252285e728a29d341e08403d084e44674784975bb1cd00d2cb", [:mix], [{:connection, "~> 1.1", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", 
"3234d1a70cb7b1e0c95d2e242785ec2a7a94a092bbcef4472320b950cfd64c5f"}, "refinery": {:git, "https://github.com/dx-beam/refinery.git", "721685494ff0d3c3908ca0a739c9b6fdb14d05e8", []}, "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.6", "cf344f5692c82d2cd7554f5ec8fd961548d4fd09e7d22f5b62482e5aeaebd4b0", [:make, :mix, :rebar3], [], "hexpm", "bdb0d2471f453c88ff3908e7686f86f9be327d065cc1ec16fa4540197ea04680"}, - "telemetry": {:hex, :telemetry, "1.0.0", "0f453a102cdf13d506b7c0ab158324c337c41f1cc7548f0bc0e130bbf0ae9452", [:rebar3], [], "hexpm", "73bc09fa59b4a0284efb4624335583c528e07ec9ae76aca96ea0673850aec57a"}, + "telemetry": {:hex, :telemetry, "1.1.0", "a589817034a27eab11144ad24d5c0f9fab1f58173274b1e9bae7074af9cbee51", [:rebar3], [], "hexpm", "b727b2a1f75614774cff2d7565b64d0dfa5bd52ba517f16543e6fc7efcc0df48"}, "timex": {:hex, :timex, "3.7.6", "502d2347ec550e77fdf419bc12d15bdccd31266bb7d925b30bf478268098282f", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.10", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 1.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "a296327f79cb1ec795b896698c56e662ed7210cc9eb31f0ab365eb3a62e2c589"}, "typed_struct": {:hex, :typed_struct, "0.2.1", "e1993414c371f09ff25231393b6430bd89d780e2a499ae3b2d2b00852f593d97", [:mix], [], "hexpm", "8f5218c35ec38262f627b2c522542f1eae41f625f92649c0af701a6fab2e11b3"}, "tzdata": {:hex, :tzdata, "1.1.1", "20c8043476dfda8504952d00adac41c6eda23912278add38edc140ae0c5bcc46", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "a69cec8352eafcd2e198dea28a34113b60fdc6cb57eb5ad65c10292a6ba89787"}, diff --git a/test/dx/bind_test.exs b/test/dx/bind_test.exs index a3aed7f4..4cb1c309 100644 --- a/test/dx/bind_test.exs +++ b/test/dx/bind_test.exs @@ -92,7 +92,7 @@ defmodule Dx.BindTest do end test "fails when using unbound key", %{user: user} do - assert_raise(KeyError, fn -> + assert_raise(ArgumentError, fn -> Dx.get!(user, :failing_data_for_author, extra_rules: UserRules, args: [created_by_id: 2]) end) end @@ -117,6 +117,31 @@ defmodule Dx.BindTest do describe "matching associated predicate" do test "returns bound value on root level of the assigns", %{user: user} do + import Test.Support.SchemaHelpers + + assert {{:predicate, %{name: :indirect_data_for_author}, + [ + {{:bound, :result}, + {:bind, :result, + {{:assoc, :many, Dx.Test.Schema.List, + %{ + name: :lists, + ordered: false, + owner_key: :id, + related_key: :created_by_id, + unique: true + }}, + {:bind, :result, + {{:predicate, %{name: :by_author?}, + [true: {{:field, :created_by_id}, {:ref, [:args, :created_by_id]}}]}, + true}}}}} + ]}, + User} = + expand_mapping(:indirect_data_for_author, User, + extra_rules: UserRules, + args: [created_by_id: 2] + ) + assert Dx.get!(user, :indirect_data_for_author, extra_rules: UserRules, args: [created_by_id: 2] diff --git a/test/dx/ecto/query/dynamic_test.exs b/test/dx/ecto/query/dynamic_test.exs new file mode 100644 index 00000000..e167ae8e --- /dev/null +++ b/test/dx/ecto/query/dynamic_test.exs @@ -0,0 +1,555 @@ +defmodule Dx.Ecto.DynamicTest do + use Dx.Test.DataCase, async: true + + alias Dx.Ecto.Query + alias Dx.Test.Repo + + import Ecto.Query + + setup do + archived_at = DateTime.utc_now() |> DateTime.truncate(:second) + list = create(List, %{archived_at: archived_at, created_by: %{}}) + + list2 = + create(List, %{ + title: "FANCY TEMPLATE", + from_template: %{title: "FANCY TEMPLATE"}, + created_by: %{} + }) + + tasks = + 
Enum.map(0..1, fn i -> + create(Task, %{list_id: list.id, due_on: today(i), created_by_id: list.created_by_id}) + end) + + [ + list: list, + list2: list2, + archived_at: archived_at, + title: list.title, + title2: list2.title, + tasks: tasks + ] + end + + # test "select comparison" do + # static = from(t in Task, select: %{title: t.title, same: t.title == t.desc, desc: t.desc}) + + # # assert Query.to_sql(Repo, static) =~ """ + # # SELECT l0."archived_at" FROM \ + # # """ + + # assert [ + # %{title: "My Task", desc: nil, same: false}, + # %{title: "My Task", desc: nil, same: false}, + # ] = Repo.all(static) + # end + + test "static fields", %{archived_at: archived_at} do + static = from(l in List, as: :list, select: [:archived_at]) + + assert Query.to_sql(Repo, static) =~ """ + SELECT l0."archived_at" FROM \ + """ + + assert [%{archived_at: ^archived_at}, %{title: nil}] = Repo.all(static) + end + + test "merge static fields", %{archived_at: archived_at, title: title, title2: title2} do + static = from(l in List, as: :list, select: [:archived_at], select_merge: [:title]) + + assert Query.to_sql(Repo, static) =~ """ + SELECT l0."archived_at", l0."title" FROM \ + """ + + assert [%{archived_at: ^archived_at, title: ^title}, %{archived_at: nil, title: ^title2}] = + Repo.all(static) + end + + test "merge static map", %{archived_at: archived_at, title: title, title2: title2} do + static = + from( + l in List, + as: :list, + select: %{}, + select_merge: %{a: map(l, [:archived_at])}, + select_merge: %{c: map(l, [:title])} + ) + + assert Query.to_sql(Repo, static) =~ """ + SELECT l0."archived_at", l0."title" FROM \ + """ + + assert [ + %{a: %{archived_at: ^archived_at}, c: %{title: ^title}}, + %{a: %{archived_at: nil}, c: %{title: ^title2}} + ] = Repo.all(static) + end + + test "from subquery", %{title: title, title2: title2} do + subquery = select(List, [l], %{t: l.title, l: "literal"}) + + query = + from(l in subquery(subquery), + select: %{x: l.t, y: l.l, z: "otherliteral"} + ) + + assert [ + %{x: ^title, y: "literal", z: "otherliteral"}, + %{x: ^title2, y: "literal", z: "otherliteral"} + ] = Repo.all(query) + end + + test "dynamic fields", %{archived_at: archived_at} do + fields = [:archived_at] + + query = from(l in List, as: :list, select: ^fields) + + assert Query.to_sql(Repo, query) =~ """ + SELECT l0."archived_at" FROM \ + """ + + assert [%{archived_at: ^archived_at}, %{title: nil}] = Repo.all(query) + end + + test "static field", %{archived_at: archived_at} do + static = from(l in List, as: :list, select: %{title: l.archived_at}) + + assert Query.to_sql(Repo, static) =~ """ + SELECT l0."archived_at" FROM \ + """ + + assert [%{title: ^archived_at}, %{title: nil}] = Repo.all(static) + end + + test "dynamic field", %{archived_at: archived_at} do + as = :list + field = :archived_at + + ref = dynamic(field(as(^as), ^field)) + query = from(l in List, as: :list, select: ^%{title: ref}) + # assert query.select.take == static.select.take + + assert Query.to_sql(Repo, query) =~ """ + SELECT l0."archived_at" FROM \ + """ + + assert [%{title: ^archived_at}, %{title: nil}] = Repo.all(query) + end + + test "fixed field in interpolation" do + query = from(l in List, as: :list, select: ^%{title: 3}) + query2 = from(l in List, as: :list, select: %{title: 3}) + assert query.select.expr == query2.select.expr + assert query.select.params == query2.select.params + + assert [%{title: 3}, %{title: 3}] = Repo.all(query) + end + + # test "static fragment", %{archived_at: archived_at} do + # static = + # from(l in 
List, + # as: :list, + # select: %{ + # title: l.archived_at, + # template_name: + # fragment( + # "CASE WHEN ? THEN ? ELSE ? END", + # is_nil(l.from_template_id), + # "", + # "template_name" + # ) + # }, + # order_by: [asc: :id] + # ) + + # assert [ + # %{title: ^archived_at, template_name: ""}, + # %{title: nil, template_name: "template_name"} + # ] = Repo.all(static) + # end + + test "dynamic fragment", %{archived_at: archived_at} do + ref0 = dynamic(field(as(:list), :archived_at)) + + ref = + dynamic( + [l], + fragment( + "CASE WHEN ? THEN ? ELSE ? END", + is_nil(l.from_template_id), + "", + "template_name" + ) + ) + + query = + from(l in List, as: :list, select: ^%{title: ref0, template_name: ref}, order_by: [asc: :id]) + + assert [ + %{title: ^archived_at, template_name: ""}, + %{title: nil, template_name: "template_name"} + ] = Repo.all(query) + end + + test "dynamic fragment and static field", %{archived_at: archived_at} do + ref = + dynamic( + [l], + fragment( + "CASE WHEN ? THEN ? ELSE ? END", + is_nil(l.from_template_id), + "", + "template_name" + ) + ) + + query = + from(l in List, + as: :list, + select: %{title: l.archived_at}, + select_merge: ^%{template_name: ref}, + order_by: [asc: :id] + ) + + # assert Query.to_sql(Repo, query) =~ """ + # FROM "lists" AS l0 WHERE (exists((SELECT \ + # """ + + assert [ + %{title: ^archived_at, template_name: ""}, + %{title: nil, template_name: "template_name"} + ] = Repo.all(query) + end + + test "merges dynamic fragment into struct", %{archived_at: archived_at} do + ref = + dynamic( + [l], + fragment( + "CASE WHEN ? THEN ? ELSE ? END", + is_nil(l.from_template_id), + "", + "template_name" + ) + ) + + query = + from(l in List, + as: :list, + select: [:title, :archived_at], + select_merge: ^%{title: ref}, + order_by: [asc: :id] + ) + + # assert Query.to_sql(Repo, query) =~ """ + # FROM "lists" AS l0 WHERE (exists((SELECT \ + # """ + + assert [ + %{title: "", archived_at: ^archived_at}, + %{title: "template_name", archived_at: nil} + ] = Repo.all(query) + end + + # test "merges dynamic fragment and static field", %{archived_at: archived_at} do + # ref = + # dynamic( + # [l], + # fragment( + # "CASE WHEN ? THEN ? ELSE ? END", + # is_nil(l.from_template_id), + # "", + # "template_name" + # ) + # ) + + # query = + # from(l in List, + # as: :list, + # select: merge(%{title: l.archived_at}, ^%{template_name: ref}), + # order_by: [asc: :id] + # ) + + # # assert Query.to_sql(Repo, query) =~ """ + # # FROM "lists" AS l0 WHERE (exists((SELECT \ + # # """ + + # assert [ + # %{title: ^archived_at, template_name: ""}, + # %{title: nil, template_name: "template_name"} + # ] = Repo.all(query) + # end + + test "dynamic where with subquery" do + subquery = + from(t in ListTemplate, + where: parent_as(^:list).from_template_id == t.id, + select: %{title: t.title} + ) + + where = dynamic([l], subquery(subquery) == l.title) + query = from(l in List, as: :list, where: ^where) + + assert [%{title: "FANCY TEMPLATE"}] = Repo.all(query) + end + + test "dynamic fragment with subquery" do + subquery = + from(t in ListTemplate, + where: parent_as(^:list).from_template_id == t.id, + select: %{title: t.title} + ) + + ref = + dynamic( + [l], + fragment( + "CASE WHEN ? THEN ? ELSE ? 
END", + is_nil(l.from_template_id), + "", + subquery(subquery) + ) + ) + + query = from(l in List, as: :list, select: ^%{template_name: ref}, order_by: [asc: :id]) + + assert [ + %{template_name: ""}, + %{template_name: "FANCY TEMPLATE"} + ] = Repo.all(query) + end + + test "dynamic fragment with subquery and field", %{archived_at: archived_at} do + subquery = + from(t in ListTemplate, + where: parent_as(^:list).from_template_id == t.id, + select: %{title: t.title} + ) + + ref = + dynamic( + [l], + fragment( + "CASE WHEN ? THEN ? ELSE ? END", + is_nil(l.from_template_id), + "", + subquery(subquery) + ) + ) + + query = + from(l in List, + as: :list, + select: %{title: l.archived_at}, + select_merge: ^%{template_name: ref}, + order_by: [asc: :id] + ) + + assert [ + %{title: ^archived_at, template_name: ""}, + %{title: nil, template_name: "FANCY TEMPLATE"} + ] = Repo.all(query) + end + + test "dynamic fragment with multiple subqueries and field", %{ + list: list, + archived_at: archived_at, + tasks: tasks + } do + created_by_id = list.created_by_id + + subquery0 = + from(t in Task, + where: t.list_id == parent_as(:list).id and t.created_by_id == ^created_by_id, + select: max(t.due_on) + ) + + subquery1 = + from(t in ListTemplate, where: parent_as(^:list).from_template_id == t.id, select: t.title) + + subquery2 = from(u in User, where: parent_as(^:list).created_by_id == u.id, select: u.email) + + ref = + dynamic( + [l], + fragment( + "CASE WHEN ? THEN ? ELSE ? END", + is_nil(l.from_template_id), + "", + subquery(subquery1) + ) + ) + + query = + from(l in List, + as: :list, + select: %{ + title: l.archived_at, + val0: 0, + maxdue: subquery(subquery0), + val1: true, + user_email: subquery(subquery2), + val2: nil + }, + select_merge: + ^%{ + # val3: 8, + template_name: ref, + val4: "gr8" + }, + select_merge: + ^%{ + # val5: 1.337, + maxdue: nil, + # val6: [1, 2, 3], + user_email: subquery(subquery1), + # val3: 16 + }, + select_merge: + ^%{ + user_email: subquery(subquery2), + maxdue: subquery(subquery0) + }, + order_by: [asc: :id] + ) + + max_due_on = tasks |> Enum.map(& &1.due_on) |> Enum.max(Date) + + assert [ + %{ + title: ^archived_at, + maxdue: ^max_due_on, + template_name: "", + user_email: "alice@acme.org" + }, + %{ + title: nil, + maxdue: nil, + template_name: "FANCY TEMPLATE", + user_email: "alice@acme.org" + } + ] = Repo.all(query) + end + + # test "where with subquery", %{list: list, archived_at: archived_at} do + # created_by_id = list.created_by_id + # subquery = from(t in Task, where: t.list_id == parent_as(:list).id and t.created_by_id == ^created_by_id, select: max(t.due_on)) + # static = + # from(l in List, + # as: :list, + # where: subquery(subquery) == fragment("?::date", l.archived_at) + # ) + + # assert Query.to_sql(Repo, static) =~ """ + # FROM "lists" AS l0 WHERE (exists((SELECT \ + # """ + + # assert [%{archived_at: ^archived_at}] = Repo.all(static) + # end + + # test "where with exists", %{archived_at: archived_at} do + # # created_by_id = list.created_by_id + # # subquery = from(t in Task, where: t.list_id == parent_as(:list).id and t.created_by_id == ^created_by_id, select: max(t.due_on)) + # static = + # from(l in List, + # as: :list, + # where: exists(from(t in Task, where: t.list_id == parent_as(:list).id)) + # ) + + # assert Query.to_sql(Repo, static) =~ """ + # FROM "lists" AS l0 WHERE (exists((SELECT \ + # """ + + # assert [%{archived_at: ^archived_at}] = Repo.all(static) + # end + + # test "where with subquery comparison", %{archived_at: archived_at, tasks: 
tasks} do + # # created_by_id = list.created_by_id + # # subquery = from(t in Task, where: t.list_id == parent_as(:list).id and t.created_by_id == ^created_by_id, select: max(t.due_on)) + # subquery = from(t in Task, where: t.list_id == parent_as(:list).id, select: max(t.due_on)) + # max_due_on = tasks |> Enum.map(& &1.due_on) |> Enum.max(Date) + # static = from(l in List, as: :list, where: subquery(subquery) == ^max_due_on) + + # assert Query.to_sql(Repo, static) =~ """ + # FROM "lists" AS l0 WHERE ((SELECT \ + # """ + + # assert [%{archived_at: ^archived_at}] = Repo.all(static) + # end + + # test "static exists", %{archived_at: archived_at} do + # static = + # from(l in List, + # as: :list, + # select: %{ + # title: l.archived_at, + # hastasks: exists(from(t in Task, where: t.list_id == parent_as(:list).id)) + # } + # ) + + # assert [ + # %{title: ^archived_at, hastasks: true}, + # %{title: nil, hastasks: false} + # ] = Repo.all(static) + # end + + # test "static exists2", %{archived_at: archived_at} do + # subquery = from(t in Task, where: t.list_id == parent_as(:list).id) + + # static = + # from(l in List, as: :list, select: %{title: l.archived_at, hastasks: exists(subquery)}) + + # assert [ + # %{title: ^archived_at, hastasks: true}, + # %{title: nil, hastasks: false} + # ] = Repo.all(static) + # end + + # test "static subquery", %{list: list, archived_at: archived_at, tasks: tasks} do + # created_by_id = list.created_by_id + + # subquery = + # from(t in Task, + # where: t.list_id == parent_as(:list).id and t.created_by_id == ^created_by_id, + # select: max(t.due_on) + # ) + + # static = + # from(l in List, as: :list, select: %{title: l.archived_at, maxdue: subquery(subquery)}) + + # max_due_on = tasks |> Enum.map(& &1.due_on) |> Enum.max(Date) + + # assert [ + # %{title: ^archived_at, maxdue: ^max_due_on}, + # %{title: nil, maxdue: nil} + # ] = Repo.all(static) + # end + + # # test "raises on non-simple subquery comparison", %{list: list, archived_at: archived_at, tasks: tasks} do + # # # created_by_id = list.created_by_id + # # # subquery = from(t in Task, where: t.list_id == parent_as(:list).id and t.created_by_id == ^created_by_id, select: max(t.due_on)) + # # subquery = from(t in Task, where: t.list_id == parent_as(:list).id, select: %{due_on: max(t.due_on), due_min: min(t.due_on)}) + # # max_due_on = tasks |> Enum.map(& &1.due_on) |> Enum.max(Date) + # # static = from(l in List, as: :list, where: subquery(subquery) == ^max_due_on) + + # # assert_raise ArgumentError, fn -> + # # Repo.all(static) + # # end + # # end + + # # test "raises on non-simple select of subquery used in select", %{list: list, archived_at: archived_at, tasks: tasks} do + # # subquery1 = from(t in ListTemplate, where: parent_as(^:list).from_template_id == t.id, select: %{title: t.title}) + # # ref = dynamic([l], fragment( + # # "CASE WHEN ? THEN ? ELSE ? 
END", + # # is_nil(l.from_template_id), + # # "", + # # subquery(subquery1) + # # )) + # # query = from(l in List, as: :list, select: %{title: l.archived_at, template_name: ^ref}, order_by: [asc: :id]) + + # # assert_raise ArgumentError, fn -> + # # Repo.all(query) + # # end + # # end +end diff --git a/test/dx/ecto/query/elixir_conf_test.exs b/test/dx/ecto/query/elixir_conf_test.exs new file mode 100644 index 00000000..ff2f5579 --- /dev/null +++ b/test/dx/ecto/query/elixir_conf_test.exs @@ -0,0 +1,43 @@ +defmodule Dx.Ecto.ElixirConfTest do + use Dx.Test.DataCase, async: true + + alias Dx.Ecto.Query + alias Dx.Test.Repo + + import Ecto.Query + + setup do + archived_at = DateTime.utc_now() |> DateTime.truncate(:second) + list = create(List, %{archived_at: archived_at, created_by: %{}}) + + list2 = + create(List, %{ + title: "FANCY TEMPLATE", + from_template: %{title: "FANCY TEMPLATE"}, + created_by: %{} + }) + + tasks = + Enum.map(0..1, fn i -> + create(Task, %{list_id: list.id, due_on: today(i), created_by_id: list.created_by_id}) + end) + + [ + list: list, + list2: list2, + archived_at: archived_at, + title: list.title, + title2: list2.title, + tasks: tasks + ] + end + + test "" do + from(l in List, + select: %{ + name: l.name, + task_count: count(assoc(l, :tasks)) + } + ) + end +end diff --git a/test/dx/ecto/query/query_test.exs b/test/dx/ecto/query/query_test.exs index 03e8de43..4b4caeea 100644 --- a/test/dx/ecto/query/query_test.exs +++ b/test/dx/ecto/query/query_test.exs @@ -29,12 +29,42 @@ defmodule Dx.Ecto.QueryTest do defp to_sql(query), do: Query.to_sql(Repo, query) + describe "expanded" do + test "condition on field" do + assert %{created_by_id: 7} |> expand_condition(List) == + {{:field, :created_by_id}, 7} + end + + test "non-translatable" do + assert %{created_by_id: [1, 2, 3], other_lists_by_creator: %{}} + |> expand_condition(List, extra_rules: Rules) == + { + :all, + [ + {{:field, :created_by_id}, [1, 2, 3]}, + {{:predicate, %{name: :other_lists_by_creator}, + [ + {{:query_all, Dx.Test.Schema.List, + [{{:field, :created_by_id}, {:ref, [field: :created_by_id]}}]}, + {:all, []}} + ]}, {:all, []}} + ] + } + end + end + test "condition on field" do query = Query.where(List, %{created_by_id: 7}) assert to_sql(query) =~ "\"created_by_id\" = 7" end + test "condition on association field" do + query = Query.where(List, %{created_by: %{last_name: "Vega"}}) + + assert to_sql(query) =~ "\"last_name\" = 'Vega'" + end + test "condition on predicate" do query = Query.where(List, %{state: :archived}, extra_rules: Rules) @@ -121,10 +151,7 @@ defmodule Dx.Ecto.QueryTest do test "raises error only listing non-translatable conditions" do conditions = %{created_by_id: [1, 2, 3], other_lists_by_creator: %{}} - errmsg = """ - Could not translate some conditions to SQL: - {:all, [other_lists_by_creator: %{}]}\ - """ + errmsg = ~r/Could not translate some conditions to SQL:/ assert_raise(Query.TranslationError, errmsg, fn -> Query.where(List, conditions, extra_rules: Rules) diff --git a/test/dx/map_test.exs b/test/dx/map_test.exs index e9fdb0cd..60a3744e 100644 --- a/test/dx/map_test.exs +++ b/test/dx/map_test.exs @@ -9,7 +9,7 @@ defmodule Dx.MapTest do [ {&Date.add/2, [{:ref, :due_on}, -1]}, {&Date.add/2, [{:ref, :due_on}, -7]} - ]} + ], type: {:array, Date}} infer prev_tasks_1: {:map, :prev_dates, :due_on, diff --git a/test/dx/ref_branching_test.exs b/test/dx/ref_branching_test.exs index caaf456b..daea537c 100644 --- a/test/dx/ref_branching_test.exs +++ b/test/dx/ref_branching_test.exs @@ -43,6 
+43,14 @@ defmodule Dx.RefBranchingTest do Enum.map(tasks, &Map.take(&1, [:title, :due_on])) end + test "Raise error on triple-nested list in ref path", %{list: list} do + msg = ~r/Got \[:title\]/ + + assert_raise ArgumentError, msg, fn -> + Dx.load!(list, {:ref, [:tasks, [[:title], :due_on]]}) + end + end + test "Ref on fields map within list returns a nested list of these fields' values", %{ list: list, tasks: tasks diff --git a/test/dx/ref_test.exs b/test/dx/ref_test.exs index 30afb6af..a95342a3 100644 --- a/test/dx/ref_test.exs +++ b/test/dx/ref_test.exs @@ -1,7 +1,7 @@ defmodule Dx.RefTest do use ExUnit.Case, async: true - alias Dx.Test.Schema.Task + alias Dx.Test.Schema.{List, Task} import Test.Support.Factories @@ -24,6 +24,32 @@ defmodule Dx.RefTest do when: %{args: %{context: %{title: nil}}} end + describe "expanded" do + test "expands predicate" do + eval = Dx.Evaluation.from_options(extra_rules: Rules) + {expanded, type} = Dx.Schema.expand_mapping(:list_archived_at, Task, eval) + + assert expanded == + {:predicate, %{name: :list_archived_at}, + [ + {{:ref, + [ + {:assoc, :one, List, + %{ + name: :list, + ordered: false, + owner_key: :list_id, + related_key: :id, + unique: true + }}, + {:field, :archived_at} + ]}, {:all, []}} + ]} + + assert type == [:utc_datetime, nil] + end + end + setup do task = build(Task, %{list: %{archived_at: ~U[2021-10-31 19:59:03Z]}}) diff --git a/test/dx/rules/args_test.exs b/test/dx/rules/args_test.exs index b4bb72af..4b69ca25 100644 --- a/test/dx/rules/args_test.exs +++ b/test/dx/rules/args_test.exs @@ -33,11 +33,11 @@ defmodule Dx.Rules.ArgsTest do assert Dx.get!(list, :active?, extra_rules: [Rules], args: [active?: false]) == false assert Dx.get!(list, :active?, extra_rules: [Rules], args: [active?: "yep"]) == false - assert_raise(KeyError, fn -> + assert_raise(ArgumentError, fn -> Dx.get!(list, :active?, extra_rules: [Rules], args: [passive?: true]) end) - assert_raise(KeyError, fn -> + assert_raise(ArgumentError, fn -> Dx.get!(list, :active?, extra_rules: [Rules]) end) end @@ -51,7 +51,7 @@ defmodule Dx.Rules.ArgsTest do assert Dx.get!(list, :has_user_verified?, extra_rules: [Rules], args: [user: other_user]) == false - assert_raise(KeyError, fn -> + assert_raise(ArgumentError, fn -> Dx.get!(list, :has_user_verified?, extra_rules: [Rules]) end) end @@ -70,7 +70,7 @@ defmodule Dx.Rules.ArgsTest do assert Dx.get!(list, :created_by?, extra_rules: [Rules], args: [user: other_user]) == false - assert_raise(KeyError, fn -> + assert_raise(ArgumentError, fn -> Dx.get!(list, :created_by?, extra_rules: [Rules]) end) end diff --git a/test/dx/schema/type_test.exs b/test/dx/schema/type_test.exs new file mode 100644 index 00000000..02b44b69 --- /dev/null +++ b/test/dx/schema/type_test.exs @@ -0,0 +1,5 @@ +defmodule Dx.Schema.TypeTest do + use ExUnit.Case, async: true + + doctest Dx.Schema.Type, import: true +end diff --git a/test/dx/schema_test.exs b/test/dx/schema_test.exs new file mode 100644 index 00000000..791e0ce9 --- /dev/null +++ b/test/dx/schema_test.exs @@ -0,0 +1,145 @@ +defmodule Dx.SchemaTest do + use Dx.Test.DataCase + + doctest Dx.Schema, import: true + + describe "expand_mapping" do + defmodule TaskRules do + use Dx.Rules, for: Task + + infer completed?: false, when: %{completed_at: nil} + infer completed?: true + + infer prev_dates: + {&Date.range/2, + [ + {&Date.add/2, [{:ref, :due_on}, -1]}, + {&Date.add/2, [{:ref, :due_on}, -7]} + ], type: {:array, Date}} + + infer prev_tasks_1: + {:map, :prev_dates, :due_on, + {:query_one, Task, + due_on: 
{:bound, :due_on}, created_by_id: {:ref, :created_by_id}}} + + infer prev_tasks_2: + {:map, :prev_dates, {:bind, :due_on}, + {:query_one, Task, + due_on: {:bound, :due_on}, created_by_id: {:ref, :created_by_id}}} + + infer prev_tasks_3: + {:map, :prev_dates, {:bind, :due_on, %{}}, + {:query_one, Task, + due_on: {:bound, :due_on}, created_by_id: {:ref, :created_by_id}}} + end + + defmodule Rules do + use Dx.Rules, for: List + + import_rules TaskRules + + infer archived?: true, when: %{archived_at: {:not, nil}} + infer archived?: false + + infer state: :archived, when: %{archived?: true} + infer state: :in_progress, when: %{tasks: %{completed?: true}} + infer state: :ready, when: %{tasks: %{}} + infer state: :empty + end + + test "produces correct plan" do + {expanded, type} = expand_result({:ref, :state}, List, extra_rules: Rules) + + assert expanded == + {:ref, + [ + {:predicate, %{name: :state}, + [ + archived: { + {:predicate, %{name: :archived?}, + [ + {true, {{:field, :archived_at}, {:not, nil}}}, + {false, {:all, []}} + ]}, + true + }, + in_progress: { + {:assoc, :many, Task, + %{ + name: :tasks, + ordered: false, + unique: true, + owner_key: :id, + related_key: :list_id + }}, + { + {:predicate, %{name: :completed?}, + [ + {false, {{:field, :completed_at}, nil}}, + {true, {:all, []}} + ]}, + true + } + }, + ready: { + {:assoc, :many, Task, + %{ + name: :tasks, + ordered: false, + unique: true, + owner_key: :id, + related_key: :list_id + }}, + {:all, []} + }, + empty: {:all, []} + ]} + ]} + + assert type == {:atom, [:archived, :in_progress, :ready, :empty]} + end + + test "combines booleans" do + {_expanded, type} = expand_result({:ref, :archived?}, List, extra_rules: Rules) + + assert type == :boolean + end + + test "has_many type" do + {_expanded, type} = expand_result({:ref, :tasks}, List, extra_rules: Rules) + + assert type == {:array, Task} + end + + test "belongs_to type" do + {_expanded, type} = expand_mapping(:list, Task, extra_rules: Rules) + + assert type == [List, nil] + end + + test "map primitive" do + {expanded, type} = expand_mapping(:prev_tasks_1, Task, extra_rules: Rules) + + assert expanded == + {:predicate, %{name: :prev_tasks_1}, + [ + {{:map, + {:predicate, %{name: :prev_dates}, + [ + {{&Date.range/2, + [ + {&Date.add/2, [{:ref, [{:field, :due_on}]}, -1]}, + {&Date.add/2, [{:ref, [{:field, :due_on}]}, -7]} + ]}, {:all, []}} + ]}, :due_on, + {:query_one, Task, + [ + {{:field, :due_on}, {:bound, :due_on}}, + {{:field, :created_by_id}, {:ref, [{:field, :created_by_id}]}} + ]}}, {:all, []}} + ]} + + assert type == [Task, nil] + end + end +end diff --git a/test/dx/util/enum_test.exs b/test/dx/util/enum_test.exs new file mode 100644 index 00000000..a663ac26 --- /dev/null +++ b/test/dx/util/enum_test.exs @@ -0,0 +1,5 @@ +defmodule Dx.Util.EnumTest do + use ExUnit.Case, async: true + + doctest Dx.Util.Enum, import: true +end diff --git a/test/support/data_case.ex b/test/support/data_case.ex index 40dc81bb..ecebf51d 100644 --- a/test/support/data_case.ex +++ b/test/support/data_case.ex @@ -21,6 +21,7 @@ defmodule Dx.Test.DataCase do import Test.Support.Factories import Test.Support.DateTimeHelpers + import Test.Support.SchemaHelpers end end diff --git a/test/support/schema_helpers.ex b/test/support/schema_helpers.ex new file mode 100644 index 00000000..78f8db86 --- /dev/null +++ b/test/support/schema_helpers.ex @@ -0,0 +1,17 @@ +defmodule Test.Support.SchemaHelpers do + def expand_mapping(mapping, type, opts \\ []) do + eval = opts |> Keyword.put_new(:root_type, type) |> 
Dx.Evaluation.from_options() + Dx.Schema.expand_mapping(mapping, type, eval) + end + + def expand_result(result, type, opts \\ []) do + eval = opts |> Keyword.put_new(:root_type, type) |> Dx.Evaluation.from_options() + Dx.Schema.expand_result(result, type, eval) + end + + def expand_condition(condition, type, opts \\ []) do + eval = Dx.Evaluation.from_options(opts) + {expanded, _binds} = Dx.Schema.expand_condition(condition, type, eval) + expanded + end +end
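
As a usage sketch (not part of the patch itself): assuming Test.Support.SchemaHelpers is imported via Dx.Test.DataCase as added above, a test module can call the helpers directly. The module name below is hypothetical; the expected value mirrors the "condition on field" case in query_test.exs in this diff.

defmodule Dx.SchemaHelpersUsageSketch do
  # Hypothetical example module, for illustration only.
  use Dx.Test.DataCase, async: true

  alias Dx.Test.Schema.List

  test "expand a plain field condition" do
    # expand_condition/3 builds an Evaluation from the given options and
    # returns only the expanded condition, discarding the bindings.
    assert expand_condition(%{created_by_id: 7}, List) ==
             {{:field, :created_by_id}, 7}
  end
end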