Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .iex.exs
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,4 @@ Application.put_env(:sql, SQL.Repo, username: "postgres", password: "postgres",
Mix.Tasks.Ecto.Create.run(["-r", "SQL.Repo"])
SQL.Repo.start_link()
import SQL
alias SQL.BNF
12 changes: 12 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,18 @@

# Changelog

## v0.3.0 (2025-08-01)

### Enhancement
- Improve SQL generation performance by 57-344x compared to Ecto [#12](https://github.com/elixir-dbvisor/sql/pull/12).
- Fix a bug with complex CTEs [#15](https://github.com/elixir-dbvisor/sql/pull/15). Thanks to @kafaichoi
- Support for PostgreSQL GiST operators [#18](https://github.com/elixir-dbvisor/sql/pull/18). Thanks to @ibarchenkov
- `float` and `integer` nodes have now become `numeric` with metadata to distinguish `sign`, `whole` and `fractional` [#19](https://github.com/elixir-dbvisor/sql/pull/19).
- `keyword` nodes are now `ident` with metadata distinguishing whether it's a `keyword` [#19](https://github.com/elixir-dbvisor/sql/pull/19).
- `SQL.Lexer.lex/4` now returns `{:ok, context, tokens}` [#19](https://github.com/elixir-dbvisor/sql/pull/19).
- `SQL.Parser.parse/1` has become `SQL.Parser.parse/2` and takes `tokens` and `context` from `SQL.Lexer.lex/4` and returns `{:ok, context, tokens}` or raises an error [#19](https://github.com/elixir-dbvisor/sql/pull/19).


## v0.2.0 (2025-05-04)

### Enhancement
Expand Down
29 changes: 22 additions & 7 deletions bench.exs
Original file line number Diff line number Diff line change
Expand Up @@ -7,14 +7,29 @@ Application.put_env(:sql, :ecto_repos, [SQL.Repo])
Application.put_env(:sql, SQL.Repo, username: "postgres", password: "postgres", hostname: "localhost", database: "sql_test#{System.get_env("MIX_TEST_PARTITION")}", pool: Ecto.Adapters.SQL.Sandbox, pool_size: 10)
SQL.Repo.__adapter__().storage_up(SQL.Repo.config())
SQL.Repo.start_link()
range = 1..10_000
sql = ~SQL[with recursive temp (n, fact) as (select 0, 1 union all select n+1, (n+1)*fact from temp where n < 9)]
query = "temp" |> recursive_ctes(true) |> with_cte("temp", as: ^union_all(select("temp", [t], %{n: 0, fact: 1}), ^where(select("temp", [t], [t.n+1, t.n+1*t.fact]), [t], t.n < 9))) |> select([t], [t.n])
result = Tuple.to_list(SQL.Lexer.lex("with recursive temp (n, fact) as (select 0, 1 union all select n+1, (n+1)*fact from temp where n < 9)", __ENV__.file))
tokens = Enum.at(result, -1)
context = Enum.at(result, 1)
Benchee.run(
%{
"to_string" => fn -> for _ <- range, do: to_string(~SQL[with recursive temp (n, fact) as (select 0, 1 union all select n+1, (n+1)*fact from temp where n < 9)]) end,
"to_sql" => fn -> for _ <- range, do: SQL.to_sql(~SQL[with recursive temp (n, fact) as (select 0, 1 union all select n+1, (n+1)*fact from temp where n < 9)]) end,
"inspect" => fn -> for _ <- range, do: inspect(~SQL[with recursive temp (n, fact) as (select 0, 1 union all select n+1, (n+1)*fact from temp where n < 9)]) end,
"ecto" => fn -> for _ <- range, do: SQL.Repo.to_sql(:all, "temp" |> recursive_ctes(true) |> with_cte("temp", as: ^union_all(select("temp", [t], %{n: 0, fact: 1}), ^where(select("temp", [t], [t.n+1, t.n+1*t.fact]), [t], t.n < 9))) |> select([t], [t.n])) end
"comptime to_string" => fn _ -> to_string(sql) end,
"comptime to_sql" => fn _ -> SQL.to_sql(sql) end,
"comptime inspect" => fn _ -> inspect(sql) end,
"comptime ecto" => fn _ -> SQL.Repo.to_sql(:all, query) end,
"lex" => fn _ -> SQL.Lexer.lex("with recursive temp (n, fact) as (select 0, 1 union all select n+1, (n+1)*fact from temp where n < 9)", __ENV__.file) end,
"parse" => fn _ -> SQL.Parser.parse(tokens, context) end,
"runtime to_string" => fn _ -> to_string(~SQL[with recursive temp (n, fact) as (select 0, 1 union all select n+1, (n+1)*fact from temp where n < 9)]) end,
"runtime to_sql" => fn _ -> SQL.to_sql(~SQL[with recursive temp (n, fact) as (select 0, 1 union all select n+1, (n+1)*fact from temp where n < 9)]) end,
"runtime inspect" => fn _ -> inspect(~SQL[with recursive temp (n, fact) as (select 0, 1 union all select n+1, (n+1)*fact from temp where n < 9)]) end,
"runtime ecto" => fn _ -> SQL.Repo.to_sql(:all, "temp" |> recursive_ctes(true) |> with_cte("temp", as: ^union_all(select("temp", [t], %{n: 0, fact: 1}), ^where(select("temp", [t], [t.n+1, t.n+1*t.fact]), [t], t.n < 9))) |> select([t], [t.n])) end
},
time: 10,
memory_time: 2
inputs: %{
"Small" => Enum.to_list(1..1_000),
"Medium" => Enum.to_list(1..10_000),
"Bigger" => Enum.to_list(1..100_000)
},
memory_time: 2,
reduction_time: 2
)
77 changes: 51 additions & 26 deletions lib/adapters/ansi.ex
Original file line number Diff line number Diff line change
Expand Up @@ -11,31 +11,44 @@ defmodule SQL.Adapters.ANSI do

@doc false
def token_to_string(value, mod \\ __MODULE__)
def token_to_string(value, mod) when is_struct(value) do
to_string(%{value | module: mod})
def token_to_string(value, _mod) when is_struct(value) do
to_string(value)
end
def token_to_string({tag, _, [{:parens, _, _} = value]}, mod) when tag in ~w[integer float update]a do
def token_to_string({:*, _, []}, _mod) do
"*"
end
def token_to_string({:fun, _, [left, right]}, mod) do
"#{mod.token_to_string(left)}#{mod.token_to_string(right)}"
end
def token_to_string({tag, _, [{:paren, _, _} = value]}, mod) when tag in ~w[numeric update]a do
"#{mod.token_to_string(tag)}#{mod.token_to_string(value)}"
end
def token_to_string({tag, _, value}, _mod) when tag in ~w[ident integer float]a do
"#{value}"
def token_to_string({:ident, [{:keyword, :non_reserved},{:tag, tag}|_], [{:paren, _, _} = value]}, mod) do
"#{mod.token_to_string(tag)}#{mod.token_to_string(value)}"
end
def token_to_string({tag, _}, mod) do
mod.token_to_string(tag)
def token_to_string({:ident, [{:keyword, :non_reserved}, {:tag, tag}|_], [{:numeric, _, _} = value]}, mod) do
"#{mod.token_to_string(tag)} #{mod.token_to_string(value)}"
end
def token_to_string({_tag, [{:keyword, :non_reserved}|_], value}, mod) do
"#{mod.token_to_string(value)}"
end
def token_to_string({:numeric = tag, _, []}, mod), do: mod.token_to_string(tag)
def token_to_string({tag, _, value}, _mod) when tag in ~w[ident numeric]a do
value
end
def token_to_string({:comment, _, value}, _mod) do
"-- #{value}"
"--#{value}"
end
def token_to_string({:comments, _, value}, _mod) do
"\\* #{value} *\\"
"\\*#{value}*\\"
end
def token_to_string({:double_quote, _, value}, _mod) do
"\"#{value}\""
end
def token_to_string({:quote, _, value}, _mod) do
"'#{value}'"
end
def token_to_string({:parens, _, value}, mod) do
def token_to_string({:paren, _, value}, mod) do
"(#{mod.token_to_string(value)})"
end
def token_to_string({:bracket, _, value}, mod) do
Expand All @@ -47,21 +60,36 @@ defmodule SQL.Adapters.ANSI do
def token_to_string({:comma, _, value}, mod) do
", #{mod.token_to_string(value)}"
end
def token_to_string({:dot, _, [left, right]}, mod) do
"#{mod.token_to_string(left)}.#{mod.token_to_string(right)}"
end
def token_to_string({tag, _, []}, mod) do
mod.token_to_string(tag)
end
def token_to_string({tag, _, [[_ | _] = left, right]}, mod) when tag in ~w[join]a do
def token_to_string({:join=tag, _, [right]}, mod) do
"#{mod.token_to_string(tag)} #{mod.token_to_string(right)}"
end
def token_to_string({:join=tag, _, [{t, [{:keyword, :reserved}|_], _}=p, p1, p2, right]}, mod) when t != :as do
"#{mod.token_to_string(p)} #{mod.token_to_string(p1)} #{mod.token_to_string(p2)} #{mod.token_to_string(tag)} #{mod.token_to_string(right)}"
end
def token_to_string({:join=tag, _, [{t, [{:keyword, :reserved}|_], _}=p, p1, right]}, mod) when t != :as do
"#{mod.token_to_string(p)} #{mod.token_to_string(p1)} #{mod.token_to_string(tag)} #{mod.token_to_string(right)}"
end
def token_to_string({:join=tag, _, [{t, [{:keyword, :reserved}|_], _}=left, right]}, mod) when t != :as do
"#{mod.token_to_string(left)} #{mod.token_to_string(tag)} #{mod.token_to_string(right)}"
end
def token_to_string({tag, _, [{:with = t, _, [left, right]}]}, mod) when tag in ~w[to]a do
"#{mod.token_to_string(tag)} #{mod.token_to_string(left)} #{mod.token_to_string(t)} #{mod.token_to_string(right)}"
end
def token_to_string({tag, _, value}, mod) when tag in ~w[select from fetch limit where order offset group having with join by distinct create type drop insert alter table add into delete update start grant revoke set declare open close commit rollback references recursive]a do
def token_to_string({tag, _, value}, mod) when tag in ~w[select from fetch limit where order offset group having with join by distinct create type drop insert alter table add into delete update start grant revoke set declare open close commit rollback references recursive outer]a do
"#{mod.token_to_string(tag)} #{mod.token_to_string(value)}"
end
def token_to_string({:on = tag, _, [source, as, value]}, mod) do
"#{mod.token_to_string(source)} #{mod.token_to_string(as)} #{mod.token_to_string(tag)} #{mod.token_to_string(value)}"
end
def token_to_string({:not = tag, _, [ident | values]}, mod) when values != [] do
"#{mod.token_to_string(ident)} #{mod.token_to_string(tag)} #{mod.token_to_string(values)}"
end
def token_to_string({tag, _, [left, [{:all = t, _, right}]]}, mod) when tag in ~w[union except intersect]a do
"#{mod.token_to_string(left)} #{mod.token_to_string(tag)} #{mod.token_to_string(t)} #{mod.token_to_string(right)}"
end
Expand All @@ -71,19 +99,13 @@ defmodule SQL.Adapters.ANSI do
def token_to_string({tag, _, [left, right]}, mod) when tag in ~w[:: [\] <> <= >= != || + - ^ * / % < > = like ilike as union except intersect between and or on is not in cursor for to]a do
"#{mod.token_to_string(left)} #{mod.token_to_string(tag)} #{mod.token_to_string(right)}"
end
def token_to_string({tag, _, [{:parens, _, _} = value]}, mod) when tag not in ~w[in on]a do
def token_to_string({tag, _, [{:paren, _, _} = value]}, mod) when tag not in ~w[in on]a do
"#{mod.token_to_string(tag)}#{mod.token_to_string(value)}"
end
def token_to_string({tag, _, values}, mod) when tag in ~w[not all between symmetric absolute relative forward backward on in for without]a do
def token_to_string({tag, _, values}, mod) when tag in ~w[not all between asymmetric symmetric absolute relative forward backward on in for without]a do
"#{mod.token_to_string(tag)} #{mod.token_to_string(values)}"
end
def token_to_string({tag, _, [left, right]}, mod) when tag in ~w[.]a do
"#{mod.token_to_string(left)}.#{mod.token_to_string(right)}"
end
def token_to_string({tag, _, [left]}, mod) when tag in ~w[not]a do
"#{mod.token_to_string(left)} #{mod.token_to_string(tag)}"
end
def token_to_string({tag, _, [left]}, mod) when tag in ~w[asc desc isnull notnull]a do
def token_to_string({tag, _, [left]}, mod) when tag in ~w[asc desc isnull notnull not]a do
"#{mod.token_to_string(left)} #{mod.token_to_string(tag)}"
end
def token_to_string({:binding, _, [idx]}, _mod) when is_integer(idx) do
Expand All @@ -92,7 +114,7 @@ defmodule SQL.Adapters.ANSI do
def token_to_string({:binding, _, value}, _mod) do
"{{#{value}}}"
end
def token_to_string(:asterisk, _mod) do
def token_to_string(:*, _mod) do
"*"
end
def token_to_string(value, _mod) when is_atom(value) do
Expand All @@ -101,12 +123,15 @@ defmodule SQL.Adapters.ANSI do
def token_to_string(value, _mod) when is_binary(value) do
"'#{value}'"
end
def token_to_string(values, mod) when is_list(values) do
def token_to_string([h|_]=values, mod) when is_tuple(h) or is_tuple(hd(h)) do
values
|> Enum.reduce([], fn
token, [] = acc -> [acc | mod.token_to_string(token)]
{:comma, _, _} = token, acc -> [acc | mod.token_to_string(token)]
token, acc -> [acc, " " | mod.token_to_string(token)]
token, [] = acc -> [acc,mod.token_to_string(token, mod)]
{:comma, _, _} = token, acc -> [acc,mod.token_to_string(token, mod)]
token, acc -> [acc," ",mod.token_to_string(token, mod)]
end)
end
def token_to_string(value, _mod) do
value
end
end
3 changes: 3 additions & 0 deletions lib/adapters/postgres.ex
Original file line number Diff line number Diff line change
Expand Up @@ -14,5 +14,8 @@ defmodule SQL.Adapters.Postgres do
def token_to_string({:not, _, [left, {:in, _, [{:binding, _, _} = right]}]}, mod), do: "#{mod.token_to_string(left)} != ANY(#{mod.token_to_string(right)})"
def token_to_string({:in, _, [left, {:binding, _, _} = right]}, mod), do: "#{mod.token_to_string(left)} = ANY(#{mod.token_to_string(right)})"
def token_to_string({:binding, _, [idx]}, _mod) when is_integer(idx), do: "$#{idx}"
def token_to_string({tag, _, [left, right]}, mod) when tag in ~w[>>=]a do
"#{mod.token_to_string(left)} #{mod.token_to_string(tag)} #{mod.token_to_string(right)}"
end
def token_to_string(token, mod), do: SQL.Adapters.ANSI.token_to_string(token, mod)
end
Loading