1 change: 1 addition & 0 deletions .iex.exs
@@ -8,3 +8,4 @@ Application.put_env(:sql, :ecto_repos, [SQL.Repo])
Application.put_env(:sql, SQL.Repo, username: "postgres", password: "postgres", hostname: "localhost", database: "sql_test#{System.get_env("MIX_TEST_PARTITION")}", pool: Ecto.Adapters.SQL.Sandbox, pool_size: 10)
Mix.Tasks.Ecto.Create.run(["-r", "SQL.Repo"])
SQL.Repo.start_link()
import SQL
105 changes: 105 additions & 0 deletions lib/adapters/ansi.ex
@@ -0,0 +1,105 @@
# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: 2025 DBVisor

defmodule SQL.Adapters.ANSI do
@moduledoc """
A SQL adapter for [ANSI SQL](https://blog.ansi.org/sql-standard-iso-iec-9075-2023-ansi-x3-135/).
"""
@moduledoc since: "0.2.0"

use SQL.Token

def token_to_string(value, mod \\ __MODULE__)
def token_to_string(value, mod) when is_struct(value) do
to_string(%{value | module: mod})
end

def token_to_string({tag, _, [{:parens, _, _} = value]}, mod) when tag in ~w[integer float update]a do
"#{mod.token_to_string(tag)}#{mod.token_to_string(value)}"
end
def token_to_string({tag, _, value}, _mod) when tag in ~w[ident integer float]a do
"#{value}"
end
def token_to_string({tag, _}, mod) do
mod.token_to_string(tag)
end
def token_to_string({:comment, _, value}, _mod) do
"-- #{value}"
end
def token_to_string({:comments, _, value}, _mod) do
"\\* #{value} *\\"
end
def token_to_string({:double_quote, _, value}, _mod) do
"\"#{value}\""
end
def token_to_string({:quote, _, value}, _mod) do
"'#{value}'"
end
def token_to_string({:parens, _, value}, mod) do
"(#{mod.token_to_string(value)})"
end
def token_to_string({:colon, _, value}, mod) do
"; #{mod.token_to_string(value)}"
end
def token_to_string({:comma, _, value}, mod) do
", #{mod.token_to_string(value)}"
end
def token_to_string({tag, _, []}, mod) do
mod.token_to_string(tag)
end
def token_to_string({tag, _, [[_ | _] = left, right]}, mod) when tag in ~w[join]a do
"#{mod.token_to_string(left)} #{mod.token_to_string(tag)} #{mod.token_to_string(right)}"
end
def token_to_string({tag, _, [{:with = t, _, [left, right]}]}, mod) when tag in ~w[to]a do
"#{mod.token_to_string(tag)} #{mod.token_to_string(left)} #{mod.token_to_string(t)} #{mod.token_to_string(right)}"
end
def token_to_string({tag, _, value}, mod) when tag in ~w[select from fetch limit where order offset group having with join by distinct create type drop insert alter table add into delete update start grant revoke set declare open close commit rollback references recursive]a do
"#{mod.token_to_string(tag)} #{mod.token_to_string(value)}"
end
def token_to_string({:on = tag, _, [source, as, value]}, mod) do
"#{mod.token_to_string(source)} #{mod.token_to_string(as)} #{mod.token_to_string(tag)} #{mod.token_to_string(value)}"
end
def token_to_string({tag, _, [left, [{:all = t, _, right}]]}, mod) when tag in ~w[union except intersect]a do
"#{mod.token_to_string(left)} #{mod.token_to_string(tag)} #{mod.token_to_string(t)} #{mod.token_to_string(right)}"
end
def token_to_string({:between = tag, _, [{:not = t, _, right}, left]}, mod) do
"#{mod.token_to_string(right)} #{mod.token_to_string(t)} #{mod.token_to_string(tag)} #{mod.token_to_string(left)}"
end
def token_to_string({tag, _, [left, right]}, mod) when tag in ~w[:: [\] <> <= >= != || + - ^ * / % < > = like ilike as union except intersect between and or on is not in cursor for to]a do
"#{mod.token_to_string(left)} #{mod.token_to_string(tag)} #{mod.token_to_string(right)}"
end
def token_to_string({tag, _, [{:parens, _, _} = value]}, mod) when tag not in ~w[in on]a do
"#{mod.token_to_string(tag)}#{mod.token_to_string(value)}"
end
def token_to_string({tag, _, values}, mod) when tag in ~w[not all between symmetric absolute relative forward backward on in for without]a do
"#{mod.token_to_string(tag)} #{mod.token_to_string(values)}"
end
def token_to_string({tag, _, [left, right]}, mod) when tag in ~w[.]a do
"#{mod.token_to_string(left)}.#{mod.token_to_string(right)}"
end
def token_to_string({tag, _, [left]}, mod) when tag in ~w[not]a do
"#{mod.token_to_string(left)} #{mod.token_to_string(tag)}"
end
def token_to_string({tag, _, [left]}, mod) when tag in ~w[asc desc isnull notnull]a do
"#{mod.token_to_string(left)} #{mod.token_to_string(tag)}"
end
def token_to_string({:binding, _, [idx]}, _mod) when is_integer(idx) do
"?"
end
def token_to_string({:binding, _, value}, _mod) do
"{{#{value}}}"
end
def token_to_string(:asterisk, _mod) do
"*"
end
def token_to_string(value, _mod) when is_atom(value) do
"#{value}"
end
def token_to_string(values, mod) when is_list(values) do
Enum.reduce(values, "", fn
token, "" -> mod.token_to_string(token)
{:comma, _, _} = token, acc -> acc <> mod.token_to_string(token)
token, acc -> acc <> " " <> mod.token_to_string(token)
end)
end
end
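For orientation, a minimal sketch of how these clauses compose, assuming hand-built token tuples in the {tag, meta, args} shape matched above (the real tuples come from the SQL lexer, which is not part of this diff):

# Hypothetical token tree for "select id, name from users", built by hand for illustration
tokens = [
  {:select, [], [{:ident, [], "id"}, {:comma, [], [{:ident, [], "name"}]}]},
  {:from, [], [{:ident, [], "users"}]}
]
SQL.Adapters.ANSI.token_to_string(tokens)
#=> "select id, name from users"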
14 changes: 14 additions & 0 deletions lib/adapters/mysql.ex
@@ -0,0 +1,14 @@
# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: 2025 DBVisor

defmodule SQL.Adapters.MySQL do
@moduledoc """
A SQL adapter for [MySQL](https://www.mysql.com).
"""
@moduledoc since: "0.2.0"

use SQL.Token

def token_to_string(value, mod \\ __MODULE__)
def token_to_string(token, mod), do: SQL.Adapters.ANSI.token_to_string(token, mod)
end
17 changes: 17 additions & 0 deletions lib/adapters/postgres.ex
@@ -0,0 +1,17 @@
# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: 2025 DBVisor

defmodule SQL.Adapters.Postgres do
@moduledoc """
A SQL adapter for [PostgreSQL](https://www.postgresql.org).
"""
@moduledoc since: "0.2.0"

use SQL.Token

def token_to_string(value, mod \\ __MODULE__)
def token_to_string({:not, _, [left, {:in, _, [{:binding, _, _} = right]}]}, mod), do: "#{mod.token_to_string(left)} != ANY(#{mod.token_to_string(right)})"
def token_to_string({:in, _, [left, {:binding, _, _} = right]}, mod), do: "#{mod.token_to_string(left)} = ANY(#{mod.token_to_string(right)})"
def token_to_string({:binding, _, [idx]}, _mod) when is_integer(idx), do: "$#{idx}"
def token_to_string(token, mod), do: SQL.Adapters.ANSI.token_to_string(token, mod)
end
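Given the same assumed token shape, the two ANY clauses above should rewrite binding-based membership tests roughly as follows (hand-built tokens, for illustration only):

SQL.Adapters.Postgres.token_to_string({:in, [], [{:ident, [], "id"}, {:binding, [], [1]}]})
#=> "id = ANY($1)"
SQL.Adapters.Postgres.token_to_string({:not, [], [{:ident, [], "id"}, {:in, [], [{:binding, [], [1]}]}]})
#=> "id != ANY($1)"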
15 changes: 15 additions & 0 deletions lib/adapters/tds.ex
@@ -0,0 +1,15 @@
# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: 2025 DBVisor

defmodule SQL.Adapters.TDS do
@moduledoc """
A SQL adapter for [TDS](https://www.microsoft.com/en-ca/sql-server).
"""
@moduledoc since: "0.2.0"

use SQL.Token

def token_to_string(value, mod \\ __MODULE__)
def token_to_string({:binding, _, [idx]}, _mod) when is_integer(idx), do: "@#{idx}"
def token_to_string(token, mod), do: SQL.Adapters.ANSI.token_to_string(token, mod)
end
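The dialect adapters mostly delegate to SQL.Adapters.ANSI; the visible differences in this diff are Postgres's ANY rewrite and the positional-binding syntax. A quick comparison on an assumed binding token:

token = {:binding, [], [1]}
SQL.Adapters.ANSI.token_to_string(token)     #=> "?"
SQL.Adapters.MySQL.token_to_string(token)    #=> "?"
SQL.Adapters.Postgres.token_to_string(token) #=> "$1"
SQL.Adapters.TDS.token_to_string(token)      #=> "@1"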
103 changes: 103 additions & 0 deletions lib/bnf.ex
@@ -0,0 +1,103 @@
# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: 2025 DBVisor
# https://standards.iso.org/iso-iec/9075/-2/ed-6/en/
# https://standards.iso.org/ittf/PubliclyAvailableStandards/ISO_IEC_9075-1_2023_ed_6_-_id_76583_Publication_PDF_(en).zip
# 0. \w(?![^<]*>) - terminal text (word character outside a non-terminal)
# 1. <[^>]*>\.{3} - repeated non-terminal rule
# 2. ({.+}...) - repeated group
# 3. <[^>]*> - non-terminal
# 4. \[[^\]]*] - optional
# 5. \|(?![^\[]*\]) - or (alternation outside an optional group)

defmodule SQL.BNF do
@moduledoc false

def parse() do
File.cwd!()
|> Path.join("standard/ISO_IEC_9075-2(E)_Foundation.bnf.txt")
|> File.read!()
|> parse()
end

def parse(binary) do
Map.new(parse(binary, :symbol, [], [], [], [], []))
end

defp parse(<<>>, _type, data, acc, symbol, expr, rules) do
merge(rules, symbol, expr ++ merge(acc, data))
end
defp parse(<<?*, rest::binary>>, :symbol = type, symbol, _acc, _data, _expr, rules) do
parse(rest, type, [], [], symbol, [], rules)
end
defp parse(<<?\n, ?\n, ?<, rest::binary>>, _type, data, acc, symbol, expr, rules) do
parse(<<?<, rest::binary>>, :symbol, [], [], [], [], merge(rules, symbol, expr ++ merge(acc, data)))
end
defp parse(<<?:, ?:, ?=, rest::binary>>, _type, data, acc, symbol, expr, rules) do
parse(rest, :expr, [], [], String.trim("#{data}"), [], merge(rules, symbol, expr ++ acc))
end
defp parse(<<?., rest::binary>>, type, [?!, ?! | _] = data, acc, symbol, expr, rules) do
parse(rest, type, [], merge(acc, "#{data ++ [?.]}"), symbol, expr, rules)
end
defp parse(<<?., ?., ?., rest::binary>>, type, data, acc, symbol, expr, rules) do
parse(rest, type, data ++ [?., ?., ?.], acc, symbol, expr, rules)
end
defp parse(<<?|, rest::binary>>, type, data, acc, symbol, expr, rules) do
parse(rest, type, data ++ [?|], acc, symbol, expr, rules)
end
defp parse(<<b, rest::binary>>, type, [] = data, acc, symbol, expr, rules) when b in [?\s, ?\t, ?\r, ?\n, ?\f] do
parse(rest, type, data, acc, symbol, expr, rules)
end
defp parse(<<b, rest::binary>>, type, data, acc, symbol, expr, rules) when b in [?\n] do
parse(rest, type, data, acc, symbol, expr, rules)
end
defp parse(<<b, rest::binary>>, type, data, acc, symbol, expr, rules) do
parse(rest, type, data ++ [b], acc, symbol, expr, rules)
end

defp merge([], []), do: []
defp merge(rules, []), do: rules
defp merge(rules, data), do: rules ++ [data]
defp merge(rules, [], []), do: rules
defp merge(rules, rule, expr) when is_list(rule), do: merge(rules, "#{rule}", expr)
defp merge(rules, rule, expr) when is_list(expr), do: merge(rules, rule, "#{expr}")
defp merge(rules, "<space>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, ["\u0020"]}] # 32 \u0020
defp merge(rules, "<identifier start>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}] # "Lu", "Ll", "Lt", "Lm", "Lo", or "Nl" Unicode.Set.match?(<<b::utf8>>, "[[:Lu:], [:Ll:], [:Lt:], [:Lm:], [:Lo:], [:Nl:]]")
defp merge(rules, "<identifier extend>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}] # 183 \u00B7 or "Mn", "Mc", "Nd", "Pc", or "Cf" Unicode.Set.match?(<<b::utf8>>, "[[:Mn:], [:Mc:], [:Nd:], [:Pc:], [:Cf:]]")
defp merge(rules, "<Unicode escape character>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, ["\\u"]}]
defp merge(rules, "<non-double quote character>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<whitespace>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, ["\u0009", "\u000D", "\u00A0", "\u00A0", "\u1680", "\u2000", "\u2001", "\u2002", "\u2003", "\u2004", "\u2005", "\u2006", "\u2007", "\u2008", "\u2009", "\u200A", "\u202F", "\u205F", "\u3000", "\u180E", "\u200B", "\u200C", "\u200D", "\u2060", "\uFEFF"]}]
defp merge(rules, "<truncating whitespace>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<bracketed comment contents>" = symbol, _expr), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<newline>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, ["\u000A", "\u000B", "\u000C", "\u000D", "\u0085", "\u2028", "\u2029"]}]
defp merge(rules, "<non-quote character>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<non-escaped character>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<escaped character>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<JSON path literal>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<JSON path string literal>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<JSON path numeric literal>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<JSON path identifier>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<JSON path key name>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<implementation-defined JSON representation option>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<preparable implementation-defined statement>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<SQLSTATE class code>" = symbol, _expr), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<SQLSTATE subclass code>" = symbol, _expr), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<host label identifier>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<host PL/I label variable>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<embedded SQL Ada program>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<Ada host identifier>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<embedded SQL C program>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<C host identifier>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<embedded SQL COBOL program>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<COBOL host identifier>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<embedded SQL Fortran program>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<Fortran host identifier>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<embedded SQL MUMPS program>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<MUMPS host identifier>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<embedded SQL Pascal program>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<Pascal host identifier>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<embedded SQL PL/I program>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<PL/I host identifier>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(rules, "<direct implementation-defined statement>" = symbol, "!! See the Syntax Rules."), do: rules ++ [{symbol, [:ignore]}]
defp merge(_rules, symbol, "!! See the Syntax Rules."), do: raise "Please apply rules for #{symbol} by referencing the PDF or https://github.com/ronsavage/SQL/blob/master/Syntax.rules.txt"
defp merge(rules, symbol, expr), do: rules ++ [{symbol, expr}]
end
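The numbered patterns in the header comment of lib/bnf.ex describe the grammar notation the parser walks. A rough, standalone sketch of what they match, run against an illustrative production line (the sample line and variable names are not from this PR):

line = "<query specification> ::= SELECT [ <set quantifier> ] <select list> <table expression>"

Regex.scan(~r/<[^>]*>/, line)        # non-terminals: <query specification>, <set quantifier>, ...
Regex.scan(~r/\[[^\]]*\]/, line)     # optional groups: [ <set quantifier> ]
Regex.scan(~r/\|(?![^\[]*\])/, line) # top-level alternation bars; none on this line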
50 changes: 0 additions & 50 deletions lib/compiler.ex

This file was deleted.
